diff --git a/.github/ISSUE_TEMPLATE.md b/.github/ISSUE_TEMPLATE.md
new file mode 100644
index 00000000..8ecb2ae6
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE.md
@@ -0,0 +1,6 @@
+### Summary of Issue:
+### Expected behavior and actual behavior:
+### Steps to reproduce the problem (should include model description file(s) or link to public repository):
+### What is the changeset ID of the code, and the machine you are using:
+### Have you modified the code? If so, it must be committed and available for testing:
+### Screen output or log file showing the error message and context:
diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md
new file mode 100644
index 00000000..b68b1fb5
--- /dev/null
+++ b/.github/PULL_REQUEST_TEMPLATE.md
@@ -0,0 +1,17 @@
+[ 50-character, one-line summary ]
+
+[ Description of the changes in this commit. It should be enough
+  information for someone not following this development to understand.
+  Lines should be wrapped at about 72 characters. ]
+
+User interface changes?: [ No/Yes ]
+[ If yes, describe what changed, and steps taken to ensure backward compatibility ]
+
+Fixes: [GitHub issue #s] and a brief description of each issue.
+
+Testing:
+  test removed:
+  unit tests:
+  system tests:
+  manual testing:
+
diff --git a/.github/workflows/python.yaml b/.github/workflows/python.yaml
index 6a2b1929..0a05138d
--- a/.github/workflows/python.yaml
+++ b/.github/workflows/python.yaml
@@ -23,5 +23,5 @@ jobs:
         if [ -f requirements.txt ]; then pip install -r requirements.txt; fi
     - name: Test with pytest
       run: |
-        export PYTHONPATH=$(pwd)/scripts
+        export PYTHONPATH=$(pwd)/scripts:$(pwd)/scripts/parse_tools
         pytest
diff --git a/.travis.yml b/.travis.yml
new file mode 100644
index 00000000..4a395859
--- /dev/null
+++ b/.travis.yml
@@ -0,0 +1,28 @@
+language: python
+
+python:
+  - "2.7"
+  - "3.6"
+  - "3.7"
+
+branches:
+  only:
+    - feature/capgen
+
+install:
+  - pip install pylint
+
+script:
+  - env PYTHONPATH=scripts:${PYTHONPATH} pylint --rcfile ./test/.pylintrc ./test/unit_tests/test_metadata_table.py
+  - env PYTHONPATH=scripts:${PYTHONPATH} pylint --rcfile ./test/.pylintrc ./test/unit_tests/test_metadata_scheme_file.py
+  - python test/unit_tests/test_metadata_table.py
+  - python test/unit_tests/test_metadata_scheme_file.py
+
+notifications:
+  email:
+    recipients:
+#     - schramm@ucar.edu
+      - dom.heinzeller@noaa.gov
+#     - goldy@ucar.edu
+    on_success: always # default: change
+    on_failure: always # default: always
diff --git a/doc/HelloWorld/.gitignore b/doc/HelloWorld/.gitignore
new file mode 100644
index 00000000..378eac25
--- /dev/null
+++ b/doc/HelloWorld/.gitignore
@@ -0,0 +1 @@
+build
diff --git a/doc/HelloWorld/CMakeLists.txt b/doc/HelloWorld/CMakeLists.txt
new file mode 100644
index 00000000..4b0cb208
--- /dev/null
+++ b/doc/HelloWorld/CMakeLists.txt
@@ -0,0 +1,189 @@
+CMAKE_MINIMUM_REQUIRED(VERSION 2.8)
+PROJECT(HelloWorld)
+ENABLE_LANGUAGE(Fortran)
+
+include(CMakeForceCompiler)
+
+SET(CMAKE_MODULE_PATH ${CMAKE_MODULE_PATH} ${CMAKE_SOURCE_DIR}/cmake/modules)
+
+#-----------------------------------------------------------------------------
+#
+# Create list of SCHEME_FILES, HOST_FILES, and SUITE_FILES
+# Paths should be relative to CMAKE_SOURCE_DIR (this file's directory)
+#
+#-----------------------------------------------------------------------------
+SET(SCHEME_FILES "hello_scheme_files.txt")
+FILE(STRINGS ${SCHEME_FILES} SCHEME_FILENAMES)
+LIST(APPEND HOST_FILES "hello_world_mod")
+LIST(APPEND SUITE_FILES "hello_world_suite.xml")
+# HOST is
the name of the executable we will build. +SET(HOST "${CMAKE_PROJECT_NAME}") +# HOST_PROGRAM is the name of the program source files (Fortran and metadata) +SET(HOST_PROGRAM "hello_world_host") + +#----------------------------------------------------------------------------- +# +# Set where the CCPP Framework lives +# +#----------------------------------------------------------------------------- +get_filename_component(HELLO_ROOT "${CMAKE_SOURCE_DIR}" DIRECTORY) +get_filename_component(CCPP_ROOT "${HELLO_ROOT}" DIRECTORY) + +#----------------------------------------------------------------------------- +############################################################################## +# +# End of project-specific input +# +############################################################################## +#----------------------------------------------------------------------------- + +# By default, no verbose output +SET(VERBOSITY 0 CACHE STRING "Verbosity level of output (default: 0)") +# By default, generated caps go in ccpp subdir +SET(CCPP_CAP_FILES "${CMAKE_BINARY_DIR}/ccpp" CACHE + STRING "Location of CCPP-generated cap files") + +SET(CCPP_FRAMEWORK ${CCPP_ROOT}/scripts) + +# Use rpaths on MacOSX +set(CMAKE_MACOSX_RPATH 1) + +#----------------------------------------------------------------------------- +# Set a default build type if none was specified +if(NOT CMAKE_BUILD_TYPE AND NOT CMAKE_CONFIGURATION_TYPES) + #message(STATUS "Setting build type to 'Debug' as none was specified.") + #set(CMAKE_BUILD_TYPE Debug CACHE STRING "Choose the type of build." FORCE) + message(STATUS "Setting build type to 'Release' as none was specified.") + set(CMAKE_BUILD_TYPE Release CACHE STRING "Choose the type of build." FORCE) + + # Set the possible values of build type for cmake-gui + set_property(CACHE CMAKE_BUILD_TYPE PROPERTY STRINGS "Debug" "Release" + "MinSizeRel" "RelWithDebInfo") +endif() + +ADD_COMPILE_OPTIONS(-O0) + +# CMake 3 version: ADD_EXECUTABLE(${HOST} ${HOST_PROGRAM}.F90) +if (${CMAKE_Fortran_COMPILER_ID} MATCHES "GNU") +# gfortran +# MESSAGE("gfortran being used.") + ADD_COMPILE_OPTIONS(-fcheck=all) + ADD_COMPILE_OPTIONS(-fbacktrace) + ADD_COMPILE_OPTIONS(-ffpe-trap=zero) + ADD_COMPILE_OPTIONS(-finit-real=nan) + ADD_COMPILE_OPTIONS(-ggdb) + ADD_COMPILE_OPTIONS(-ffree-line-length-none) + ADD_COMPILE_OPTIONS(-cpp) +elseif (${CMAKE_Fortran_COMPILER_ID} MATCHES "Intel") +# ifort +# MESSAGE("ifort being used.") + #ADD_COMPILE_OPTIONS(-check all) + ADD_COMPILE_OPTIONS(-fpe0) + ADD_COMPILE_OPTIONS(-warn) + ADD_COMPILE_OPTIONS(-traceback) + ADD_COMPILE_OPTIONS(-debug extended) + ADD_COMPILE_OPTIONS(-fpp) +elseif (${CMAKE_Fortran_COMPILER_ID} MATCHES "PGI") +# pgf90 +# MESSAGE("pgf90 being used.") + ADD_COMPILE_OPTIONS(-g) + ADD_COMPILE_OPTIONS(-Mipa=noconst) + ADD_COMPILE_OPTIONS(-traceback) + ADD_COMPILE_OPTIONS(-Mfree) + ADD_COMPILE_OPTIONS(-Mfptrap) + ADD_COMPILE_OPTIONS(-Mpreprocess) +else (${CMAKE_Fortran_COMPILER_ID} MATCHES "GNU") + message (FATAL_ERROR "This program has only been compiled with gfortran, pgf90 and ifort. 
If another compiler is needed, the appropriate flags must be added in ${CMAKE_SOURCE_DIR}/CMakeLists.txt") +endif (${CMAKE_Fortran_COMPILER_ID} MATCHES "GNU") + +#----------------------------------------------------------------------------- +# CMake Modules +# Set the CMake module path +list(APPEND CMAKE_MODULE_PATH "${CCPP_FRAMEWORK}/cmake") +#----------------------------------------------------------------------------- +# Set OpenMP flags for C/C++/Fortran +if (OPENMP) + include(detect_openmp) + detect_openmp() + set (CMAKE_C_FLAGS "${CMAKE_C_FLAGS} ${OpenMP_C_FLAGS}") + set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${OpenMP_CXX_FLAGS}") + set (CMAKE_Fortran_FLAGS "${CMAKE_Fortran_FLAGS} ${OpenMP_Fortran_FLAGS}") + message(STATUS "Enable OpenMP support for C/C++/Fortran compiler") +else(OPENMP) + message (STATUS "Disable OpenMP support for C/C++/Fortran compiler") +endif() + +# Create metadata and source file lists +FOREACH(FILE ${SCHEME_FILENAMES}) + # add absolute pathnames to library list + string(REPLACE ".meta" ".F90" TEMP "${FILE}") + get_filename_component(ABS_PATH "${TEMP}" ABSOLUTE) + list(APPEND LIBRARY_LIST ${ABS_PATH}) + # CMake 3 version: target_sources(${HOST} PUBLIC ${ABS_PATH}) +ENDFOREACH(FILE) + +FOREACH(FILE ${HOST_FILES}) + LIST(APPEND HOST_METADATA "${FILE}.meta") + # add absolute pathnames to library list + get_filename_component(ABS_PATH "${FILE}.F90" ABSOLUTE) + LIST(APPEND HOST_SOURCE "${ABS_PATH}") +ENDFOREACH(FILE) +list(APPEND LIBRARY_LIST ${HOST_SOURCE}) +# CMake 3 version: target_sources(${HOST} PUBLIC ${HOST_SOURCE}) +string(REPLACE ";" ".meta," HOST_METADATA "${HOST_FILES}") +set(HOST_METADATA "${HOST_PROGRAM}.meta,${HOST_METADATA}.meta") + +# Run ccpp_capgen +set(CAPGEN_CMD "${CCPP_FRAMEWORK}/ccpp_capgen.py") +list(APPEND CAPGEN_CMD "--host-files") +list(APPEND CAPGEN_CMD "${HOST_METADATA}") +list(APPEND CAPGEN_CMD "--scheme-files") +list(APPEND CAPGEN_CMD "${SCHEME_FILES}") +list(APPEND CAPGEN_CMD "--suites") +list(APPEND CAPGEN_CMD "${SUITE_FILES}") +list(APPEND CAPGEN_CMD "--output-root") +list(APPEND CAPGEN_CMD "${CCPP_CAP_FILES}") +list(APPEND CAPGEN_CMD "--host-name") +list(APPEND CAPGEN_CMD "${HOST}") +while (VERBOSITY GREATER 0) + list(APPEND CAPGEN_CMD "--verbose") + MATH(EXPR VERBOSITY "${VERBOSITY} - 1") +endwhile () +string(REPLACE ";" " " CAPGEN_STRING "${CAPGEN_CMD}") +MESSAGE(STATUS "Running: ${CAPGEN_STRING}") +EXECUTE_PROCESS(COMMAND ${CAPGEN_CMD} WORKING_DIRECTORY ${CMAKE_SOURCE_DIR} + OUTPUT_VARIABLE CAPGEN_OUT ERROR_VARIABLE CAPGEN_OUT RESULT_VARIABLE RES) +MESSAGE(STATUS "${CAPGEN_OUT}") +if (RES EQUAL 0) + MESSAGE(STATUS "CCPP cap generation completed") +else(RES EQUAL 0) + MESSAGE(FATAL_ERROR "CCPP cap generation FAILED: result = ${RES}") +endif(RES EQUAL 0) + +# Retrieve the list of files from datatable.xml and set to CCPP_CAPS +set(DTABLE_CMD "${CCPP_FRAMEWORK}/ccpp_datafile.py") +list(APPEND DTABLE_CMD "${CCPP_CAP_FILES}/datatable.xml") +list(APPEND DTABLE_CMD "--ccpp-files") +list(APPEND DTABLE_CMD "--separator=\\;") +string(REPLACE ";" " " DTABLE_STRING "${DTABLE_CMD}") +MESSAGE(STATUS "Running: ${DTABLE_STRING}") +EXECUTE_PROCESS(COMMAND ${DTABLE_CMD} OUTPUT_VARIABLE CCPP_CAPS + RESULT_VARIABLE RES + OUTPUT_STRIP_TRAILING_WHITESPACE ERROR_STRIP_TRAILING_WHITESPACE) +if (RES EQUAL 0) + MESSAGE(STATUS "CCPP cap files retrieved") +else(RES EQUAL 0) + MESSAGE(FATAL_ERROR "CCPP cap file retrieval FAILED: result = ${RES}") +endif(RES EQUAL 0) +FOREACH(FILE ${CCPP_CAPS}) + list(APPEND LIBRARY_LIST ${FILE}) +ENDFOREACH(FILE) +# CMake 3 
version: target_sources(${HOST} PUBLIC ${CCPP_CAPS}) +add_library(HELLOLIB OBJECT ${LIBRARY_LIST}) +ADD_EXECUTABLE(${HOST} ${HOST_PROGRAM}.F90 $) + +INCLUDE_DIRECTORIES(${CCPP_CAP_FILES}) + +set_target_properties(${HOST} PROPERTIES + COMPILE_FLAGS "${CMAKE_Fortran_FLAGS}" + LINK_FLAGS "${CMAKE_Fortran_FLAGS}") diff --git a/doc/HelloWorld/Makefile.example b/doc/HelloWorld/Makefile.example new file mode 100644 index 00000000..4c29f855 --- /dev/null +++ b/doc/HelloWorld/Makefile.example @@ -0,0 +1,87 @@ +# +# Makefile for HelloWorld example +# + +FC = gfortran + +# My dirs +BLDDIR := $(shell pwd -P) +MKFILE_PATH := $(shell dirname $(abspath $(lastword $(MAKEFILE_LIST)))) +ROOT = $(shell dirname $(shell dirname $(MKFILE_PATH))) +CAPGEN := $(ROOT)/scripts/ccpp_capgen.py +CAPGEN_ARGS := --host-files $(MKFILE_PATH)/hello_world_host.meta,$(MKFILE_PATH)/hello_world_mod.meta +CAPGEN_ARGS += --scheme-files $(MKFILE_PATH)/hello_scheme.meta,$(MKFILE_PATH)/temp_adjust.meta +CAPGEN_ARGS += --suites $(MKFILE_PATH)/hello_world_suite.xml +CAPGEN_ARGS += --generate-host-cap --host-name HelloWorld +#CAPGEN_ARGS += --verbose --verbose +SRCS_META := $(MKFILE_PATH)/hello_world_host.meta +SRCS_META += $(MKFILE_PATH)/hello_world_mod.meta +SRCS_META += $(MKFILE_PATH)/hello_scheme.meta +SRCS_META += $(MKFILE_PATH)/temp_adjust.meta +SRCS_XML := $(MKFILE_PATH)/hello_world_suite.xml + +INCFLAG = -I +INCPATH += $(INCFLAG)$(BLDDIR) $(INCFLAG)$(MKFILE_PATH) +FCFLAGS += -g + +# SOURCE FILES +SRCS_F90 = $(MKFILE_PATH)/hello_scheme.F90 $(MKFILE_PATH)/hello_world_host.F90 +SRCS_F90 += $(MKFILE_PATH)/hello_world_mod.F90 +SRCS_F90 += $(MKFILE_PATH)/temp_adjust.F90 +OBJS_F90 = hello_scheme.o temp_adjust.o hello_world_mod.o hello_world_host.o + +# Do we have generated CAP files? +$(eval CAPOBJS=$(shell if [ -f caplocal.txt ]; then cat caplocal.txt; fi)) + +.PHONY: all +all: HelloWorld + +# Generate CCPP cap files +capfiles.txt: $(SRCS_META) $(SRCS_XML) + $(CAPGEN) $(CAPGEN_ARGS) + +.PHONY: capobjs +capobjs: capfiles.txt + $(shell $(MKFILE_PATH)/mkcaplocal.sh $(BLDDIR)/capfiles.txt) + $(eval CAPOBJS=$(shell cat caplocal.txt)) + +# Human written +HelloWorld: capobjs $(CAPOBJS) $(OBJS_F90) + $(FC) $(FCFLAGS) -o $@ $(CAPOBJS) $(OBJS_F90) + +hello_scheme.o: ccpp_kinds.o $(MKFILE_PATH)/hello_scheme.F90 + $(FC) -c $(INCPATH) $(FCFLAGS) $(MKFILE_PATH)/hello_scheme.F90 + +temp_adjust.o: ccpp_kinds.o $(MKFILE_PATH)/temp_adjust.F90 + $(FC) -c $(INCPATH) $(FCFLAGS) $(MKFILE_PATH)/temp_adjust.F90 + +hello_world_mod.o: ccpp_kinds.o $(MKFILE_PATH)/hello_world_mod.F90 + $(FC) -c $(INCPATH) $(FCFLAGS) $(MKFILE_PATH)/hello_world_mod.F90 + +hello_world_host.o: HelloWorld_ccpp_cap.o ccpp_kinds.o +hello_world_host.o: $(MKFILE_PATH)/hello_world_host.F90 + $(FC) -c $(INCPATH) $(FCFLAGS) $(MKFILE_PATH)/hello_world_host.F90 + +# CCPP generated +ccpp_kinds.F90: capobjs + +HelloWorld_ccpp_cap.F90: capobjs + +ccpp_hello_world_suite_cap.F90: capobjs + +ccpp_kinds.o: ccpp_kinds.F90 + $(FC) -c $(INCPATH) $(FCFLAGS) ccpp_kinds.F90 + +HelloWorld_ccpp_cap.o: ccpp_kinds.o +HelloWorld_ccpp_cap.o: hello_world_mod.o ccpp_hello_world_suite_cap.o +HelloWorld_ccpp_cap.o: HelloWorld_ccpp_cap.F90 + $(FC) -c $(INCPATH) $(FCFLAGS) HelloWorld_ccpp_cap.F90 + +ccpp_hello_world_suite_cap.o: ccpp_kinds.o ccpp_hello_world_suite_cap.F90 + $(FC) -c $(INCPATH) $(FCFLAGS) ccpp_hello_world_suite_cap.F90 + +.PHONY: clean +clean: + rm -f $(shell if [ -f capfiles.txt ]; then cat capfiles.txt; fi) + rm -f *.o *.mod HelloWorld capfiles.txt caplocal.txt + rm -rf HelloWorld.dSYM diff --git 
a/doc/HelloWorld/README.md b/doc/HelloWorld/README.md
new file mode 100644
index 00000000..476e55a6
--- /dev/null
+++ b/doc/HelloWorld/README.md
@@ -0,0 +1,77 @@
+Hello World
+===========
+
+**HelloWorld** is an introduction to the use of the CCPP Framework to
+connect a supporting host model with CCPP-compliant physics schemes
+and suites. In this example, the simple host model is represented by:
+ * **hello_world_host.F90**: The main program and a subroutine which
+   calls a physics suite.
+ * **hello_world_mod.F90**: A module with host-model variables usable
+   by the CCPP framework. The module also contains code to initialize
+   the host state and to check the results of running the example
+   physics suite.
+
+**HelloWorld** comes with a single suite defined in the file,
+ **hello_world_suite.xml**. The suite consists of two 'physics'
+ schemes, **hello_scheme** (found in **hello_scheme.F90**) and
+ **temp_adjust** (found in **temp_adjust.F90**).
+
+Each Fortran (`.F90`) file above is paired with a corresponding
+`.meta` file (e.g., **hello_world_mod.meta** and
+**hello_world_mod.F90**). The `.meta` file contains CCPP metadata
+which documents the data, datatypes, and/or schemes contained in the
+accompanying Fortran (`.F90`) file.
+
+To build and run the HelloWorld example using CMake, follow these steps:
+
+```
+cd /doc/HelloWorld
+mkdir build && cd build
+cmake .. && make
+./HelloWorld
+```
+
+The `cmake` step does the following:
+ 1. Run `ccpp_capgen.py` to generate CCPP framework interface files
+ 2. Generate a dependency graph and create a Makefile
+
+The `make` step compiles the source files and the files generated by
+`ccpp_capgen.py` and creates the `HelloWorld` executable.
+
+ 1. The `ccpp_capgen.py` step looks like:
+```
+../../../scripts/ccpp_capgen.py \
+  --host-files ../hello_world_host.meta,../hello_world_mod.meta \
+  --scheme-files ../hello_scheme.meta,../temp_adjust.meta \
+  --suites ../hello_world_suite.xml --generate-host-cap \
+  --host-name HelloWorld
+```
+This produces the following files:
+ * **HelloWorld_ccpp_cap.F90**: The CCPP interface functions to be called by
+the host model.
+ * **ccpp_hello_world_suite_cap.F90**: The suite file which contains the
+subroutines which execute each phase of the `hello_world_suite` physics
+suite. In general, there is one cap file per suite definition file.
+ * **ccpp_kinds.F90**: A module which defines common types such as
+`kind_phys` which are used by both host models and CCPP physics schemes.
+ * **capfiles.txt**: A file which lists all of the files generated by
+the call to `ccpp_capgen.py` (except for capfiles.txt). This file can
+be used in `cmake` (by including it in your CMakeLists.txt) or `make`
+(by including it in your Makefile) as shown in CMakeLists.txt and
+Makefile.example in this directory.
+
+To build and run the HelloWorld example with just a Makefile (no
+CMake), follow these steps:
+
+```
+cd /doc/HelloWorld
+mkdir build && cd build
+make -f ../Makefile.example
+./HelloWorld
+```
+The `make` step does the following:
+ 1. Run `ccpp_capgen.py` to generate CCPP framework interface files
+ 2. Compile source files and the files generated by
+    `ccpp_capgen.py` and create the `HelloWorld` executable.
+
+The `ccpp_capgen.py` step is the same as in the CMake example.
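As a minimal sketch of the `capfiles.txt` hook mentioned above: assuming `ccpp_capgen.py` has been run with `--output-root ${CMAKE_BINARY_DIR}/ccpp` and that the entries in `capfiles.txt` resolve from that directory, a host CMake build could fold the generated sources into the same object library used by the `CMakeLists.txt` in this directory (which instead queries `datatable.xml` through `ccpp_datafile.py`). This is an illustration only, not the method used by that file.

    # Sketch only: pull capgen-generated sources into the HELLOLIB object library
    file(STRINGS "${CMAKE_BINARY_DIR}/ccpp/capfiles.txt" CCPP_GENERATED_FILES)
    list(APPEND LIBRARY_LIST ${CCPP_GENERATED_FILES})
    add_library(HELLOLIB OBJECT ${LIBRARY_LIST})
    include_directories(${CMAKE_BINARY_DIR}/ccpp)

For plain `make`, `Makefile.example` above takes the equivalent route by converting `capfiles.txt` into a list of object files with `mkcaplocal.sh`.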
diff --git a/doc/HelloWorld/hello_scheme.F90 b/doc/HelloWorld/hello_scheme.F90 new file mode 100644 index 00000000..28019deb --- /dev/null +++ b/doc/HelloWorld/hello_scheme.F90 @@ -0,0 +1,85 @@ +!Hello demonstration parameterization +! + +MODULE hello_scheme + + USE ccpp_kinds, ONLY: kind_phys + + IMPLICIT NONE + PRIVATE + + PUBLIC :: hello_scheme_init + PUBLIC :: hello_scheme_run + PUBLIC :: hello_scheme_finalize + +CONTAINS + +!> \section arg_table_hello_scheme_run Argument Table +!! \htmlinclude arg_table_hello_scheme_run.html +!! + SUBROUTINE hello_scheme_run(ncol, lev, ilev, timestep, temp_level, & + temp_layer, errmsg, errflg) +!---------------------------------------------------------------- + IMPLICIT NONE +!---------------------------------------------------------------- + + integer, intent(in) :: ncol, lev, ilev + REAL(kind_phys), intent(inout) :: temp_level(:, :) + real(kind_phys), intent(in) :: timestep + REAL(kind_phys), INTENT(out) :: temp_layer(:, :) + character(len=512), intent(out) :: errmsg + integer, intent(out) :: errflg +!---------------------------------------------------------------- + + integer :: col_index + integer :: lev_index + + errmsg = '' + errflg = 0 + + if (ilev /= (lev + 1)) then + errflg = 1 + errmsg = 'Invalid value for ilev, must be lev+1' + return + end if + + do col_index = 1, ncol + do lev_index = 1, lev + temp_layer(col_index, lev_index) = (temp_level(col_index, lev_index) & + + temp_level(col_index, lev_index + 1)) / 2.0_kind_phys + end do + end do + + END SUBROUTINE hello_scheme_run + +!> \section arg_table_hello_scheme_init Argument Table +!! \htmlinclude arg_table_hello_scheme_init.html +!! + subroutine hello_scheme_init (errmsg, errflg) + + character(len=512), intent(out) :: errmsg + integer, intent(out) :: errflg + + ! This routine currently does nothing + + errmsg = '' + errflg = 0 + + end subroutine hello_scheme_init + +!> \section arg_table_hello_scheme_finalize Argument Table +!! \htmlinclude arg_table_hello_scheme_finalize.html +!! + subroutine hello_scheme_finalize (errmsg, errflg) + + character(len=512), intent(out) :: errmsg + integer, intent(out) :: errflg + + ! 
This routine currently does nothing + + errmsg = '' + errflg = 0 + + end subroutine hello_scheme_finalize + +END MODULE hello_scheme diff --git a/doc/HelloWorld/hello_scheme.meta b/doc/HelloWorld/hello_scheme.meta new file mode 100644 index 00000000..a5995b7a --- /dev/null +++ b/doc/HelloWorld/hello_scheme.meta @@ -0,0 +1,97 @@ +[ccpp-table-properties] + name = hello_scheme + type = scheme +[ccpp-arg-table] + name = hello_scheme_run + type = scheme +[ ncol ] + standard_name = horizontal_loop_extent + type = integer + units = count + dimensions = () + intent = in +[ lev ] + standard_name = vertical_layer_dimension + type = integer + units = count + dimensions = () + intent = in +[ ilev ] + standard_name = vertical_interface_dimension + type = integer + units = count + dimensions = () + intent = in +[ timestep ] + standard_name = time_step_for_physics + long_name = time step + units = s + dimensions = () + type = real + kind = kind_phys + intent = in +[ temp_level ] + standard_name = potential_temperature_at_interface + units = K + dimensions = (ccpp_constant_one:horizontal_loop_extent, vertical_interface_dimension) + type = real + kind = kind_phys + intent = inout +[ temp_layer ] + standard_name = potential_temperature + units = K + dimensions = (horizontal_loop_extent, vertical_layer_dimension) + type = real + kind = kind_phys + intent = out +[ errmsg ] + standard_name = ccpp_error_message + long_name = Error message for error handling in CCPP + units = 1 + dimensions = () + type = character + kind = len=512 + intent = out +[ errflg ] + standard_name = ccpp_error_flag + long_name = Error flag for error handling in CCPP + units = flag + dimensions = () + type = integer + intent = out +[ccpp-arg-table] + name = hello_scheme_init + type = scheme +[ errmsg ] + standard_name = ccpp_error_message + long_name = Error message for error handling in CCPP + units = 1 + dimensions = () + type = character + kind = len=512 + intent = out +[ errflg ] + standard_name = ccpp_error_flag + long_name = Error flag for error handling in CCPP + units = flag + dimensions = () + type = integer + intent = out +[ccpp-arg-table] + name = hello_scheme_finalize + type = scheme +[ errmsg ] + standard_name = ccpp_error_message + long_name = Error message for error handling in CCPP + units = 1 + dimensions = () + type = character + kind = len=512 + intent = out +[ errflg ] + standard_name = ccpp_error_flag + long_name = Error flag for error handling in CCPP + units = flag + dimensions = () + type = integer + intent = out diff --git a/doc/HelloWorld/hello_scheme_files.txt b/doc/HelloWorld/hello_scheme_files.txt new file mode 100644 index 00000000..2856889f --- /dev/null +++ b/doc/HelloWorld/hello_scheme_files.txt @@ -0,0 +1,2 @@ +hello_scheme.meta +temp_adjust.meta diff --git a/doc/HelloWorld/hello_world_host.F90 b/doc/HelloWorld/hello_world_host.F90 new file mode 100644 index 00000000..2c4066de --- /dev/null +++ b/doc/HelloWorld/hello_world_host.F90 @@ -0,0 +1,87 @@ +module hello_world_host + + use ccpp_kinds, only: kind_phys + + implicit none + private + + public hello_world_sub + +CONTAINS + + !> \section arg_table_hello_world_sub Argument Table + !! \htmlinclude arg_table_hello_world_sub.html + !! 
+ subroutine hello_world_sub() + + use hello_world_mod, only: ncols + use HelloWorld_ccpp_cap, only: HelloWorld_ccpp_physics_initialize + use HelloWorld_ccpp_cap, only: HelloWorld_ccpp_physics_timestep_initial + use HelloWorld_ccpp_cap, only: HelloWorld_ccpp_physics_run + use HelloWorld_ccpp_cap, only: HelloWorld_ccpp_physics_timestep_final + use HelloWorld_ccpp_cap, only: HelloWorld_ccpp_physics_finalize + use HelloWorld_ccpp_cap, only: ccpp_physics_suite_list + use HelloWorld_ccpp_cap, only: ccpp_physics_suite_part_list + use hello_world_mod, only: init_temp, compare_temp + + integer :: col_start, col_end + integer :: index + character(len=128), allocatable :: part_names(:) + character(len=512) :: errmsg + integer :: errflg + + ! Initialize our 'data' + call init_temp() + + ! Use the suite information to setup the run + call HelloWorld_ccpp_physics_initialize('hello_world_suite', errmsg, errflg) + if (errflg /= 0) then + write(6, *) trim(errmsg) + stop + end if + + ! Initialize the timestep + call HelloWorld_ccpp_physics_timestep_initial('hello_world_suite', errmsg, errflg) + if (errflg /= 0) then + write(6, *) trim(errmsg) + stop + end if + + do col_start = 1, ncols, 5 + col_end = MIN(col_start + 4, ncols) + + call HelloWorld_ccpp_physics_run('hello_world_suite', 'physics', col_start, col_end, errmsg, errflg) + if (errflg /= 0) then + write(6, *) trim(errmsg) + call ccpp_physics_suite_part_list('hello_world_suite', part_names, errmsg, errflg) + write(6, *) 'Available suite parts are:' + do index = 1, size(part_names) + write(6, *) trim(part_names(index)) + end do + stop + end if + end do + + call HelloWorld_ccpp_physics_timestep_final('hello_world_suite', errmsg, errflg) + + call HelloWorld_ccpp_physics_finalize('hello_world_suite', errmsg, errflg) + if (errflg /= 0) then + write(6, *) trim(errmsg) + write(6,'(a)') 'An error occurred in ccpp_timestep_final, Exiting...' + stop + end if + + if (compare_temp()) then + write(6, *) 'Answers are correct!' + else + write(6, *) 'Answers are not correct!' + end if + + end subroutine hello_world_sub + +end module hello_world_host + +program hello_world + use hello_world_host, only: hello_world_sub + call hello_world_sub() +end program hello_world diff --git a/doc/HelloWorld/hello_world_host.meta b/doc/HelloWorld/hello_world_host.meta new file mode 100644 index 00000000..938ebdc5 --- /dev/null +++ b/doc/HelloWorld/hello_world_host.meta @@ -0,0 +1,29 @@ +[ccpp-table-properties] + name = hello_world_sub + type = host +[ccpp-arg-table] + name = hello_world_sub + type = host +[ col_start ] + standard_name = horizontal_loop_begin + type = integer + units = count + dimensions = () +[ col_end ] + standard_name = horizontal_loop_end + type = integer + units = count + dimensions = () +[ errmsg ] + standard_name = ccpp_error_message + long_name = Error message for error handling in CCPP + units = 1 + dimensions = () + type = character + kind = len=512 +[ errflg ] + standard_name = ccpp_error_flag + long_name = Error flag for error handling in CCPP + units = flag + dimensions = () + type = integer diff --git a/doc/HelloWorld/hello_world_mod.F90 b/doc/HelloWorld/hello_world_mod.F90 new file mode 100644 index 00000000..44b689dd --- /dev/null +++ b/doc/HelloWorld/hello_world_mod.F90 @@ -0,0 +1,59 @@ +module hello_world_mod + + use ccpp_kinds, only: kind_phys + + implicit none + public + + integer :: ntimes_loop + !> \section arg_table_hello_world_mod Argument Table + !! \htmlinclude arg_table_hello_world_host.html + !! 
+ integer, parameter :: ncols = 10 + integer, parameter :: pver = 5 + integer, parameter :: pverp = 6 + real(kind_phys) :: temp_midpoints(ncols, pver) + real(kind_phys) :: temp_interfaces(ncols, pverp) + real(kind_phys) :: dt + + public :: init_temp + public :: compare_temp + +contains + + subroutine init_temp() + + integer :: col + integer :: lev + + temp_midpoints = 0.0_kind_phys + do lev = 1, pverp + do col = 1, ncols + temp_interfaces(col, lev) = real(((lev - 1) * ncols) + col, kind=kind_phys) + end do + end do + + end subroutine init_temp + + logical function compare_temp() + + integer :: col + integer :: lev + real(kind_phys) :: avg + + compare_temp = .true. + + do lev = 1, pver + do col = 1, ncols + avg = (temp_interfaces(col,lev) + temp_interfaces(col,lev+1)) + avg = 1.0_kind_phys + (avg / 2.0_kind_phys) + if (temp_midpoints(col, lev) /= avg) then + write(6, *) col, lev, temp_midpoints(col, lev), avg + compare_temp = .false. + end if + end do + end do + + end function compare_temp + +end module hello_world_mod diff --git a/doc/HelloWorld/hello_world_mod.meta b/doc/HelloWorld/hello_world_mod.meta new file mode 100644 index 00000000..2f56809b --- /dev/null +++ b/doc/HelloWorld/hello_world_mod.meta @@ -0,0 +1,37 @@ +[ccpp-table-properties] + name = hello_world_mod + type = module +[ccpp-arg-table] + name = hello_world_mod + type = module +[ ncols] + standard_name = horizontal_dimension + units = count + type = integer + dimensions = () +[ pver ] + standard_name = vertical_layer_dimension + units = count + type = integer + dimensions = () +[ pverp ] + standard_name = vertical_interface_dimension + type = integer + units = count + dimensions = () +[ temp_midpoints ] + standard_name = potential_temperature + units = K + dimensions = (horizontal_dimension, vertical_layer_dimension) + type = real | kind = kind_phys +[ temp_interfaces ] + standard_name = potential_temperature_at_interface + units = K + dimensions = (horizontal_dimension, vertical_interface_dimension) + type = real | kind = kind_phys +[ dt ] + standard_name = time_step_for_physics + long_name = time step + units = s + dimensions = () + type = real | kind = kind_phys diff --git a/doc/HelloWorld/hello_world_suite.xml b/doc/HelloWorld/hello_world_suite.xml new file mode 100644 index 00000000..3f078d74 --- /dev/null +++ b/doc/HelloWorld/hello_world_suite.xml @@ -0,0 +1,8 @@ + + + + + hello_scheme + temp_adjust + + diff --git a/doc/HelloWorld/mkcaplocal.sh b/doc/HelloWorld/mkcaplocal.sh new file mode 100755 index 00000000..7041fd45 --- /dev/null +++ b/doc/HelloWorld/mkcaplocal.sh @@ -0,0 +1,10 @@ +#! /bin/bash + +lname="caplocal.txt" +if [ -f "${lname}" ]; then + rm ${lname} +fi +touch ${lname} +while read line || [[ -n "${line}" ]]; do + echo "`basename ${line} .F90`.o" >> ${lname} +done < "${1}" diff --git a/doc/HelloWorld/temp_adjust.F90 b/doc/HelloWorld/temp_adjust.F90 new file mode 100644 index 00000000..4b6f6186 --- /dev/null +++ b/doc/HelloWorld/temp_adjust.F90 @@ -0,0 +1,78 @@ +!Hello demonstration parameterization +! + +MODULE temp_adjust + + USE ccpp_kinds, ONLY: kind_phys + + IMPLICIT NONE + PRIVATE + + PUBLIC :: temp_adjust_init + PUBLIC :: temp_adjust_run + PUBLIC :: temp_adjust_finalize + +CONTAINS + +!> \section arg_table_temp_adjust_run Argument Table +!! \htmlinclude arg_table_temp_adjust_run.html +!! 
+ SUBROUTINE temp_adjust_run(nbox, lev, temp_layer, & + timestep, errmsg, errflg) +!---------------------------------------------------------------- + IMPLICIT NONE +!---------------------------------------------------------------- + + integer, intent(in) :: nbox, lev + REAL(kind_phys), intent(inout) :: temp_layer(:, :) + real(kind_phys), intent(in) :: timestep + character(len=512), intent(out) :: errmsg + integer, intent(out) :: errflg +!---------------------------------------------------------------- + + integer :: box_index + integer :: lev_index + + errmsg = '' + errflg = 0 + + do box_index = 1, nbox + do lev_index = 1, lev + temp_layer(box_index, lev_index) = temp_layer(box_index, lev_index) & + + 1.0_kind_phys + end do + end do + + END SUBROUTINE temp_adjust_run + +!> \section arg_table_temp_adjust_init Argument Table +!! \htmlinclude arg_table_temp_adjust_init.html +!! + subroutine temp_adjust_init (errmsg, errflg) + + character(len=512), intent(out) :: errmsg + integer, intent(out) :: errflg + + ! This routine currently does nothing + + errmsg = '' + errflg = 0 + + end subroutine temp_adjust_init + +!> \section arg_table_temp_adjust_finalize Argument Table +!! \htmlinclude arg_table_temp_adjust_finalize.html +!! + subroutine temp_adjust_finalize (errmsg, errflg) + + character(len=512), intent(out) :: errmsg + integer, intent(out) :: errflg + + ! This routine currently does nothing + + errmsg = '' + errflg = 0 + + end subroutine temp_adjust_finalize + +END MODULE temp_adjust diff --git a/doc/HelloWorld/temp_adjust.meta b/doc/HelloWorld/temp_adjust.meta new file mode 100644 index 00000000..2e95195e --- /dev/null +++ b/doc/HelloWorld/temp_adjust.meta @@ -0,0 +1,84 @@ +[ccpp-table-properties] + name = temp_adjust + type = scheme +[ccpp-arg-table] + name = temp_adjust_run + type = scheme +[ nbox ] + standard_name = horizontal_loop_extent + type = integer + units = count + dimensions = () + intent = in +[ lev ] + standard_name = vertical_layer_dimension + type = integer + units = count + dimensions = () + intent = in +[ temp_layer ] + standard_name = potential_temperature + units = K + dimensions = (horizontal_loop_extent, vertical_layer_dimension) + type = real + kind = kind_phys + intent = inout +[ timestep ] + standard_name = time_step_for_physics + long_name = time step + units = s + dimensions = () + type = real + kind = kind_phys + intent = in +[ errmsg ] + standard_name = ccpp_error_message + long_name = Error message for error handling in CCPP + units = 1 + dimensions = () + type = character + kind = len=512 + intent = out +[ errflg ] + standard_name = ccpp_error_flag + long_name = Error flag for error handling in CCPP + units = flag + dimensions = () + type = integer + intent = out +[ccpp-arg-table] + name = temp_adjust_init + type = scheme +[ errmsg ] + standard_name = ccpp_error_message + long_name = Error message for error handling in CCPP + units = 1 + dimensions = () + type = character + kind = len=512 + intent = out +[ errflg ] + standard_name = ccpp_error_flag + long_name = Error flag for error handling in CCPP + units = flag + dimensions = () + type = integer + intent = out +[ccpp-arg-table] + name = temp_adjust_finalize + type = scheme +[ errmsg ] + standard_name = ccpp_error_message + long_name = Error message for error handling in CCPP + units = 1 + dimensions = () + type = character + kind = len=512 + intent = out +[ errflg ] + standard_name = ccpp_error_flag + long_name = Error flag for error handling in CCPP + units = flag + dimensions = () + type = integer + 
intent = out diff --git a/logging/logging.F90 b/logging/logging.F90 new file mode 100644 index 00000000..59e82786 --- /dev/null +++ b/logging/logging.F90 @@ -0,0 +1,406 @@ +module marbl_logging + +! ============ +! Module Usage +! ============ +! +! Assume a variable named StatusLog (as appears in the marbl_interface_class) +! +! ----------------------------------------------- +! Use the following routines to write log entries +! ----------------------------------------------- +! +! (1) StatusLog%log_noerror -- this stores a log message in StatusLog that does +! not contain a fatal error +! (2) StatusLog%log_header -- this stores a log message in StatusLog that is +! meant to be read as a section header; e.g. StatusLog%log_header('HEADER',...) +! writes the following (including blank lines) +! +! ------ +! HEADER +! ------ +! +! (3) StatusLog%log_error -- this stores a log message in StatusLog that DOES +! contain a fatal error. It does this by setting StatusLog%labort_marbl = +! .true.; when a call from the GCM to MARBL returns, it is important for the +! GCM to check the value of StatusLog%labort_marbl and abort the run if an +! error has been reported. +! (4) StatusLog%log_error_trace -- this stores a log message in StatusLog +! detailing what subroutine was just called and where it was called from. It +! is meant to provide more information when trying to trace the path through +! the code that resulted in an error. +! +! ----------------------------------------------- +! Pseudo-code for writing StatusLog in the driver +! ----------------------------------------------- +! +! type(marbl_status_log_entry_type), pointer :: LogEntry +! +! ! Set pointer to first entry of the log +! LogEntry => StatusLog%FullLog +! +! do while (associated(LogEntry)) +! ! If running in parallel, you may want to check if you are the master +! ! task or if LogEntry%lalltasks = .true. +! write(stdout,*) trim(LogEntry%LogMessage) +! LogEntry => LogEntry%next +! end do +! +! ! Erase contents of log now that they have been written out +! call StatusLog%erase() +! +! if (StatusLog%labort_marbl) then +! [GCM abort call: "error found in MARBL"] +! end if +! + + use marbl_kinds_mod, only : char_len + + implicit none + private + save + + integer, parameter, private :: marbl_log_len = 2*char_len + + !**************************************************************************** + + type, public :: marbl_status_log_entry_type + integer :: ElementInd = -1 ! ElementInd < 0 implies no location data + logical :: lonly_master_writes ! True => message should be written to stdout + ! master task; False => all tasks + character(len=marbl_log_len) :: LogMessage ! Message text + character(len=char_len) :: CodeLocation ! Information on where log was written + + type(marbl_status_log_entry_type), pointer :: next + end type marbl_status_log_entry_type + + !**************************************************************************** + + ! Note: this data type is not in use at the moment, but it is included as an + ! initial step towards allowing the user some control over what types + ! of messages are added to the log. For example, if you do not want + ! the contents of namelists written to the log, you would simply set + ! + ! lLogNamelist = .false. + ! + ! In the future we hope to be able to set these options via namelist, + ! but for now lLogNamelist, lLogGeneral, lLogWarning, and lLogError are + ! all set to .true. and can not be changed without modifying the source + ! code in this file. 
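The usage notes above cover reading the log from the driver; as a minimal sketch of the corresponding write path, the commented example below shows how a routine might record an error through the `log_error`/`log_noerror` interfaces documented in this module, and how its caller would add a trace entry with `log_error_trace`. The routine and variable names (`marbl_example_sub`, `depth`, `subname`) are placeholders, not part of MARBL.

! Sketch only (placeholder names): a routine records status through the log it is handed
! subroutine marbl_example_sub(depth, StatusLog)
!   use marbl_logging, only : marbl_log_type
!   real, intent(in) :: depth
!   type(marbl_log_type), intent(inout) :: StatusLog
!   character(len=*), parameter :: subname = 'marbl_example_sub'
!   if (depth < 0.0) then
!     call StatusLog%log_error('depth must be non-negative', subname)
!     return
!   end if
!   call StatusLog%log_noerror('depth check passed', subname)
! end subroutine marbl_example_sub
!
! The caller checks labort_marbl and records where the error came from:
! call marbl_example_sub(depth, StatusLog)
! if (StatusLog%labort_marbl) then
!   call StatusLog%log_error_trace('marbl_example_sub', 'name_of_calling_routine')
!   return
! end if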
+ type, private :: marbl_log_output_options_type + logical :: labort_on_warning ! True => elevate Warnings to Errors + logical :: lLogVerbose ! Debugging output should be given Verbose label + logical :: lLogNamelist ! Write namelists to log? + logical :: lLogGeneral ! General diagnostic output + logical :: lLogWarning ! Warnings (can be elevated to errors via labort_on_warning) + logical :: lLogError ! Errors (will toggle labort_marbl whether log + ! is written or not) + contains + procedure :: construct => marbl_output_options_constructor + end type marbl_log_output_options_type + + !**************************************************************************** + + type, public :: marbl_log_type + logical, private :: lconstructed = .false. ! True => constructor was already called + logical, public :: labort_marbl = .false. ! True => driver should abort GCM + logical, public :: lwarning = .false. ! True => warnings are present + type(marbl_log_output_options_type) :: OutputOptions + type(marbl_status_log_entry_type), pointer :: FullLog + type(marbl_status_log_entry_type), pointer :: LastEntry + contains + procedure, public :: construct => marbl_log_constructor + procedure, public :: log_header => marbl_log_header + procedure, public :: log_error => marbl_log_error + procedure, public :: log_warning => marbl_log_warning + procedure, public :: log_noerror => marbl_log_noerror + procedure, public :: log_error_trace => marbl_log_error_trace + procedure, public :: log_warning_trace => marbl_log_warning_trace + procedure, public :: erase => marbl_log_erase + procedure, private :: append_to_log + end type marbl_log_type + + !**************************************************************************** + +contains + + !**************************************************************************** + + subroutine marbl_output_options_constructor(this, labort_on_warning, LogVerbose, LogNamelist, & + LogGeneral, LogWarning, LogError) + + class(marbl_log_output_options_type), intent(inout) :: this + logical, intent(in), optional :: labort_on_warning, LogVerbose, LogNamelist + logical, intent(in), optional :: LogGeneral, LogWarning, LogError + + if (present(labort_on_warning)) then + this%labort_on_warning = labort_on_warning + else + this%labort_on_warning = .false. + end if + + if (present(LogVerbose)) then + this%lLogVerbose = LogVerbose + else + this%lLogVerbose = .false. + end if + + if (present(LogNamelist)) then + this%lLogNamelist = LogNamelist + else + this%lLogNamelist = .true. + end if + + if (present(LogGeneral)) then + this%lLogGeneral = LogGeneral + else + this%lLogGeneral = .true. + end if + + if (present(LogWarning)) then + this%lLogWarning = LogWarning + else + this%lLogWarning = .true. + end if + + if (present(LogError)) then + this%lLogError = LogError + else + this%lLogError = .true. + end if + + end subroutine marbl_output_options_constructor + + !**************************************************************************** + + subroutine marbl_log_constructor(this) + + class(marbl_log_type), intent(inout) :: this + + if (this%lconstructed) return + this%lconstructed = .true. + nullify(this%FullLog) + nullify(this%LastEntry) + call this%OutputOptions%construct() + + end subroutine marbl_log_constructor + + !**************************************************************************** + + subroutine marbl_log_header(this, HeaderMsg, CodeLoc) + + class(marbl_log_type), intent(inout) :: this + ! StatusMsg is the message to be printed in the log; it does not need to + ! 
contain the name of the module or subroutine producing the log message + ! CodeLoc is the name of the subroutine that is calling StatusLog%log_noerror + character(len=*), intent(in) :: HeaderMsg, CodeLoc + + character(len=len_trim(HeaderMsg)) :: dashes + integer :: n + + do n=1, len(dashes) + dashes(n:n) = '-' + end do + call this%log_noerror('', CodeLoc) + call this%log_noerror(dashes, CodeLoc) + call this%log_noerror(HeaderMsg, CodeLoc) + call this%log_noerror(dashes, CodeLoc) + call this%log_noerror('', CodeLoc) + + end subroutine marbl_log_header + + !**************************************************************************** + + subroutine marbl_log_error(this, ErrorMsg, CodeLoc, ElemInd) + + class(marbl_log_type), intent(inout) :: this + ! ErrorMsg is the error message to be printed in the log; it does not need + ! to contain the name of the module or subroutine triggering the error + ! CodeLoc is the name of the subroutine that is calling StatusLog%log_error + character(len=*), intent(in) :: ErrorMsg, CodeLoc + integer, optional, intent(in) :: ElemInd + + character(len=marbl_log_len) :: ErrorMsg_loc ! Message text + + this%labort_marbl = .true. + + ! Only allocate memory and add entry if we want to log full namelist! + if (.not.this%OutputOptions%lLogError) then + return + end if + + write(ErrorMsg_loc, "(4A)") "MARBL ERROR (", trim(CodeLoc), "): ", & + trim(ErrorMsg) + + call this%append_to_log(ErrorMsg_loc, CodeLoc, ElemInd, lonly_master_writes=.false.) + + end subroutine marbl_log_error + + !**************************************************************************** + + subroutine marbl_log_warning(this, WarningMsg, CodeLoc, ElemInd) + + class(marbl_log_type), intent(inout) :: this + ! WarningMsg is the message to be printed in the log; it does not need to + ! contain the name of the module or subroutine producing the log message + ! CodeLoc is the name of the subroutine that is calling StatusLog%log_warning + character(len=*), intent(in) :: WarningMsg, CodeLoc + integer, optional, intent(in) :: ElemInd + + character(len=marbl_log_len) :: WarningMsg_loc ! Message text + + this%lwarning = .true. + + ! Only allocate memory and add entry if we want to log full namelist! + if (.not.this%OutputOptions%lLogWarning) then + return + end if + + write(WarningMsg_loc, "(4A)") "MARBL WARNING (", trim(CodeLoc), "): ", & + trim(WarningMsg) + + call this%append_to_log(WarningMsg_loc, CodeLoc, ElemInd, lonly_master_writes=.false.) + + end subroutine marbl_log_warning + + !**************************************************************************** + + subroutine marbl_log_noerror(this, StatusMsg, CodeLoc, ElemInd, lonly_master_writes) + + class(marbl_log_type), intent(inout) :: this + ! StatusMsg is the message to be printed in the log; it does not need to + ! contain the name of the module or subroutine producing the log message + ! CodeLoc is the name of the subroutine that is calling StatusLog%log_noerror + character(len=*), intent(in) :: StatusMsg, CodeLoc + integer, optional, intent(in) :: ElemInd + ! If lonly_master_writes is .false., then this is a message that should be + ! printed out regardless of which task produced it. By default, MARBL assumes + ! that only the master task needs to print a message + logical, optional, intent(in) :: lonly_master_writes + + ! Only allocate memory and add entry if we want to log full namelist! 
+ if (.not.this%OutputOptions%lLogGeneral) then + return + end if + + call this%append_to_log(StatusMsg, CodeLoc, ElemInd, lonly_master_writes) + + end subroutine marbl_log_noerror + + !**************************************************************************** + + subroutine append_to_log(this, StatusMsg, CodeLoc, ElemInd, lonly_master_writes) + + class(marbl_log_type), intent(inout) :: this + ! StatusMsg is the message to be printed in the log; it does not need to + ! contain the name of the module or subroutine producing the log message + ! CodeLoc is the name of the subroutine that is calling StatusLog%log_noerror + character(len=*), intent(in) :: StatusMsg, CodeLoc + integer, optional, intent(in) :: ElemInd + ! If lonly_master_writes is .false., then this is a message that should be + ! printed out regardless of which task produced it. By default, MARBL assumes + ! that only the master task needs to print a message + logical, optional, intent(in) :: lonly_master_writes + type(marbl_status_log_entry_type), pointer :: new_entry + + allocate(new_entry) + nullify(new_entry%next) + if (present(ElemInd)) then + new_entry%ElementInd = ElemInd + else + new_entry%ElementInd = -1 + end if + new_entry%LogMessage = trim(StatusMsg) + new_entry%CodeLocation = trim(CodeLoc) + if (present(lonly_master_writes)) then + new_entry%lonly_master_writes = lonly_master_writes + else + new_entry%lonly_master_writes = .true. + end if + + if (associated(this%FullLog)) then + ! Append new entry to last entry in the log + this%LastEntry%next => new_entry + else + this%FullLog => new_entry + end if + ! Update LastEntry attribute of linked list + this%LastEntry => new_entry + + end subroutine append_to_log + + !**************************************************************************** + + subroutine marbl_log_error_trace(this, RoutineName, CodeLoc, ElemInd) + + ! This routine should only be called if another subroutine has returned and + ! StatusLog%labort_marbl = .true. + + class(marbl_log_type), intent(inout) :: this + ! RoutineName is the name of the subroutine that returned with + ! labort_marbl = .true. + ! CodeLoc is the name of the subroutine that is calling StatusLog%log_error_trace + ! + ! Log will contain a message along the lines of + ! + ! "(CodeLoc) Error reported from RoutineName" + ! + ! When the log is printed, this will provide a traceback through the sequence + ! of calls that led to the original error message. + character(len=*), intent(in) :: RoutineName, CodeLoc + integer, optional, intent(in) :: ElemInd + character(len=char_len) :: log_message + + write(log_message, "(2A)") "Error reported from ", trim(RoutineName) + call this%log_error(log_message, CodeLoc, ElemInd) + + end subroutine marbl_log_error_trace + + !**************************************************************************** + + subroutine marbl_log_warning_trace(this, RoutineName, CodeLoc, ElemInd) + + ! This routine should only be called if another subroutine has returned and + ! StatusLog%lwarning = .true. + + class(marbl_log_type), intent(inout) :: this + ! RoutineName is the name of the subroutine that returned with + ! lwarning = .true. + ! CodeLoc is the name of the subroutine that is calling StatusLog%log_warning_trace + ! + ! Log will contain a message along the lines of + ! + ! "(CodeLoc) Warning reported from RoutineName" + ! + ! When the log is printed, this will provide a traceback through the sequence + ! of calls that led to the original warning message. 
+ character(len=*), intent(in) :: RoutineName, CodeLoc + integer, optional, intent(in) :: ElemInd + character(len=char_len) :: log_message + + write(log_message, "(2A)") "Warning reported from ", trim(RoutineName) + call this%log_warning(log_message, CodeLoc, ElemInd) + this%lwarning = .false. + + end subroutine marbl_log_warning_trace + + !**************************************************************************** + + subroutine marbl_log_erase(this) + + class(marbl_log_type), intent(inout) :: this + type(marbl_status_log_entry_type), pointer :: tmp + + do while (associated(this%FullLog)) + tmp => this%FullLog%next + deallocate(this%FullLog) + this%FullLog => tmp + end do + nullify(this%FullLog) + nullify(this%LastEntry) + + this%lwarning = .false. + + end subroutine marbl_log_erase + +end module marbl_logging diff --git a/schema/suite_v1_0.xsd b/schema/suite_v1_0.xsd new file mode 100644 index 00000000..121438ed --- /dev/null +++ b/schema/suite_v1_0.xsd @@ -0,0 +1,128 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/scripts/ccpp_capgen.py b/scripts/ccpp_capgen.py new file mode 100755 index 00000000..3e9075ba --- /dev/null +++ b/scripts/ccpp_capgen.py @@ -0,0 +1,795 @@ +#!/usr/bin/env python + +""" +Create CCPP parameterization caps, host-model interface code, +physics suite runtime code, and CCPP framework documentation. +""" + +# Python library imports +from __future__ import absolute_import +from __future__ import unicode_literals +from __future__ import print_function + +import argparse +import sys +import os +import logging +import re +# CCPP framework imports +from ccpp_datafile import generate_ccpp_datatable +from ccpp_suite import API +from file_utils import check_for_writeable_file, remove_dir, replace_paths +from file_utils import create_file_list, move_modified_files +from file_utils import KINDS_FILENAME, KINDS_MODULE +from fortran_tools import parse_fortran_file, FortranWriter +from host_cap import write_host_cap +from host_model import HostModel +from metadata_table import parse_metadata_file, SCHEME_HEADER_TYPE +from parse_tools import init_log, set_log_level, context_string +from parse_tools import CCPPError, ParseInternalError + +## Capture the Framework root +__SCRIPT_PATH = os.path.dirname(__file__) +__FRAMEWORK_ROOT = os.path.abspath(os.path.join(__SCRIPT_PATH, os.pardir)) +## Init this now so that all Exceptions can be trapped +_LOGGER = init_log(os.path.basename(__file__)) + +_EPILOG = ''' +''' + +## Recognized Fortran filename extensions +_FORTRAN_FILENAME_EXTENSIONS = ['F90', 'f90', 'F', 'f'] + +## Metadata table types which can have extra variables in Fortran +_EXTRA_VARIABLE_TABLE_TYPES = ['module', 'host', 'ddt'] + +## Metadata table types where order is significant +_ORDERED_TABLE_TYPES = [SCHEME_HEADER_TYPE] + +############################################################################### +def parse_command_line(args, description): +############################################################################### + """Create an ArgumentParser to parse and return command-line arguments""" + ap_format = argparse.RawTextHelpFormatter + parser = argparse.ArgumentParser(description=description, + formatter_class=ap_format, epilog=_EPILOG) + + parser.add_argument("--host-files", metavar='', + type=str, required=True, + help="""Comma separated list 
of host filenames to process +Filenames with a '.meta' suffix are treated as host model metadata files +Filenames with a '.txt' suffix are treated as containing a list of .meta +filenames""") + + parser.add_argument("--scheme-files", metavar='', + type=str, required=True, + help="""Comma separated list of scheme filenames to process +Filenames with a '.meta' suffix are treated as scheme metadata files +Filenames with a '.txt' suffix are treated as containing a list of .meta +filenames""") + + parser.add_argument("--suites", metavar='', + type=str, required=True, + help="""Comma separated list of suite definition filenames to process +Filenames with a '.xml' suffix are treated as suite definition XML files +Other filenames are treated as containing a list of .xml filenames""") + + parser.add_argument("--preproc-directives", + metavar='VARDEF1[,VARDEF2 ...]', type=str, default='', + help="Proprocessor directives used to correctly parse source files") + + parser.add_argument("--ccpp-datafile", type=str, + metavar='', + default="datatable.xml", + help="Filename for information on content generated by the CCPP Framework") + + parser.add_argument("--output-root", type=str, + metavar='', + default=os.getcwd(), + help="directory for generated files") + + parser.add_argument("--host-name", type=str, default='', + help='''Name of host model to use in CCPP API +If this option is passed, a host model cap is generated''') + + parser.add_argument("--clean", action='store_true', default=False, + help='Remove files created by this script, then exit') + + parser.add_argument("--kind-phys", type=str, default='REAL64', + metavar="kind_phys", + help='Data size for real(kind_phys) data') + + parser.add_argument("--generate-docfiles", + metavar='HTML | Latex | HTML,Latex', type=str, + help="Generate LaTeX and/or HTML documentation") + + parser.add_argument("--force-overwrite", action='store_true', default=False, + help="""Overwrite all CCPP-generated files, even +if unmodified""") + parser.add_argument("--verbose", action='count', default=0, + help="Log more activity, repeat for increased output") + pargs = parser.parse_args(args) + return pargs + +############################################################################### +def delete_pathnames_from_file(capfile, logger): +############################################################################### + """Remove all the filenames found in , then delete """ + root_path = os.path.dirname(os.path.abspath(capfile)) + success = True + with open(capfile, 'r') as infile: + for line in infile.readlines(): + path = line.strip() + # Skip blank lines and lines which appear to start with a comment. 
+ if path and (path[0] != '#') and (path[0] != '!'): + # Check for an absolute path + if not os.path.isabs(path): + # Assume relative pathnames are relative to pathsfile + path = os.path.normpath(os.path.join(root_path, path)) + # end if + logger.info("Clean: Removing {}".format(path)) + try: + os.remove(path) + except OSError as oserr: + success = False + errmsg = 'Unable to remove {}\n{}' + logger.warning(errmsg.format(path, oserr)) + # end try + # end if (else skip blank or comment line) + # end for + # end with open + logger.info("Clean: Removing {}".format(capfile)) + try: + os.remove(capfile) + except OSError as oserr: + success = False + errmsg = 'Unable to remove {}\n{}' + logger.warning(errmsg.format(capfile, oserr)) + # end try + if success: + logger.info("ccpp_capgen clean successful, exiting") + else: + logger.info("ccpp_capgen clean encountered errors, exiting") + # end if + +############################################################################### +def find_associated_fortran_file(filename): +############################################################################### + "Find the Fortran file associated with metadata file, " + fort_filename = None + lastdot = filename.rfind('.') + ##XXgoldyXX: Should we check to make sure ends in '.meta.'? + if lastdot < 0: + base = filename + '.' + else: + base = filename[0:lastdot+1] + # end if + for extension in _FORTRAN_FILENAME_EXTENSIONS: + test_name = base + extension + if os.path.exists(test_name): + fort_filename = test_name + break + # end if + # end for + if fort_filename is None: + raise CCPPError("Cannot find Fortran file associated with {}".format(filename)) + # end if + return fort_filename + +############################################################################### +def create_kinds_file(kind_phys, output_dir, logger): +############################################################################### + "Create the kinds.F90 file to be used by CCPP schemes and suites" + kinds_filepath = os.path.join(output_dir, KINDS_FILENAME) + if logger is not None: + msg = 'Writing {} to {}' + logger.info(msg.format(KINDS_FILENAME, output_dir)) + # end if + with FortranWriter(kinds_filepath, "w", + "kinds for CCPP", KINDS_MODULE) as kindf: + use_stmt = 'use ISO_FORTRAN_ENV, only: kind_phys => {}' + kindf.write(use_stmt.format(kind_phys), 1) + kindf.write_preamble() + kindf.write('public kind_phys', 1) + # end with + return kinds_filepath + +############################################################################### +def add_error(error_string, new_error): +############################################################################### + '''Add an error () to , separating errors by a + newline''' + if error_string: + error_string += '\n' + # end if + return error_string + new_error + +############################################################################### +def is_arrayspec(local_name): +############################################################################### + "Return True iff is an array reference" + return '(' in local_name + +############################################################################### +def find_var_in_list(local_name, var_list): +############################################################################### + """Find a variable, , in . + local name is used because Fortran metadata variables do not have + real standard names. + Note: The search is case insensitive. + Return both the variable and the index where it was found. 
+ If not found, return None for the variable and -1 for the index + """ + vvar = None + vind = -1 + lname = local_name.lower() + for lind, lvar in enumerate(var_list): + if lvar.get_prop_value('local_name').lower() == lname: + vvar = lvar + vind = lind + break + # end if + # end for + return vvar, vind + +############################################################################### +def var_comp(prop_name, mvar, fvar, title, case_sensitive=False): +############################################################################### + "Compare a property between two variables" + errors = '' + mprop = mvar.get_prop_value(prop_name) + fprop = fvar.get_prop_value(prop_name) + if not case_sensitive: + if isinstance(mprop, str): + mprop = mprop.lower() + # end if + if isinstance(fprop, str): + fprop = fprop.lower() + # end if + # end if + comp = mprop == fprop + if not comp: + errmsg = '{} mismatch ({} != {}) in {}{}' + ctx = context_string(mvar.context) + errors = add_error(errors, + errmsg.format(prop_name, mprop, fprop, title, ctx)) + # end if + return errors + +############################################################################### +def dims_comp(mheader, mvar, fvar, title, logger, case_sensitive=False): +############################################################################### + "Compare the dimensions attribute of two variables" + errors = '' + mdims = mvar.get_dimensions() + fdims = mheader.convert_dims_to_standard_names(fvar, logger=logger) + comp = len(mdims) == len(fdims) + if not comp: + errmsg = 'Error: rank mismatch in {}/{} ({} != {}){}' + stdname = mvar.get_prop_value('standard_name') + ctx = context_string(mvar.context) + errors = add_error(errors, errmsg.format(title, stdname, + len(mdims), len(fdims), ctx)) + # end if + if comp: + # Now, compare the dims + for dim_ind, mdim in enumerate(mdims): + if ':' in mdim: + mdim = ':'.join([x.strip() for x in mdim.split(':')]) + # end if + fdim = fdims[dim_ind].strip() + if ':' in fdim: + fdim = ':'.join([x.strip() for x in fdim.split(':')]) + # end if + if not case_sensitive: + mdim = mdim.lower() + fdim = fdim.lower() + # end if + # Naked colon is okay for Fortran side + comp = fdim in (':', fdim) + if not comp: + errmsg = 'Error: dim {} mismatch ({} != {}) in {}/{}{}' + stdname = mvar.get_prop_value('standard_name') + ctx = context_string(mvar.context) + errmsg = errmsg.format(dim_ind+1, mdim, fdims[dim_ind], + title, stdname, ctx) + errors = add_error(errors, errmsg) + # end if + # end for + # end if + return errors + +############################################################################### +def compare_fheader_to_mheader(meta_header, fort_header, logger): +############################################################################### + """Compare a metadata header against the header generated from the + corresponding code in the associated Fortran file. + Return a string with any errors found (empty string is no errors). 
+ """ + errors_found = '' + title = meta_header.title + mht = meta_header.header_type + fht = fort_header.header_type + if mht != fht: + # Special case, host metadata can be in a Fortran module or scheme + if (mht != 'host') or (fht not in ('module', SCHEME_HEADER_TYPE)): + errmsg = 'Metadata table type mismatch for {}, {} != {}{}' + ctx = meta_header.start_context() + raise CCPPError(errmsg.format(title, meta_header.header_type, + fort_header.header_type, ctx)) + # end if + else: + # The headers should have the same variables in the same order + # The exception is that a Fortran module can have variable declarations + # after all the metadata variables. + mlist = meta_header.variable_list() + mlen = len(mlist) + flist = fort_header.variable_list() + flen = len(flist) + # Remove array references from mlist before checking lengths + for mvar in mlist: + if is_arrayspec(mvar.get_prop_value('local_name')): + mlen -= 1 + # end if + # end for + list_match = mlen == flen + # Check for optional Fortran variables that are not in metadata + if flen > mlen: + for find, fvar in enumerate(flist): + lname = fvar.get_prop_value('local_name') + _, mind = find_var_in_list(lname, mlist) + if mind < 0: + if fvar.get_prop_value('optional'): + # This is an optional variable + flen -= 1 + # end if + # end if + # end for + list_match = mlen == flen + # end if + if not list_match: + if fht in _EXTRA_VARIABLE_TABLE_TYPES: + if flen > mlen: + list_match = True + else: + etype = 'Fortran {}'.format(fht) + # end if + elif flen > mlen: + etype = 'metadata header' + else: + etype = 'Fortran {}'.format(fht) + # end if + # end if + if not list_match: + errmsg = 'Variable mismatch in {}, variables missing from {}.' + errors_found = add_error(errors_found, errmsg.format(title, etype)) + # end if + for mind, mvar in enumerate(mlist): + lname = mvar.get_prop_value('local_name') + arrayref = is_arrayspec(lname) + fvar, find = find_var_in_list(lname, flist) + if mind >= flen: + if arrayref: + # Array reference, variable not in Fortran table + pass + elif fvar is None: + errmsg = 'No Fortran variable for {} in {}' + errors_found = add_error(errors_found, + errmsg.format(lname, title)) + # end if (no else, we already reported an out-of-place error + # Do not break to collect all missing variables + continue + # end if + # At this point, we should have a Fortran variable + if (not arrayref) and (fvar is None): + errmsg = 'Variable mismatch in {}, no Fortran variable {}.' 
+ errors_found = add_error(errors_found, errmsg.format(title, + lname)) + continue + # end if + # Check order dependence + if fht in _ORDERED_TABLE_TYPES: + if find != mind: + errmsg = 'Out of order argument, {} in {}' + errors_found = add_error(errors_found, + errmsg.format(lname, title)) + continue + # end if + # end if + if arrayref: + # Array reference, do not look for this in Fortran table + continue + # end if + errs = var_comp('local_name', mvar, fvar, title) + if errs: + errors_found = add_error(errors_found, errs) + else: + errs = var_comp('type', mvar, fvar, title) + if errs: + errors_found = add_error(errors_found, errs) + # end if + errs = var_comp('kind', mvar, fvar, title) + if errs: + errors_found = add_error(errors_found, errs) + # end if + if meta_header.header_type == SCHEME_HEADER_TYPE: + errs = var_comp('intent', mvar, fvar, title) + if errs: + errors_found = add_error(errors_found, errs) + # end if + # end if + # Compare dimensions + errs = dims_comp(meta_header, mvar, fvar, title, logger) + if errs: + errors_found = add_error(errors_found, errs) + # end if + # end if + # end for + # end if + return errors_found + +############################################################################### +def check_fortran_against_metadata(meta_headers, fort_headers, + mfilename, ffilename, logger): +############################################################################### + """Compare a set of metadata headers from against the + code in the associated Fortran file, . + NB: This routine destroys the list, but returns the + contents in an association dictionary on successful completion.""" + header_dict = {} # Associate a Fortran header for every metadata header + for mheader in meta_headers: + fheader = None + mtitle = mheader.title + for findex in range(len(fort_headers)): #pylint: disable=consider-using-enumerate + if fort_headers[findex].title == mtitle: + fheader = fort_headers.pop(findex) + break + # end if + # end for + if fheader is None: + tlist = '\n '.join([x.title for x in fort_headers]) + logger.debug("CCPP routines in {}:{}".format(ffilename, tlist)) + errmsg = "No matching Fortran routine found for {} in {}" + raise CCPPError(errmsg.format(mtitle, ffilename)) + # end if + header_dict[mheader] = fheader + # end if + # end while + if fort_headers: + errmsg = "" + sep = "" + for fheader in fort_headers: + if fheader.has_variables: + errmsg += sep + "No matching metadata header found for {} in {}" + errmsg = errmsg.format(fheader.title, mfilename) + sep = "\n" + # end if + # end for + if errmsg: + raise CCPPError(errmsg) + # end if + # end if + # We have a one-to-one set, compare headers + errors_found = '' + for mheader in header_dict: + fheader = header_dict[mheader] + errors_found += compare_fheader_to_mheader(mheader, fheader, logger) + # end for + if errors_found: + num_errors = len(re.findall(r'\n', errors_found)) + 1 + errmsg = "{}\n{} error{} found comparing {} to {}" + raise CCPPError(errmsg.format(errors_found, num_errors, + 's' if num_errors > 1 else '', + mfilename, ffilename)) + # end if + # No return, an exception is raised on error + +############################################################################### +def duplicate_item_error(title, filename, itype, orig_item): +############################################################################### + """Raise an error indicating a duplicate item of type, """ + errmsg = "Duplicate {typ}, {title}, found in {file}" + edict = {'title':title, 'file':filename, 'typ':itype} + ofile = 
orig_item.context.filename
+    if ofile is not None:
+        errmsg = errmsg + ", original found in {ofile}"
+        edict['ofile'] = ofile
+    # end if
+    raise CCPPError(errmsg.format(**edict))
+
+###############################################################################
+def parse_host_model_files(host_filenames, preproc_defs, host_name, logger):
+###############################################################################
+    """
+    Gather information from host files (e.g., DDTs, registry) and
+    return a host model object with the information.
+    """
+    header_dict = {}
+    table_dict = {}
+    known_ddts = list()
+    for filename in host_filenames:
+        logger.info('Reading host model data from {}'.format(filename))
+        # parse metadata file
+        mtables = parse_metadata_file(filename, known_ddts, logger)
+        fort_file = find_associated_fortran_file(filename)
+        ftables = parse_fortran_file(fort_file, preproc_defs=preproc_defs,
+                                     logger=logger)
+        # Check Fortran against metadata (will raise an exception on error)
+        mheaders = list()
+        for sect in [x.sections() for x in mtables]:
+            mheaders.extend(sect)
+        # end for
+        fheaders = list()
+        for sect in [x.sections() for x in ftables]:
+            fheaders.extend(sect)
+        # end for
+        check_fortran_against_metadata(mheaders, fheaders,
+                                       filename, fort_file, logger)
+        # Check for duplicate tables, then add to dict
+        for table in mtables:
+            if table.table_name in table_dict:
+                duplicate_item_error(table.table_name, filename,
+                                     table.table_type,
+                                     table_dict[table.table_name])
+            else:
+                table_dict[table.table_name] = table
+            # end if
+        # end for
+        # Check for duplicate headers, then add to dict
+        for header in mheaders:
+            if header.title in header_dict:
+                duplicate_item_error(header.title, filename,
+                                     header.header_type,
+                                     header_dict[header.title])
+            else:
+                header_dict[header.title] = header
+                if header.header_type == 'ddt':
+                    known_ddts.append(header.title)
+                # end if
+        # end for
+    # end for
+    if not host_name:
+        host_name = None
+    # end if
+    host_model = HostModel(table_dict, host_name, logger)
+    return host_model
+
+###############################################################################
+def parse_scheme_files(scheme_filenames, preproc_defs, logger):
+###############################################################################
+    """
+    Gather information from scheme files (e.g., init, run, and finalize
+    methods) and return resulting dictionary.
+    """
+    table_dict = {} # Duplicate check and for dependencies processing
+    header_dict = {} # To check for duplicates
+    known_ddts = list()
+    for filename in scheme_filenames:
+        logger.info('Reading CCPP schemes from {}'.format(filename))
+        # parse metadata file
+        mtables = parse_metadata_file(filename, known_ddts, logger)
+        fort_file = find_associated_fortran_file(filename)
+        ftables = parse_fortran_file(fort_file, preproc_defs=preproc_defs,
+                                     logger=logger)
+        # Check Fortran against metadata (will raise an exception on error)
+        mheaders = list()
+        for sect in [x.sections() for x in mtables]:
+            mheaders.extend(sect)
+        # end for
+        fheaders = list()
+        for sect in [x.sections() for x in ftables]:
+            fheaders.extend(sect)
+        # end for
+        check_fortran_against_metadata(mheaders, fheaders,
+                                       filename, fort_file, logger)
+        # Check for duplicate tables, then add to dict
+        for table in mtables:
+            if table.table_name in table_dict:
+                duplicate_item_error(table.table_name, filename,
+                                     table.table_type,
+                                     table_dict[table.table_name])
+            else:
+                table_dict[table.table_name] = table
+            # end if
+        # end for
+        # Check for duplicate headers, then add to dict
+        for header in mheaders:
+            if header.title in header_dict:
+                duplicate_item_error(header.title, filename, header.header_type,
+                                     header_dict[header.title])
+            else:
+                header_dict[header.title] = header
+                if header.header_type == 'ddt':
+                    known_ddts.append(header.title)
+                # end if
+            # end if
+        # end for
+    # end for
+    return header_dict.values(), table_dict
+
+###############################################################################
+def clean_capgen(cap_output_file, logger):
+###############################################################################
+    """Attempt to remove the files created by the last invocation of capgen"""
+    log_level = logger.getEffectiveLevel()
+    set_log_level(logger, logging.INFO)
+    if os.path.exists(cap_output_file):
+        logger.info("Cleaning capgen files from {}".format(cap_output_file))
+        delete_pathnames_from_file(cap_output_file, logger)
+    else:
+        emsg = "Unable to run clean, {} not found"
+        logger.error(emsg.format(cap_output_file))
+    # end if
+    set_log_level(logger, log_level)
+
+###############################################################################
+def capgen(host_files, scheme_files, suites, datatable_file, preproc_defs,
+           gen_hostcap, gen_docfiles, output_dir, host_name, kind_phys,
+           force_overwrite, logger):
+###############################################################################
+    """Parse indicated host, scheme, and suite files.
+ Generate code to allow host model to run indicated CCPP suites.""" + # We need to create three lists of files, hosts, schemes, and SDFs + host_files = create_file_list(host_files, ['meta'], 'Host', logger) + scheme_files = create_file_list(scheme_files, ['meta'], 'Scheme', logger) + sdfs = create_file_list(suites, ['xml'], 'Suite', logger) + check_for_writeable_file(datatable_file, "Cap output datatable") + ##XXgoldyXX: Temporary warning + if gen_docfiles: + raise CCPPError("--generate-docfiles not yet supported") + # end if + # Turn preproc_defs into a dictionary, start with a list to process + if isinstance(preproc_defs, list): + # Someone already handed us a list + preproc_list = preproc_defs + elif (not preproc_defs) or (preproc_defs == 'UNSET'): + # No preprocessor definitions + preproc_list = list() + elif ',' in preproc_defs: + # String of definitions, separated by commas + preproc_list = [x.strip() for x in preproc_defs.split(',')] + else: + # String of definitions, separated by spaces + preproc_list = [x.strip() for x in preproc_defs.split(' ') if x] + # end if + # Turn the list into a dictionary + preproc_defs = {} + for item in preproc_list: + tokens = [x.strip() for x in item.split('=', 1)] + key = tokens[0] + if key[0:2] == '-D': + key = key[2:] + # end if + if len(tokens) > 1: + value = tokens[1] + else: + value = None + # end if + preproc_defs[key] = value + # end for + # First up, handle the host files + host_model = parse_host_model_files(host_files, preproc_defs, + host_name, logger) + # Next, parse the scheme files + scheme_headers, scheme_tdict = parse_scheme_files(scheme_files, + preproc_defs, logger) + ddts = host_model.ddt_lib.keys() + if ddts: + logger.debug("DDT definitions = {}".format(ddts)) + # end if + plist = host_model.prop_list('local_name') + logger.debug("{} variables = {}".format(host_model.name, plist)) + logger.debug("schemes = {}".format([x.title for x in scheme_headers])) + # Finally, we can get on with writing suites + # Make sure to write to temporary location if files exist in + if not os.path.exists(output_dir): + # Try to create output_dir (let it crash if it fails) + os.makedirs(output_dir) + # Nothing here, use it for output + outtemp_dir = output_dir + elif not os.listdir(output_dir): + # Nothing here, use it for output + outtemp_dir = output_dir + else: + # We need to create a temporary staging area, create it here + outtemp_name = "ccpp_temp_scratch_dir" + outtemp_dir = os.path.join(output_dir, outtemp_name) + if os.path.exists(outtemp_dir): + remove_dir(outtemp_dir, force=True) + # end if + os.makedirs(outtemp_dir) + # end if + ccpp_api = API(sdfs, host_model, scheme_headers, logger) + cap_filenames = ccpp_api.write(outtemp_dir, logger) + if gen_hostcap: + # Create a cap file + host_files = [write_host_cap(host_model, ccpp_api, outtemp_dir, logger)] + else: + host_files = list() + # end if + # Create the kinds file + kinds_file = create_kinds_file(kind_phys, outtemp_dir, logger) + # Move any changed files to output_dir and remove outtemp_dir + move_modified_files(outtemp_dir, output_dir, + overwrite=force_overwrite, remove_src=True) + # We have to rename the files we created + if outtemp_dir != output_dir: + replace_paths(cap_filenames, outtemp_dir, output_dir) + replace_paths(host_files, outtemp_dir, output_dir) + kinds_file = kinds_file.replace(outtemp_dir, output_dir) + # end if + # Finally, create the database of generated files and caps + # This can be directly in output_dir because it will not affect dependencies + src_dir = 
os.path.join(__FRAMEWORK_ROOT, "src") + generate_ccpp_datatable(datatable_file, host_model, ccpp_api, + scheme_headers, scheme_tdict, host_files, + cap_filenames, kinds_file, src_dir) + +############################################################################### +def _main_func(): +############################################################################### + """Parse command line, then parse indicated host, scheme, and suite files. + Finally, generate code to allow host model to run indicated CCPP suites.""" + args = parse_command_line(sys.argv[1:], __doc__) + verbosity = args.verbose + if verbosity > 1: + set_log_level(_LOGGER, logging.DEBUG) + elif verbosity > 0: + set_log_level(_LOGGER, logging.INFO) + # end if + # Make sure we know where output is going + output_dir = os.path.abspath(args.output_root) + if os.path.abspath(args.ccpp_datafile): + datatable_file = args.ccpp_datafile + else: + datatable_file = os.path.abspath(os.path.join(output_dir, + args.ccpp_datafile)) + # end if + ## A few sanity checks + ## Make sure output directory is legit + if os.path.exists(output_dir): + if not os.path.isdir(output_dir): + errmsg = "output-root, '{}', is not a directory" + raise CCPPError(errmsg.format(args.output_root)) + # end if + if not os.access(output_dir, os.W_OK): + errmsg = "Cannot write files to output-root ({})" + raise CCPPError(errmsg.format(args.output_root)) + # end if (output_dir is okay) + else: + # Try to create output_dir (let it crash if it fails) + os.makedirs(output_dir) + # end if + # Make sure we can create output file lists + if not os.path.isabs(datatable_file): + datatable_file = os.path.normpath(os.path.join(output_dir, + datatable_file)) + # end if + if args.clean: + clean_capgen(datatable_file, _LOGGER) + else: + generate_host_cap = args.host_name != '' + preproc_defs = args.preproc_directives + capgen(args.host_files, args.scheme_files, args.suites, datatable_file, + preproc_defs, generate_host_cap, + args.generate_docfiles, output_dir, args.host_name, + args.kind_phys, args.force_overwrite, _LOGGER) + # end if (clean) + +############################################################################### + +if __name__ == "__main__": + try: + _main_func() + sys.exit(0) + except ParseInternalError as pie: + _LOGGER.exception(pie) + sys.exit(-1) + except CCPPError as ccpp_err: + if _LOGGER.getEffectiveLevel() <= logging.DEBUG: + _LOGGER.exception(ccpp_err) + else: + _LOGGER.error(ccpp_err) + # end if + sys.exit(1) + finally: + logging.shutdown() + # end try diff --git a/scripts/ccpp_datafile.py b/scripts/ccpp_datafile.py new file mode 100755 index 00000000..b25df8a8 --- /dev/null +++ b/scripts/ccpp_datafile.py @@ -0,0 +1,1042 @@ +#!/usr/bin/env python + +"""Code to generate and query the CCPP datafile returned by capgen. +The CCPP datafile is a database consisting of several tables: +- A list of all generated files, broken into groups for host cap, + suite caps, and ccpp_kinds. +- A list of scheme entries, keyed by scheme name +- A list of CCPP metadata files actually used by capgen, broken into groups + for host-model metadata and scheme metadata. These filenames may serve + as keys +- A list of variable entries, keyed by standard name. 
+""" + +## NB: A new report must be added in two places: +## 1) In the list of DatatableReport._valid_reports +## 2) As an option in datatable_report + +# Python library imports +import argparse +import os +import re +import sys +import xml.etree.ElementTree as ET +# CCPP framework imports +from ccpp_suite import VerticalLoop, Subcycle +from parse_tools import read_xml_file +from metadata_table import UNKNOWN_PROCESS_TYPE +from metavar import Var + +# Find python version +PY3 = sys.version_info[0] > 2 +PYSUBVER = sys.version_info[1] + +# Global data +_INDENT_STR = " " +beg_tag_re = re.compile(r"([<][^/][^<>]*[^/][>])") +end_tag_re = re.compile(r"([<][/][^<>/]+[>])") +simple_tag_re = re.compile(r"([<][^/][^<>/]+[/][>])") + +## datatable_report must have an action for each report type +_VALID_REPORTS = [{"report" : "host_files", "type" : bool, + "help" : + "Return a list of host CAP files created by capgen"}, + {"report" : "suite_files", "type" : bool, + "help" : + "Return a list of suite CAP files created by capgen"}, + {"report" : "utility_files", "type" : bool, + "help" : ("Return a list of utility files created by " + "capgen (e.g., ccpp_kinds.F90)")}, + {"report" : "ccpp_files", "type" : bool, + "help" : "Return a list of all files created by capgen"}, + {"report" : "process_list", "type" : bool, + "help" : ("Return a list of process types and implementing " + "scheme name")}, + {"report" : "module_list", "type" : bool, + "help" : + "Return a list of module names used in this set of suites"}, + {"report" : "dependencies", "type" : bool, + "help" : ("Return a list of scheme and host " + "dependency module names")}, + {"report" : "suite_list", "type" : bool, + "help" : "Return a list of configured suite names"}, + {"report" : "required_variables", "type" : str, + "help" : ("Return a list of required variable " + "standard names for suite, "), + "metavar" : "SUITE_NAME"}, + {"report" : "input_variables", "type" : str, + "help" : ("Return a list of required input variable " + "standard names for suite, "), + "metavar" : "SUITE_NAME"}, + {"report" : "output_variables", "type" : str, + "help" : ("Return a list of required output variable " + "standard names for suite, "), + "metavar" : "SUITE_NAME"}, + {"report" : "host_variables", "type" : bool, + "help" : ("Return a list of required host model variable " + "standard names")}, + {"report" : "show", "type" : bool, + "help" : + "Pretty print the database contents to the screen"}] + +### +### Utilities +### + +class CCPPDatatableError(ValueError): + """Error specific to errors found in the CCPP capgen datafile""" + pass + +class DatatableReport(object): + """A class to hold a database report type and inquiry function""" + + __valid_actions = [x["report"] for x in _VALID_REPORTS] + + def __init__(self, action, value=True): + """Initialize this report as report-type, """ + if action in DatatableReport.__valid_actions: + self.__action = action + self.__value = value + else: + raise ValueError("Invalid action, '{}'".format(action)) + # end if + + def action_is(self, action): + """If matches this report type, return True. 
+ Otherwise, return False""" + return action == self.__action + + @property + def action(self): + """Return this action's action""" + return self.__action + + @property + def value(self): + """Return this action's value""" + return self.__value + + @classmethod + def valid_actions(cls): + """Return the list of valid actions for this class""" + return cls.__valid_actions + +class PrettyElementTree(ET.ElementTree): + """An ElementTree subclass with nice formatting when writing to a file""" + + def __init__(self, element=None, file=None): + """Initialize a PrettyElementTree object""" + super(PrettyElementTree, self).__init__(element, file) + + def _write(self, outfile, line, indent, eol=os.linesep): + """Write as an ASCII string to """ + outfile.write('{}{}{}'.format(_INDENT_STR*indent, line, eol)) + + def write(self, file, encoding="us-ascii", xml_declaration=None, + default_namespace=None, method="xml", + short_empty_elements=True): + """Subclassed write method to format output.""" + if PY3 and (PYSUBVER >= 4): + if PYSUBVER >= 8: + input = ET.tostring(self.getroot(), + encoding=encoding, method=method, + xml_declaration=xml_declaration, + default_namespace=default_namespace, + short_empty_elements=short_empty_elements) + else: + input = ET.tostring(self.getroot(), + encoding=encoding, method=method, + short_empty_elements=short_empty_elements) + # end if + else: + input = ET.tostring(self.getroot(), + encoding=encoding, method=method) + # end if + if PY3: + fmode = 'wt' + root = str(input, encoding="utf-8") + else: + fmode = 'w' + root = input + # end if + indent = 0 + last_write_text = False + with open(file, fmode) as outfile: + inline = root.strip() + istart = 0 # Current start pos + iend = len(inline) + while istart < iend: + bmatch = beg_tag_re.match(inline[istart:]) + ematch = end_tag_re.match(inline[istart:]) + smatch = simple_tag_re.match(inline[istart:]) + if bmatch is not None: + outstr = bmatch.group(1) + if inline[istart + len(bmatch.group(1))] != '<': + # Print text on same line + self._write(outfile, outstr, indent, eol='') + else: + self._write(outfile, outstr, indent) + # end if + indent += 1 + istart += len(outstr) + last_write_text = False + elif ematch is not None: + outstr = ematch.group(1) + indent -= 1 + if last_write_text: + self._write(outfile, outstr, 0) + last_write_text = False + else: + self._write(outfile, outstr, indent) + # end if + istart += len(outstr) + elif smatch is not None: + outstr = smatch.group(1) + self._write(outfile, outstr, indent) + istart += len(outstr) + last_write_text = False + else: + # No tag, just output text + end_index = inline[istart:].find('<') + if end_index < 0: + end_index = iend + else: + end_index += istart + # end if + outstr = inline[istart:end_index] + self._write(outfile, outstr.strip(), 0, eol='') + last_write_text = True + istart += len(outstr) + # end if + # end while + # end with + +### +### Interface for retrieving datatable information +### + +############################################################################### +def _command_line_parser(): +############################################################################### + "Create and return an ArgumentParser for parsing the command line" + description = """ + Retrieve information about a ccpp_capgen run. + The returned information is controlled by selecting an action from + the list of optional arguments below. + Note that exactly one action is required. 
+ """ + parser = argparse.ArgumentParser(description=description) + parser.add_argument("datatable", type=str, + help="Path to a data table XML file created by capgen") + ### Only one action per call + group = parser.add_mutually_exclusive_group(required=True) + for report in _VALID_REPORTS: + rep_type = "--{}".format(report["report"].replace("_", "-")) + if report["type"] is bool: + group.add_argument(rep_type, action='store_true', default=False, + help=report["help"]) + elif report["type"] is str: + if "metavar" in report: + group.add_argument(rep_type, required=False, type=str, + metavar=report["metavar"], default='', + help=report["help"]) + else: + group.add_argument(rep_type, required=False, type=str, + default='', help=report["help"]) + # end if + else: + raise ValueError("Unknown report type, '{}'".format(report["type"])) + # end if + # end for + ### + defval = "," + help_str = "String to separate items in a list (default: '{}')" + parser.add_argument("--separator", type=str, required=False, default=defval, + metavar="SEP", dest="sep", help=help_str.format(defval)) + defval = False + help_str = ("Exclude protected variables (only has an effect if the " + "requested report is returning a list of variables)." + " (default: {})") + parser.add_argument("--exclude-protected", action='store_true', + required=False, + default=defval, help=help_str.format(defval)) + defval = -1 + help_str = ("Screen width for '--show' line wrapping. -1 means do not " + "wrap. (default: {})") + parser.add_argument("--line-wrap", type=int, required=False, + metavar="LINE_WIDTH", dest="line_wrap", + default=defval, help=help_str.format(defval)) + defval = 2 + help_str = "Indent depth for '--show' output (default: {})" + parser.add_argument("--indent", type=int, required=False, default=2, + help=help_str.format(defval)) + return parser + +############################################################################### +def parse_command_line(args): +############################################################################### + """Create an ArgumentParser to parse and return command-line arguments""" + parser = _command_line_parser() + pargs = parser.parse_args(args) + return pargs + +### +### Accessor functions to retrieve information from a datatable file +### + +############################################################################### +def _read_datatable(datatable): +############################################################################### + """Read the XML file, and return its root node""" + _, datatable = read_xml_file(datatable, None) # No logger + return datatable + +############################################################################### +def _find_table_section(table, elem_type): +############################################################################### + """Look for and return an element type, , in . + Raise an exception if the element is not found.""" + found = table.find(elem_type) + if found is None: + emsg = "Element type, '{}', not found in table" + raise CCPPDatatableError(emsg.format(elem_type)) + # end if + return found + +############################################################################### +def _retrieve_ccpp_files(table, file_type=None): +############################################################################### + """Find and retrieve a list of generated filenames from
. + If is not None, only return that file type.""" + ccpp_files = list() + # Find the files section + for section in _find_table_section(table, "ccpp_files"): + if (not file_type) or (section.tag == file_type): + for entry in section: + if entry.tag == "file": + ccpp_files.append(entry.text) + else: + emsg = "Invalid file list entry type, '{}'" + raise CCPPDatatableError(emsg.format(entry.tag)) + # end if + # end for + # end if + # end if + return ccpp_files + +############################################################################### +def _retrieve_process_list(table): +############################################################################### + """Find and return a list of all physics scheme processes in
.""" + result = list() + schemes = table.find("schemes") + if schemes is None: + raise CCPPDatatableError("Could not find 'schemes' element") + # end if + for scheme in schemes: + name = scheme.get("name") + proc = scheme.get("process") + if proc: + result.append("{}={}".format(proc, name)) + # end if + # end for + return result + +############################################################################### +def _retrieve_module_list(table): +############################################################################### + """Find and return a list of all scheme modules in
.""" + result = set() + schemes = table.find("schemes") + if schemes is None: + raise CCPPDatatableError("Could not find 'schemes' element") + # end if + for scheme in schemes: + for phase in scheme: + module = phase.get("module") + if module is not None: + result.add(module) + # end if + # end for + # end for + return sorted(result) + +############################################################################### +def _retrieve_dependencies(table): +############################################################################### + """Find and return a list of all host and scheme dependencies.""" + result = set() + depends = table.find("dependencies") + if depends is None: + raise CCPPDatatableError("Could not find 'dependencies' element") + # end if + for dependency in depends: + dep_file = dependency.text + if dep_file is not None: + result.add(dep_file) + # end if + # end for + return sorted(result) + +############################################################################### +def _find_var_dictionary(table, dict_name=None, dict_type=None): +############################################################################### + """Find and return a var_dictionary named, in
. + If not found, return None""" + var_dicts = table.find("var_dictionaries") + target_dict = None + if (dict_name is None) and (dict_type is None): + raise ValueError(("At least one of or must " + "contain a string")) + # end if + for vdict in var_dicts: + if (((dict_name is None) or (vdict.get("name") == dict_name)) and + ((dict_type is None) or (vdict.get("type") == dict_type))): + target_dict = vdict + break + # end if + # end for + return target_dict + +############################################################################### +def _retrieve_suite_list(table): +############################################################################### + """Find and return a list of all suites found in
.""" + result = list() + # First, find the API variable dictionary + api_elem = table.find("api") + if api_elem is not None: + suites_elem = api_elem.find("suites") + if suites_elem is not None: + for suite in suites_elem: + result.append(suite.get("name")) + # end for + # end if + # end if + return result + +############################################################################### +def _retrieve_suite_group_names(table, suite_name): +############################################################################### + """Find and return a list of the group names for this suite.""" + result = list() + # First, find the API variable dictionary + api_elem = table.find("api") + if api_elem is not None: + suites_elem = api_elem.find("suites") + if suites_elem is not None: + for suite in suites_elem: + if suite.get("name") == suite_name: + for item in suite: + if item.tag == "group": + result.append(item.get("name")) + # end if + # end for + # end if + # end for + # end if + # end if + return result + +############################################################################### +def _is_variable_protected(table, var_name, var_dict): +############################################################################### + """Determine whether variable, , from is protected. + So this by checking for the 'protected' attribute for in + or any of 's ancestors (parent dictionaries). + """ + protected = False + while (not protected) and (var_dict is not None): + dvars = var_dict.find("variables") + if dvars is not None: + for var in dvars: + if var.get("name") == var_name: + protected = var.get("protected", default="False") == "True" + break + # end if + # end for + # end if + parent = var_dict.get("parent") + if parent is not None: + var_dict = _find_var_dictionary(table, dict_name=parent) + else: + var_dict = None + # end if + # end while + return protected + +############################################################################### +def _retrieve_variable_list(table, suite_name, + intent_type=None, excl_prot=True): +############################################################################### + """Find and return a list of all the required variables in . + If suite, , is not found in
, return an empty list. + If is present, return only that variable type (input or + output). + If is True, do not include protected variables""" + # Note that suites do not have call lists so we have to collect + # all the variables from the suite's groups. + var_set = set() + excl_vars = list() + if intent_type == "host": + allowed_intents = list() + elif intent_type is None: + allowed_intents = ['in', 'out', 'inout'] + elif intent_type == "input": + allowed_intents = ['in', 'inout'] + elif intent_type == "output": + allowed_intents = ['out', 'inout'] + else: + emsg = "Invalid intent_type, '{}'" + raise CCPPDatatableError(emsg.format(intent_type)) + # end if + if excl_prot or (intent_type == "host"): + host_dict = _find_var_dictionary(table, dict_type="host") + if host_dict is not None: + hvars = host_dict.find("variables") + if hvars is not None: + for var in hvars: + vname = var.get("name") + if excl_prot: + exclude = _is_variable_protected(table, vname, + host_dict) + else: + exclude = False + # end if + if intent_type == "host": + if not exclude: + # Add to host variable set + var_set.add(vname) + # end if + else: + if exclude: + # Add to list of protected variables + excl_vars.append(vname) + # end if + # end if + # end for + # end if + # end if + # end if + if intent_type != "host": + group_names = _retrieve_suite_group_names(table, suite_name) + for group in group_names: + cl_name = group + "_call_list" + group_dict = _find_var_dictionary(table, dict_name=cl_name, + dict_type="group_call_list") + if group_dict is not None: + gvars = group_dict.find("variables") + if gvars is not None: + for var in gvars: + vname = var.get("name") + vintent = var.get("intent") + if excl_prot: + exclude = vname in excl_vars + if not exclude: + exclude = _is_variable_protected(table, vname, + group_dict) + # end if + else: + exclude = False + # end if + if (vintent in allowed_intents) and (not exclude): + var_set.add(vname) + # end if + # end for + # end if + # end if + # end for + # end if + return sorted(var_set) + +############################################################################### +def datatable_report(datatable, action, sep, excl_prot=False): +############################################################################### + """Perform a lookup on and return the result. 
+ """ + if not action: + emsg = "datatable_report: An action is required\n" + emsg += _command_line_parser().format_usage() + raise ValueError(emsg) + # end if + if not sep: + emsg = "datatable_report: A separator character () is required\n" + emsg += _command_line_parser().format_usage() + raise ValueError(emsg) + # end if + table = _read_datatable(datatable) + if action.action_is("ccpp_files"): + result = _retrieve_ccpp_files(table) + elif action.action_is("host_files"): + result = _retrieve_ccpp_files(table, file_type="host_files") + elif action.action_is("suite_files"): + result = _retrieve_ccpp_files(table, file_type="suite_files") + elif action.action_is("utility_files"): + result = _retrieve_ccpp_files(table, file_type="utilities") + elif action.action_is("process_list"): + result = _retrieve_process_list(table) + elif action.action_is("module_list"): + result = _retrieve_module_list(table) + elif action.action_is("dependencies"): + result = _retrieve_dependencies(table) + elif action.action_is("suite_list"): + result = _retrieve_suite_list(table) + elif action.action_is("required_variables"): + result = _retrieve_variable_list(table, action.value, + excl_prot=excl_prot) + elif action.action_is("input_variables"): + result = _retrieve_variable_list(table, action.value, + intent_type="input", + excl_prot=excl_prot) + elif action.action_is("output_variables"): + result = _retrieve_variable_list(table, action.value, + intent_type="output", + excl_prot=excl_prot) + elif action.action_is("host_variables"): + result = _retrieve_variable_list(table, "host", excl_prot=excl_prot, + intent_type="host") + else: + result = '' + # end if + if isinstance(result, list): + result = sep.join(result) + # end if + return result + +############################################################################### +def _indent_str(indent): +############################################################################### + """Return the line start string for indent level, .""" + return _INDENT_STR*indent + +############################################################################### +def _format_line(line_in, indent, line_wrap, increase_indent=True): +############################################################################### + """Format into separate lines in an attempt to not have the + length of any line greater than characters including any + indent (with indent level specified by ). + If is True, increase the indent level for new lines + created by the process. + A value of less one means do not wrap the line. + """ + in_squote = False + in_dquote = False + outline = '' + indent_str = _indent_str(indent) + curr_indent = len(indent_str) + wrap_points = list() + line = line_in.strip() + llen = len(line) + # Do we need to wrap the line? 
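+    # When line_wrap is non-positive or the indented line already fits,
+    # index is set past the end of the line so the wrap-point scan below
+    # is skipped and the line is emitted unchanged.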
+ if (line_wrap <= 0) or (llen + curr_indent <= line_wrap): + index = llen + 1 + else: + index = 0 + # end if + # Collect possible wrap points + while index < llen: + inchar = line[index] + if in_squote: + if inchar == "'": + in_squote = False + # end if (else do nothing) + elif in_dquote: + if inchar == '"': + in_dquote = False + # end if (else do nothing) + elif inchar == ' ': + wrap_points.append(index + curr_indent) + # end if (else it is not an interesting character) + index += 1 + # end while + if (line_wrap <= 0) or (llen + curr_indent <= line_wrap): + this_line = indent_str + line + next_line = "" + else: + # Find the best break point + good_points = [x for x in wrap_points if x <= line_wrap] + if increase_indent: + indent += 2 # To indent past child tags + # end if + if good_points: + wrap = max(good_points) - curr_indent + this_line = indent_str + line[0:wrap] + next_line = _format_line(line[wrap+1:], indent, line_wrap, + increase_indent=False) + elif wrap_points: + wrap = min(wrap_points) - curr_indent + this_line = indent_str + line[0:wrap] + next_line = _format_line(line[wrap+1:], indent, line_wrap, + increase_indent=False) + else: + this_line = indent_str + line + next_line = "" + # end if + # end if + outline = this_line + '\n' + next_line + return outline + +############################################################################### +def table_entry_pretty_print(entry, indent, line_wrap=-1): +############################################################################### + """Create and return a pretty print string of the contents of """ + output = "" + outline = "<{}".format(entry.tag) + for name in entry.attrib: + outline += " {}={}".format(name, entry.attrib[name]) + # end for + has_children = len(list(entry)) > 0 + has_text = entry.text + if has_children or has_text: + # We have sub-structure, close and print this tag + outline += ">" + output += _format_line(outline, indent, line_wrap) + else: + # No sub-structure, we are done with this tag + outline += " />" + output += _format_line(outline, indent, line_wrap) + # end if + if has_children: + for child in entry: + output += table_entry_pretty_print(child, indent+1, + line_wrap=line_wrap) + # end for + # end if + if has_text: + output += _format_line(entry.text, indent+1, line_wrap) + # end if + if has_children or has_text: + # We had sub-structure, print the close tag + outline = "".format(entry.tag) + output = output.rstrip() + '\n' + _format_line(outline, + indent, line_wrap) + # end if + return output + +############################################################################### +def datatable_pretty_print(datatable, indent, line_wrap): +############################################################################### + """Create and return a pretty print string of the contents of """ + indent = 0 + table = _read_datatable(datatable) + report = table_entry_pretty_print(table, indent, line_wrap=line_wrap) + return report + +### +### Functions to create the datatable file +### + +############################################################################### +def _object_type(pobj): +############################################################################### + """Return an XML-acceptable string for the type of .""" + return pobj.__class__.__name__.lower() + +############################################################################### +def _new_var_entry(parent, var, full_entry=True): +############################################################################### + """Create a variable sub-element of with 
information from . + If is False, only include standard name and intent. + """ + prop_list = ["intent"] + if full_entry: + prop_list.extend(["local_name", "type", "kind", "units", + "diagnostic_name", "diagnostic_name_fixed", + "default_value", "protected"]) + prop_list.extend(Var.constituent_property_names()) + # end if + ventry = ET.SubElement(parent, "var") + ventry.set("name", var.get_prop_value("standard_name")) + for prop in prop_list: + value = var.get_prop_value(prop) + if value: + ventry.set(prop, str(value)) + # end if + # end for + if full_entry: + dims = var.get_dimensions() + if dims: + dim_entry = ET.SubElement(ventry, "dimensions") + dim_entry.text = " ".join(dims) + # end if + # end if + +############################################################################### +def _new_scheme_entry(parent, scheme, group_name, scheme_headers): +############################################################################### + """Create a new XML entry for under """ + sch_name = scheme.name + sch_entry = parent.find(sch_name) + process = None + if not sch_entry: + sch_entry = ET.SubElement(parent, "scheme") + sch_entry.set("name", sch_name) + # end if + if scheme.run_phase(): + sch_tag = group_name + else: + sch_tag = scheme.phase() + # end if + if not sch_tag: + emsg = "No phase info for scheme, '{}', group = '{}" + raise CCPPDatatableError(emsg.format(sch_name, group_name)) + # end if + phase_entry = sch_entry.find(sch_tag) + if phase_entry: + pname = phase_entry.get("name") + if pname != sch_name: + emsg = "Scheme entry already exists for {} but name is {}" + raise CCPPDatatableError(emsg.format(sch_name, pname)) + # end if + else: + phase_entry = ET.SubElement(sch_entry, sch_tag) + phase_entry.set("name", sch_name) + title = scheme.subroutine_name + phase_entry.set("subroutine_name", title) + phase_entry.set("filename", scheme.context.filename) + if title in scheme_headers: + header = scheme_headers[title] + proc = header.process_type + if proc != UNKNOWN_PROCESS_TYPE: + if process: + if process != proc: + emsg = 'Inconsistent process, {} != {}' + raise CCPPDatatableError(emsg.format(proc, process)) + # end if (no else, things are okay) + else: + process = proc + # end if + # end if + module = header.module + if module: + phase_entry.set("module", module) + # end if + else: + emsg = 'Could not find metadata header for {}' + raise CCPPDatatableError(emsg.format(sch_name)) + # end if + call_list = ET.SubElement(phase_entry, "call_list") + vlist = scheme.call_list.variable_list() + for var in vlist: + _new_var_entry(call_list, var, full_entry=False) + # end for + # end if + if process: + sch_entry.set("process", proc) + # end if + +############################################################################### +def _new_variable_dictionary(dictionaries, var_dict, dict_type, parent=None): +############################################################################### + """Create a new XML entry for under .""" + dict_entry = ET.SubElement(dictionaries, "var_dictionary") + dict_entry.set("name", var_dict.name) + dict_entry.set("type", dict_type) + if parent is not None: + dict_entry.set("parent", parent.name) + # end if + sub_dicts = var_dict.sub_dictionaries() + if sub_dicts: + sd_entry = ET.SubElement(dict_entry, "sub_dictionaries") + sd_entry.text = " ".join([x.name for x in sub_dicts]) + # end if + vars_entry = ET.SubElement(dict_entry, "variables") + for var in var_dict.variable_list(): + _new_var_entry(vars_entry, var, full_entry=True) + # end for + 
+############################################################################### +def _add_suite_object_dictionaries(dictionaries, suite_object): +############################################################################### + """Create new XML entries for under . + Add 's dictionary and its call_list dictionary (if present). + Recurse to this objects parts.""" + dict_type = _object_type(suite_object) + _new_variable_dictionary(dictionaries, suite_object, dict_type, + parent=suite_object.parent) + if suite_object.call_list: + dict_type += "_call_list" + _new_variable_dictionary(dictionaries, suite_object.call_list, + dict_type, parent=suite_object.parent) + # end if + for part in suite_object.parts: + _add_suite_object_dictionaries(dictionaries, part) + # end for + +############################################################################### +def _add_dependencies(parent, scheme_depends, host_depends): +############################################################################### + """Add a section to that lists all the dependencies + required by schemes or the host model. + """ + file_entry = ET.SubElement(parent, "dependencies") + for hfile in host_depends: + entry = ET.SubElement(file_entry, "dependency") + entry.text = hfile + # end for + for sfile in scheme_depends: + entry = ET.SubElement(file_entry, "dependency") + entry.text = sfile + # end for + +############################################################################### +def _add_generated_files(parent, host_files, suite_files, ccpp_kinds, src_dir): +############################################################################### + """Add a section to that lists all the files generated + by in sections for host cap, suite caps, ccpp_kinds, and source files. + Also add existing utility files which are always needed by the framework. + """ + file_entry = ET.SubElement(parent, "ccpp_files") + utilities = ET.SubElement(file_entry, "utilities") + entry = ET.SubElement(utilities, "file") + entry.text = ccpp_kinds + entry = ET.SubElement(utilities, "file") + entry.text = os.path.join(src_dir, "ccpp_constituent_prop_mod.F90") + entry = ET.SubElement(utilities, "file") + entry.text = os.path.join(src_dir, "ccpp_hashable.F90") + entry = ET.SubElement(utilities, "file") + entry.text = os.path.join(src_dir, "ccpp_hash_table.F90") + host_elem = ET.SubElement(file_entry, "host_files") + for hfile in host_files: + entry = ET.SubElement(host_elem, "file") + entry.text = hfile + # end for + suite_elem = ET.SubElement(file_entry, "suite_files") + for sfile in suite_files: + entry = ET.SubElement(suite_elem, "file") + entry.text = sfile + # end for + +############################################################################### +def _add_suite_object(parent, suite_object): +############################################################################### + """Add an entry for under . 
This operation is + recursive to all the components inside of """ + obj_elem = ET.SubElement(parent, _object_type(suite_object)) + obj_elem.set("name", suite_object.name) + ptype = suite_object.phase_type + if ptype: + obj_elem.set("phase", ptype) + # end if + if isinstance(suite_object, VerticalLoop): + obj_elem.set("dimension_name", suite_object.dimension_name) + # end if + if isinstance(suite_object, Subcycle): + obj_elem.set("loop", suite_object.loop) + # end if + for obj_part in suite_object.parts: + _add_suite_object(obj_elem, obj_part) + # end for + +############################################################################### +def generate_ccpp_datatable(filename, host_model, api, scheme_headers, + scheme_tdict, host_files, suite_files, + ccpp_kinds, source_dir): +############################################################################### + """Write a CCPP datatable for to . + The datatable includes the generated filenames for the host cap, + the suite caps, the ccpp_kinds module, and source code files. + """ + # Define new tree + datatable = ET.Element("ccpp_datatable") + datatable.set("version", "1.0") + # Write out the generated files + _add_generated_files(datatable, host_files, suite_files, + ccpp_kinds, source_dir) + # Write out scheme info + schemes = ET.SubElement(datatable, "schemes") + # Create a dictionary of the scheme headers for easy lookup + scheme_header_dict = {} + for header in scheme_headers: + if header.title in scheme_header_dict: + emsg = 'Header {} already in dictionary' + raise CCPPDatatableError(emsg.format(header.title)) + # end if + scheme_header_dict[header.title] = header + # end for + # Dump all scheme info from the suites + for suite in api.suites: + for group in suite.groups: + gname = group.name + for scheme in group.schemes(): + _new_scheme_entry(schemes, scheme, gname, scheme_header_dict) + # end for + # end for + # end for + # Write the API + api_elem = ET.SubElement(datatable, "api") + suites_elem = ET.SubElement(api_elem, "suites") + for suite in api.suites: + suite_elem = ET.SubElement(suites_elem, "suite") + suite_elem.set("name", suite.name) + suite_elem.set("filename", suite.sdf_name) + for group in suite.groups: + # Skip empty groups + if group.parts: + _add_suite_object(suite_elem, group) + # end if + # end for + # end for + # Dump the variable dictionaries + var_dicts = ET.SubElement(datatable, "var_dictionaries") + # First, the top-level dictionaries + _new_variable_dictionary(var_dicts, host_model, "host") + _new_variable_dictionary(var_dicts, api, "api", parent=api.parent) + # Now, the suite and group namelists, etc. 
(including call_lists) + for suite in api.suites: + _new_variable_dictionary(var_dicts, suite, "suite", parent=suite.parent) + for group in suite.groups: + _add_suite_object_dictionaries(var_dicts, group) + # end for + # end for + # end for + # Add in all dependencies + scheme_depends = set() + for table in scheme_tdict: + for dep_file in scheme_tdict[table].dependencies: + scheme_depends.add(dep_file) + # end for + # end for + host_depends = set() + host_tables = host_model.metadata_tables() + for table in host_tables: + for dep_file in host_tables[table].dependencies: + host_depends.add(dep_file) + # end for + # end for + _add_dependencies(datatable, scheme_depends, host_depends) + # Write tree + datatable_tree = PrettyElementTree(datatable) + datatable_tree.write(filename) + +############################################################################### + +if __name__ == "__main__": + PARGS = parse_command_line(sys.argv[1:]) + if PARGS.show: + _INDENT_STR = " "*PARGS.indent + LINE_WRAP = PARGS.line_wrap + REPORT = datatable_pretty_print(PARGS.datatable, 0, line_wrap=LINE_WRAP) + else: + ARG_VARS = vars(PARGS) + _ACTION = None + _ERRMSG = '' + _ESEP = '' + for opt in ARG_VARS: + if (opt in DatatableReport.valid_actions()) and ARG_VARS[opt]: + if _ACTION: + _ERRMSG += _ESEP + "Duplicate action, '{}'".format(opt) + _ESEP = '\n' + else: + _ACTION = DatatableReport(opt, ARG_VARS[opt]) + # end if + # end if + # end for + if _ERRMSG: + raise ValueError(_ERRMSG) + # end if + REPORT = datatable_report(PARGS.datatable, _ACTION, + PARGS.sep, PARGS.exclude_protected) + # end if + print("{}".format(REPORT.rstrip())) + sys.exit(0) diff --git a/scripts/ccpp_fortran_to_metadata.py b/scripts/ccpp_fortran_to_metadata.py new file mode 100755 index 00000000..155cea87 --- /dev/null +++ b/scripts/ccpp_fortran_to_metadata.py @@ -0,0 +1,249 @@ +#!/usr/bin/env python + +#pylint: disable=anomalous-backslash-in-string +""" +Create prototype CCPP metadata tables from Fortran files + +Parses annotated Fortran files to produce metadata files where the +standard_name, units, and dimension standard names must be filled in. +The annotation is a two line comment for every physics scheme, derived +data type (DDT) definition, or host model data section. +The annotation form is: + +!> \section arg_table_ Argument Table +!! \htmlinclude arg_table_.html + +where is the name of the scheme, the name of the DDT, or the +name of the module containing data to be included in the metadata file. +For a scheme, the annotation must appear just before the subroutine statement. +For a DDT definition, the annotation must appear just before the type statement. +For module data, the annotation should occur after any module variables + which should not be included in the metadata file. +Note that only CCPP interfaces (e.g., _run, _init, _final) + will be documented in this manner. All other routines should be left as is. 
+""" +#pylint: enable=anomalous-backslash-in-string + +# Python library imports +from __future__ import absolute_import +from __future__ import unicode_literals +from __future__ import print_function + +import argparse +import sys +import os +import os.path +import logging +# CCPP framework imports +from parse_tools import init_log, set_log_level +from parse_tools import CCPPError, ParseInternalError +from parse_tools import reset_standard_name_counter, unique_standard_name +from fortran_tools import parse_fortran_file +from file_utils import create_file_list +from metadata_table import blank_metadata_line + +## Init this now so that all Exceptions can be trapped +_LOGGER = init_log(os.path.basename(__file__)) + +## Recognized Fortran filename extensions +_FORTRAN_FILENAME_EXTENSIONS = ['F90', 'f90', 'F', 'f'] + +############################################################################### +def parse_command_line(args, description): +############################################################################### + "Create an ArgumentParser to parse and return command-line arguments" + parser = argparse.ArgumentParser(description=description, + formatter_class=argparse.RawTextHelpFormatter) + + parser.add_argument("files", metavar='', + type=str, + help="""Comma separated list of filenames to process +Filenames with a '.meta' suffix are treated as host model metadata files +Filenames with a '.txt' suffix are treated as containing a list of .meta +filenames""") + + parser.add_argument("--preproc-directives", + metavar='VARDEF1[,VARDEF2 ...]', type=str, default='', + help="Proprocessor directives used to correctly parse source files") + + parser.add_argument("--output-root", type=str, + metavar='', + default=os.getcwd(), + help="directory for generated files") + + parser.add_argument("--section-separator", type=str, default='', + help="""Comment line to separate CCPP metadata tables +(must start with a # or ; character)""") + + parser.add_argument("--verbose", action='count', default=0, + help="Log more activity, repeat for increased output") + pargs = parser.parse_args(args) + return pargs + +############################################################################### +def write_metadata_file(mfilename, ftables, sep): +############################################################################### + """ + Write the prototype metadata file, , based on the + headers () parsed from Fortran. + """ + # Write the metadata file with all the items collected from Fortran + with open(mfilename, 'w') as outfile: + header_sep = '' + table_name = '' + for table in ftables: + # Write the table properties section + # Note that there may be extra tables depending on how the + # Fortran was parsed. 
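+            # A new [ccpp-table-properties] section is written only when the
+            # table name changes; subsequent sections of the same table are
+            # grouped under the previously written header.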
+ if (not table_name) or (table_name != table.table_name): + outfile.write("{}[ccpp-table-properties]{}".format(header_sep, + os.linesep)) + header_sep = sep + os.linesep + table_name = table.table_name + outfile.write(" name = {}{}".format(table_name, os.linesep)) + outfile.write(" type = {}{}".format(table.table_type, + os.linesep)) + # end if + for header in table.sections(): + lname_dict = {'1':'ccpp_constant_one'} + outfile.write('{}[ccpp-arg-table]{}'.format(header_sep, + os.linesep)) + outfile.write(' name = {}{}'.format(header.title, + os.linesep)) + outfile.write(' type = {}{}'.format(header.header_type, + os.linesep)) + for var in header.variable_list(): + lname = var.get_prop_value('local_name') + outfile.write('[ {} ]{}'.format(lname, os.linesep)) + prop = var.get_prop_value('standard_name') + outfile.write(' standard_name = {}{}'.format(prop, + os.linesep)) + lname_dict[lname] = prop + prop = var.get_prop_value('units') + if not prop: + prop = 'enter_units' + # end if + outfile.write(' units = {}{}'.format(prop, os.linesep)) + tprop = var.get_prop_value('type') + kprop = var.get_prop_value('kind') + if tprop == kprop: + outfile.write(' ddt_type = {}'.format(tprop)) + else: + outfile.write(' type = {}'.format(tprop.lower())) + if kprop: + outfile.write(' | kind = {}'.format(kprop.lower())) + # end if + # end if + outfile.write(os.linesep) + dims = var.get_dimensions() + # Fill in standard names for dimensions + dlist = list() + if dims: + for dim in dims: + dslist = list() + for dimspec in dim.split(':'): + if dimspec and (dimspec in lname_dict): + dstr = lname_dict[dimspec] + else: + dstr = unique_standard_name() + # end if + dslist.append(dstr) + # end for + dlist.append(':'.join(dslist)) + # end for + # end if + prop = '(' + ','.join(dlist) + ')' + outfile.write(' dimensions = {}{}'.format(prop, + os.linesep)) + if header.header_type == 'scheme': + prop = var.get_prop_value('intent') + outfile.write(' intent = {}{}'.format(prop, + os.linesep)) + # end if + # end for + # end for + # end for + # end with + +############################################################################### +def parse_fortran_files(filenames, preproc_defs, output_dir, sep, logger): +############################################################################### + """ + Parse each file in and produce a prototype metadata file + with a metadata table for each arg_table entry in the file. + """ + meta_filenames = list() + for filename in filenames: + logger.info('Looking for arg_tables from {}'.format(filename)) + reset_standard_name_counter() + ftables = parse_fortran_file(filename, preproc_defs=preproc_defs, + logger=logger) + # Create metadata filename + filepath = '.'.join(os.path.basename(filename).split('.')[0:-1]) + fname = filepath + '.meta' + mfilename = os.path.join(output_dir, fname) + write_metadata_file(mfilename, ftables, sep) + meta_filenames.append(mfilename) + return meta_filenames + +############################################################################### +def _main_func(): +############################################################################### + """Parse command line, then parse indicated Fortran files. 
+ Finally, generate a prototype metadata file for each Fortran file.""" + args = parse_command_line(sys.argv[1:], __doc__) + verbosity = args.verbose + if verbosity > 1: + set_log_level(_LOGGER, logging.DEBUG) + elif verbosity > 0: + set_log_level(_LOGGER, logging.INFO) + # end if + # Make sure we know where output is going + output_dir = os.path.abspath(args.output_root) + # Optional table separator comment + section_sep = args.section_separator + if not blank_metadata_line(section_sep): + emsg = "Illegal section separator, '{}', first character must be # or ;" + raise CCPPError(emsg.format(section_sep)) + # We need to create a list of input Fortran files + fort_files = create_file_list(args.files, _FORTRAN_FILENAME_EXTENSIONS, + 'Fortran', _LOGGER) + preproc_defs = args.preproc_directives + ## A few sanity checks + ## Make sure output directory is legit + if os.path.exists(output_dir): + if not os.path.isdir(output_dir): + errmsg = "output-root, '{}', is not a directory" + raise CCPPError(errmsg.format(args.output_root)) + # end if + if not os.access(output_dir, os.W_OK): + errmsg = "Cannot write files to output-root ({})" + raise CCPPError(errmsg.format(args.output_root)) + # end if (output_dir is okay) + else: + # Try to create output_dir (let it crash if it fails) + os.makedirs(output_dir) + # end if + # Parse the files and create metadata + _ = parse_fortran_files(fort_files, preproc_defs, + output_dir, section_sep, _LOGGER) + +############################################################################### + +if __name__ == "__main__": + try: + _main_func() + sys.exit(0) + except ParseInternalError as pie: + _LOGGER.exception(pie) + sys.exit(-1) + except CCPPError as ccpp_err: + if _LOGGER.getEffectiveLevel() <= logging.DEBUG: + _LOGGER.exception(ccpp_err) + else: + _LOGGER.error(ccpp_err) + # end if + sys.exit(1) + finally: + logging.shutdown() + # end try diff --git a/scripts/ccpp_prebuild.py b/scripts/ccpp_prebuild.py index c1fea0a0..863e4225 100755 --- a/scripts/ccpp_prebuild.py +++ b/scripts/ccpp_prebuild.py @@ -102,9 +102,6 @@ def import_config(configfile, builddir): config['static_api_dir'] = ccpp_prebuild_config.STATIC_API_DIR.format(build_dir=builddir) config['static_api_srcfile'] = ccpp_prebuild_config.STATIC_API_SRCFILE.format(build_dir=builddir) - # Add model-independent, CCPP-internal variable definition files - config['variable_definition_files'].append(CCPP_INTERNAL_VARIABLE_DEFINITON_FILE) - # To handle new metadata: import DDT references (if exist) try: config['typedefs_new_metadata'] = ccpp_prebuild_config.TYPEDEFS_NEW_METADATA @@ -217,6 +214,8 @@ def convert_local_name_from_new_metadata(metadata, standard_name, typedefs_new_m indices = array_reference.lstrip('(').rstrip(')').split(',') indices_local_names = [] for index_range in indices: + # Remove leading and trailing whitespaces + index_range = index_range.strip() # Leave colons-only dimension alone if index_range == ':': indices_local_names.append(index_range) @@ -225,6 +224,8 @@ def convert_local_name_from_new_metadata(metadata, standard_name, typedefs_new_m dimensions = index_range.split(':') dimensions_local_names = [] for dimension in dimensions: + # Remove leading and trailing whitespaces + dimension = dimension.strip() # Leave literals alone try: int(dimension) diff --git a/scripts/ccpp_state_machine.py b/scripts/ccpp_state_machine.py new file mode 100644 index 00000000..30540fc9 --- /dev/null +++ b/scripts/ccpp_state_machine.py @@ -0,0 +1,25 @@ +"""Definition of the state machine used by the CCPP""" + +# 
CCPP framework imports +from state_machine import StateMachine + +_INIT_ST = r"(?:(?i)init(?:ial(?:ize)?)?)" +_FINAL_ST = r"(?:(?i)final(?:ize)?)" +_RUN_ST = r"(?:(?i)run)" +_TS_INIT_ST = r"(?:(?i)timestep_init(?:ial(?:ize)?)?)" +_TS_FINAL_ST = r"(?:(?i)timestep_final(?:ize)?)" + +# Allowed CCPP transitions +# pylint: disable=bad-whitespace +RUN_PHASE_NAME = 'run' +CCPP_STATE_MACH = StateMachine((('initialize', 'uninitialized', + 'initialized', _INIT_ST), + ('timestep_initial', 'initialized', + 'in_time_step', _TS_INIT_ST), + (RUN_PHASE_NAME, 'in_time_step', + 'in_time_step', _RUN_ST), + ('timestep_final', 'in_time_step', + 'initialized', _TS_FINAL_ST), + ('finalize', 'initialized', + 'uninitialized', _FINAL_ST))) +# pylint: enable=bad-whitespace diff --git a/scripts/ccpp_suite.py b/scripts/ccpp_suite.py new file mode 100644 index 00000000..a84cf037 --- /dev/null +++ b/scripts/ccpp_suite.py @@ -0,0 +1,2928 @@ +#!/usr/bin/env python +# + +"""Classes and methods to create a Fortran suite-implementation file +to implement calls to a set of suites for a given host model.""" + +# Python library imports +import os.path +import re +import xml.etree.ElementTree as ET +# CCPP framework imports +from ccpp_state_machine import CCPP_STATE_MACH, RUN_PHASE_NAME +from code_block import CodeBlock +from constituents import ConstituentVarDict +from ddt_library import DDTLibrary +from file_utils import KINDS_MODULE +from fortran_tools import FortranWriter +from metavar import Var, VarDictionary, VarLoopSubst, ccpp_standard_var +from metavar import CCPP_CONSTANT_VARS, CCPP_LOOP_VAR_STDNAMES +from parse_tools import ParseContext, ParseSource, context_string +from parse_tools import ParseInternalError, CCPPError +from parse_tools import read_xml_file, validate_xml_file, find_schema_version + +# pylint: disable=too-many-lines + +############################################################################### +# Module (global) variables +############################################################################### + +_OBJ_LOC_RE = re.compile(r"(0x[0-9A-Fa-f]+)>") +_BLANK_DIMS_RE = re.compile(r"[(][:](,:)*[)]$") + +# Source for internally generated variables. 
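#
# [Illustrative sketch, not part of the patch] The phase patterns defined in
# ccpp_state_machine.py above are case-insensitive suffix matchers used to map
# a scheme entry-point name onto a CCPP transition. A rough, self-contained
# approximation follows (the real logic lives in StateMachine.transition_match;
# routine and scheme names are hypothetical, and re.IGNORECASE stands in for
# the inline (?i) groups used above):

import re

_PHASE_PATTERNS = [  # (transition, suffix pattern); checked in order
    ('timestep_initial', r"timestep_init(?:ial(?:ize)?)?"),
    ('timestep_final',   r"timestep_final(?:ize)?"),
    ('initialize',       r"init(?:ial(?:ize)?)?"),
    ('finalize',         r"final(?:ize)?"),
    ('run',              r"run"),
]

def guess_transition(routine_name, scheme_name):
    """Return the transition implied by a routine name such as '<scheme>_init'."""
    for trans, pattern in _PHASE_PATTERNS:
        if re.match(scheme_name + '_' + pattern + '$', routine_name, re.IGNORECASE):
            return trans
    return None

# guess_transition('mp_scheme_init', 'mp_scheme')           => 'initialize'
# guess_transition('mp_scheme_timestep_final', 'mp_scheme') => 'timestep_final'
#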
+_API_SOURCE_NAME = "CCPP_API" +# Use the constituent source type for consistency +_API_SUITE_VAR_NAME = ConstituentVarDict.constitutent_source_type() +_API_GROUP_VAR_NAME = "group" +_API_SCHEME_VAR_NAME = "scheme" +_API_LOCAL_VAR_NAME = "local" +_API_LOCAL_VAR_TYPES = [_API_LOCAL_VAR_NAME, _API_SUITE_VAR_NAME] +_API_CONTEXT = ParseContext(filename="ccpp_suite.py") +_API_SOURCE = ParseSource(_API_SOURCE_NAME, _API_SCHEME_VAR_NAME, _API_CONTEXT) +_API_LOCAL = ParseSource(_API_SOURCE_NAME, _API_LOCAL_VAR_NAME, _API_CONTEXT) +_API_GROUP = ParseSource(_API_SOURCE_NAME, _API_GROUP_VAR_NAME, _API_CONTEXT) +_API_TIMESPLIT_TAG = 'time_split' +_API_PROCESSSPLIT_TAG = 'process_split' + +# Required variables for inclusion in auto-generated schemes +CCPP_REQUIRED_VARS = [ccpp_standard_var('ccpp_error_flag', + _API_SCHEME_VAR_NAME, + context=_API_CONTEXT), + ccpp_standard_var('ccpp_error_message', + _API_SCHEME_VAR_NAME, + context=_API_CONTEXT)] + +############################################################################### +def new_suite_object(item, context, parent, logger): +############################################################################### + "'Factory' method to create the appropriate suite object from XML" + new_item = None + if item.tag == 'subcycle': + new_item = Subcycle(item, context, parent, logger) + elif item.tag == 'scheme': + new_item = Scheme(item, context, parent, logger) + elif item.tag == _API_TIMESPLIT_TAG: + new_item = TimeSplit(item, context, parent, logger) + else: + raise CCPPError("Unknown CCPP suite element type, '{}'".format(item.tag)) + # end if + return new_item + +############################################################################### + +class CallList(VarDictionary): + """A simple class to hold a routine's call list (dummy arguments)""" + + def __init__(self, name, routine=None, logger=None): + """Initialize this call list. + is the name of this dictionary. + is a pointer to the routine for which this is a call list + or None for a routine that is not a SuiteObject. + """ + self.__routine = routine + super(CallList, self).__init__(name, logger=logger) + + def add_vars(self, call_list, gen_unique=False): + """Add new variables from another CallList ()""" + for var in call_list.variable_list(): + stdname = var.get_prop_value('standard_name') + if stdname not in self: + self.add_variable(var, gen_unique=gen_unique) + # end if + # end for + + def call_string(self, cldicts=None, is_func_call=False, subname=None): + """Return a dummy argument string for this call list. + may be a list of VarDictionary objects to search for + local_names (default is to use self). + should be set to True to construct a call statement. + If is False, construct a subroutine dummy argument + list. 
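#
# [Illustrative note, not from the patch] With hypothetical call-list entries
# whose dummy names are 'errflg' and 'errmsg' and whose group-local names are
# 'errflg_loc' and 'errmsg_loc', call_string(cldicts=[group], is_func_call=True)
# produces keyword-style actual arguments, "errflg=errflg_loc, errmsg=errmsg_loc",
# while call_string() with no arguments produces the plain dummy-argument list
# "errflg, errmsg" for a subroutine interface.
#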
+ """ + arg_str = "" + arg_sep = "" + for var in self.variable_list(): + # Do not include constants + stdname = var.get_prop_value('standard_name') + if stdname not in CCPP_CONSTANT_VARS: + # Find the dummy argument name + dummy = var.get_prop_value('local_name') + # Now, find the local variable name + if cldicts is not None: + for cldict in cldicts: + dvar = cldict.find_variable(standard_name=stdname, + any_scope=False) + if dvar is not None: + break + # end if + # end for + if dvar is None: + if subname is not None: + errmsg = "{}: ".format(subname) + else: + errmsg = "" + # end if + errmsg += "'{}', not found in call list for '{}'" + clnames = [x.name for x in cldicts] + raise CCPPError(errmsg.format(stdname, clnames)) + # end if + lname = dvar.get_prop_value('local_name') + else: + cldict = None + aref = var.array_ref(local_name=dummy) + if aref is not None: + lname = aref.group(1) + else: + lname = dummy + # end if + # end if + if is_func_call: + if cldicts is not None: + use_dicts = cldicts + else: + use_dicts = [self] + # end if + run_phase = self.routine.run_phase() + # We only need dimensions for suite variables in run phase + need_dims = SuiteObject.is_suite_variable(dvar) and run_phase + vdims = var.call_dimstring(var_dicts=use_dicts, + explicit_dims=need_dims, + loop_subst=run_phase) + if _BLANK_DIMS_RE.match(vdims) is None: + lname = lname + vdims + # end if + # end if + if is_func_call: + arg_str += "{}{}={}".format(arg_sep, dummy, lname) + else: + arg_str += "{}{}".format(arg_sep, lname) + # end if + arg_sep = ", " + # end if + # end for + return arg_str + + @property + def routine(self): + """Return the routine for this call list (or None)""" + return self.__routine + +############################################################################### + +class SuiteObject(VarDictionary): + """Base class for all CCPP Suite objects (e.g., Scheme, Subcycle) + SuiteObjects have an internal dictionary for variables created for + execution of the SuiteObject. These variables will be allocated and + managed at the Group level (unless cross-group usage or persistence + requires handling at the Suite level). + SuiteObjects also have a call list which is a list of variables which + are passed to callable SuiteObjects (e.g., Scheme). + """ + + def __init__(self, name, context, parent, logger, + active_call_list=False, variables=None, phase_type=None): + # pylint: disable=too-many-arguments + self.__name = name + self.__context = context + self.__logger = logger + self.__parent = parent + if active_call_list: + self.__call_list = CallList(name + '_call_list', routine=self, + logger=logger) + else: + self.__call_list = None + # end if + self.__parts = list() + self.__needs_vertical = None + self.__needs_horizontal = None + self.__phase_type = phase_type + # Initialize our dictionary + super(SuiteObject, self).__init__(self.name, variables=variables, + parent_dict=parent, logger=logger) + + def declarations(self): + """Return a list of local variables to be declared in parent Group + or Suite. By default, this list is the object's embedded VarDictionary. + """ + return self.variable_list() + + def add_part(self, item, replace=False): + """Add an object (e.g., Scheme, Subcycle) to this SuiteObject. + If needs to be in a VerticalLoop, look for an appropriate + VerticalLoop object or create one. + if is True, replace in its current position in self. + Note that if is not to be inserted in a VerticalLoop, + has no effect. 
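#
# [Descriptive note, not part of the patch] In the method below, if the item
# being added reports a needed vertical dimension (item.needs_vertical), it is
# not added directly; instead an existing VerticalLoop over that dimension is
# reused, or a new one is created and the item is attached to it. With
# replace=True the item is swapped back into its current position in the
# parts list rather than appended.
#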
+ """ + if replace: + if item in self.__parts: + index = self.__parts.index(item) + else: + emsg = 'Cannot replace {} in {}, not a member' + raise ParseInternalError(emsg.format(item.name, self.name)) + # end if + else: + if item in self.__parts: + emsg = 'Cannot add {} to {}, already a member' + raise ParseInternalError(emsg.format(item.name, self.name)) + # end if + index = len(self.__parts) + # end if + # Does this item need to be in a VerticalLoop? + if item.needs_vertical is not None: + iparent = item.parent + if isinstance(self, VerticalLoop): + # It is being added to a VerticalLoop, call it good + pass + elif isinstance(iparent, VerticalLoop): + # Why are we doing this? + emsg = ('Trying to add {} {} to {} {} but it is already ' + 'in VerticalLoop {}') + raise ParseInternalError(emsg.format(item.__class__.__name__, + item.name, + self.__class__.__name__, + self.name, iparent.name)) + else: + pitem = iparent.part(-1, error=False) + added = False + if isinstance(pitem, VerticalLoop): + # Can we attach item to this loop? + if pitem.dimension_name == item.needs_vertical: + pitem.add_part(item) + if replace: + self.remove_part(index) + # end if (no else, we already added it) + added = True + # end if + # end if + if not added: + # Need to add item to a new VerticalLoop + # We are in the process of providing the vertical coord + vert_index = item.needs_vertical + item.needs_vertical = None + new_vl = VerticalLoop(vert_index, self.__context, + self, self.__logger, items=[item]) + if replace: + self.remove_part(index) + # end if (no else, adding the loop below) + self.__parts.insert(index, new_vl) + item.reset_parent(new_vl) + # end if + # end if + else: + # Just add + self.__parts.insert(index, item) + item.reset_parent(self) + # end if + + def remove_part(self, index): + """Remove the SuiteObject part at index""" + plen = len(self.__parts) + if (0 <= index < plen) or (abs(index) <= plen): + del self.__parts[index] + else: + errmsg = "Invalid index for remove_part, {}, ".format(index) + if plen > 0: + errmsg += "SuiteObject only has {} parts".format(plen) + else: + errmsg += "SuiteObject only has no parts" + raise ParseInternalError(errmsg, context=self.__context) + # end if + + def schemes(self): + """Return a flattened list of schemes for this SuiteObject""" + schemes = list() + for item in self.__parts: + schemes.extend(item.schemes()) + # end for + return schemes + + def move_part(self, part, source_object, loc=-1): + """Operator to move from to . + If is -1, is appended to , + otherwise, is inserted at . 
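#
# [Descriptive note, not part of the patch] move_part(part, source_object, loc)
# removes 'part' from 'source_object', appends it to this object's parts when
# loc is -1 or inserts it at position loc otherwise, and then re-parents the
# part to this object.
#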
+ """ + if part in source_object.parts: + # Sanitize loc + try: + iloc = int(loc) + except ValueError: + errmsg = "Invalid loc value for move_part, {}".format(loc) + raise ParseInternalError(errmsg, context=self.__context) + # end try + if iloc == -1: + self.__parts.append(part) + else: + self.__parts.insert(iloc, part) + # end if + index = source_object.index(part) + source_object.remove_part(index) + # now has a new parent + part.reset_parent(self) + + def reset_parent(self, new_parent): + """Reset the parent of this SuiteObject (which has been moved)""" + self.__parent = new_parent + + def phase(self): + """Return the CCPP state phase_type for this SuiteObject""" + trans = self.phase_type + if trans is None: + if self.parent is not None: + trans = self.parent.phase() + else: + trans = False + # end if + # end if + return trans + + def run_phase(self): + """Return True iff this SuiteObject is in a run phase group""" + return self.phase() == RUN_PHASE_NAME + + def timestep_phase(self): + '''Return True iff this SuiteObject is in a timestep initial or + timestep final phase group''' + phase = self.phase() + return (phase is not None) and ('timestep' in phase) + + def register_action(self, vaction): + """Register (i.e., save information for processing during write stage) + and return True or pass up to the parent of + . Return True if any level registers , False otherwise. + The base class will not register any action, it must be registered in + an override of this method. + """ + if self.parent is not None: + return self.parent.register_action(vaction) + # end if + return False + + @classmethod + def is_suite_variable(cls, var): + """Return True iff belongs to our Suite""" + return var and (var.source.type == _API_SUITE_VAR_NAME) + + def is_local_variable(self, var): + """Return the local variable matching if one is found belonging + to this object or any of its SuiteObject parents.""" + stdname = var.get_prop_value('standard_name') + lvar = None + obj = self + while (not lvar) and (obj is not None) and isinstance(obj, SuiteObject): + lvar = obj.find_variable(standard_name=stdname, any_scope=False, + search_call_list=False) + if not lvar: + obj = obj.parent + # end if + # end while + return lvar + + def add_call_list_variable(self, newvar, exists_ok=False, gen_unique=False, + subst_dict=None): + """Add to this SuiteObject's call_list. If this SuiteObject + does not have a call list, recursively try the SuiteObject's parent + If is not None, create a clone using that as a dictionary + of substitutions. + Do not add if it exists as a local variable. + Do not add if it is a suite variable""" + stdname = newvar.get_prop_value('standard_name') + if self.parent: + pvar = self.parent.find_variable(standard_name=stdname, + source_var=newvar, + any_scope=False) + else: + pvar = None + # end if + if SuiteObject.is_suite_variable(pvar): + pass # Do not add suite variable to a call list + elif self.is_local_variable(newvar): + pass # Do not add to call list, it is owned by a SuiteObject + elif self.call_list is not None: + if (stdname in CCPP_LOOP_VAR_STDNAMES) and (not self.run_phase()): + errmsg = 'Attempting to use loop variable {} in {} phase' + raise CCPPError(errmsg.format(stdname, self.phase())) + # end if + # Do we need a clone? 
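#
# [Descriptive note, not part of the patch] A clone is made when this object
# is a Group (so the variable is re-sourced with the group source type) or when
# a substitution dictionary was supplied; if the original variable carries no
# intent and none is being substituted in, the clone defaults to intent 'in'
# before being added to the call list.
#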
+ if isinstance(self, Group): + stype = _API_GROUP_VAR_NAME + else: + stype = None + # end if + if stype or subst_dict: + oldvar = newvar + if subst_dict is None: + subst_dict = {} + # end if + # Make sure that this variable has an intent + if ((oldvar.get_prop_value("intent") is None) and + ("intent" not in subst_dict)): + subst_dict["intent"] = "in" + # end if + newvar = oldvar.clone(subst_dict, source_name=self.name, + source_type=stype, context=self.context) + # end if + self.call_list.add_variable(newvar, exists_ok=exists_ok, + gen_unique=gen_unique, + adjust_intent=True) + # We need to make sure that this variable's dimensions are available + for vardim in newvar.get_dim_stdnames(include_constants=False): + dvar = self.find_variable(standard_name=vardim, + any_scope=True) + if dvar is None: + emsg = "{}: Could not find dimension {} in {}" + raise ParseInternalError(emsg.format(self.name, + stdname, vardim)) + # end if + elif self.parent is None: + errmsg = 'No call_list found for {}'.format(newvar) + raise ParseInternalError(errmsg) + elif pvar: + # Check for call list incompatibility + if pvar is not None: + compat, reason = pvar.compatible(newvar) + if not compat: + emsg = 'Attempt to add incompatible variable to call list:' + emsg += '\n{} from {} is not compatible with {} from {}' + nlreason = newvar.get_prop_value(reason) + plreason = pvar.get_prop_value(reason) + emsg += '\nreason = {} ({} != {})'.format(reason, + nlreason, + plreason) + nlname = newvar.get_prop_value('local_name') + plname = pvar.get_prop_value('local_name') + raise CCPPError(emsg.format(nlname, newvar.source.name, + plname, pvar.source.name)) + # end if + # end if (no else, variable already in call list) + else: + self.parent.add_call_list_variable(newvar, exists_ok=exists_ok, + gen_unique=gen_unique, + subst_dict=subst_dict) + # end if + + def add_variable_to_call_tree(self, var, vmatch=None, subst_dict=None): + """Add to 's call_list (or a parent if does not + have an active call_list). + If is not None, also add the loop substitution variables + which must be present. + If is not None, create a clone using that as a dictionary + of substitutions. + """ + found_dims = False + if var is not None: + self.add_call_list_variable(var, exists_ok=True, gen_unique=True, + subst_dict=subst_dict) + found_dims = True + # end if + if vmatch is not None: + svars = vmatch.has_subst(self, any_scope=True) + if svars is None: + found_dims = False + else: + found_dims = True + for svar in svars: + self.add_call_list_variable(svar, exists_ok=True) + # end for + # Register the action (probably at Group level) + self.register_action(vmatch) + # end if + # end if + return found_dims + + def vert_dim_match(self, vloop_subst): + """If self is or is a part of a VerticalLoop object for + the substitute index for , return the substitute + loop index standard name, otherwise, return None. + """ + dim_match = None + parent = self + if len(vloop_subst.required_stdnames) != 1: + errmsg = 'vert_dim_match can only handle one substitute index' + raise ParseInternalError(errmsg) + # end if + index_dim = vloop_subst.required_stdnames[0] + while parent is not None: + if isinstance(parent, VerticalLoop) and (parent.name == index_dim): + dim_match = index_dim + break + # end if + parent = parent.parent + # end for + return dim_match + + def horiz_dim_match(self, ndim, hdim, nloop_subst): + """Find a match between and , if they are both + horizontal dimensions. + If == , return . 
+ If is not None and its required standard names exist + in our extended dictionary, return them. + Otherwise, return None. + NB: Loop substitutions are only allowed during the run phase but in + other phases, horizontal_dimension and horizontal_loop_extent + are the same. + """ + dim_match = None + nis_hdim = Var.is_horizontal_dimension(ndim) + his_hdim = Var.is_horizontal_dimension(hdim) + if nis_hdim and his_hdim: + if ndim == hdim: + dim_match = ndim + elif self.run_phase() and (nloop_subst is not None): + svars = nloop_subst.has_subst(self, any_scope=True) + match = svars is not None + if match: + if isinstance(self, Scheme): + obj = self.parent + else: + obj = self + # end if + for svar in svars: + obj.add_call_list_variable(svar, exists_ok=True) + # end for + dim_match = ':'.join(nloop_subst.required_stdnames) + # end if + elif not self.run_phase(): + if ((hdim == 'ccpp_constant_one:horizontal_dimension') and + (ndim == 'ccpp_constant_one:horizontal_loop_extent')): + dim_match = hdim + elif ((hdim == 'ccpp_constant_one:horizontal_dimension') and + (ndim == 'horizontal_loop_begin:horizontal_loop_end')): + dim_match = hdim + # end if (no else, there is no non-run-phase match) + # end if (no else, there is no match) + # end if (no else, there is no match) + return dim_match + + @staticmethod + def dim_match(need_dim, have_dim): + """Test whether matches . + If they match, return the matching dimension (which may be + modified by, e.g., a loop substitution). + If they do not match, return None. + """ + match = None + # First, try for all the marbles + if need_dim == have_dim: + match = need_dim + # end if + # Is one side missing a one start? + if not match: + ndims = need_dim.split(':') + hdims = have_dim.split(':') + if len(ndims) > len(hdims): + if ndims[0].lower == 'ccpp_constant_one': + ndims = ndims[1:] + elif hdims[0].lower == 'ccpp_constant_one': + hdims = hdims[1:] + # end if (no else) + # Last try + match = ndims == hdims + # end if + # end if + + return match + + def match_dimensions(self, need_dims, have_dims): + """Compare dimensions between and . + Return 6 items: + 1) Return True if all dims match. + If has a vertical dimension and does not + but all other dimensions match, return False but include the + missing dimension index as the third return value. + 2) Return modified, if necessary to + reflect the available limits. + 3) Return have_dims modified, if necessary to reflect + any loop substitutions. If no substitutions, return None + This is done so that the correct dimensions are used in the host cap. + 4) Return the name of the missing vertical index, or None + 5) Return a permutation array if the dimension ordering is + different (or None if the ordering is the same). Each element of the + permutation array is the index in for that dimension of + . + 6) Finally, return a 'reason' string. If match (first return value) is + False, this string will contain information about the reason for + the match failure. 
+ >>> SuiteObject('foo', _API_CONTEXT, None, None).match_dimensions(['horizontal_loop_extent'], ['horizontal_loop_extent']) + (True, ['horizontal_loop_extent'], ['horizontal_loop_extent'], None, None, '') + >>> SuiteObject('foo', _API_CONTEXT,None, None,variables=[Var({'local_name':'beg','standard_name':'horizontal_loop_begin','units':'count','dimensions':'()','type':'integer'}, _API_LOCAL),Var({'local_name':'end','standard_name':'horizontal_loop_end','units':'count','dimensions':'()','type':'integer'}, _API_LOCAL)],active_call_list=True,phase_type='initialize').match_dimensions(['ccpp_constant_one:horizontal_loop_extent'], ['ccpp_constant_one:horizontal_dimension']) + (True, ['ccpp_constant_one:horizontal_dimension'], ['ccpp_constant_one:horizontal_dimension'], None, None, '') + >>> SuiteObject('foo', _API_CONTEXT,None,None,variables=[Var({'local_name':'beg','standard_name':'horizontal_loop_begin','units':'count','dimensions':'()','type':'integer'}, _API_LOCAL),Var({'local_name':'end','standard_name':'horizontal_loop_end','units':'count','dimensions':'()','type':'integer'}, _API_LOCAL)],active_call_list=True,phase_type=RUN_PHASE_NAME).match_dimensions(['ccpp_constant_one:horizontal_loop_extent'], ['horizontal_loop_begin:horizontal_loop_end']) + (True, ['horizontal_loop_begin:horizontal_loop_end'], ['horizontal_loop_begin:horizontal_loop_end'], None, None, '') + >>> SuiteObject('foo', _API_CONTEXT,None,None,variables=[Var({'local_name':'beg','standard_name':'horizontal_loop_begin','units':'count','dimensions':'()','type':'integer'}, _API_LOCAL),Var({'local_name':'end','standard_name':'horizontal_loop_end','units':'count','dimensions':'()','type':'integer'}, _API_LOCAL),Var({'local_name':'lev','standard_name':'vertical_layer_dimension','units':'count','dimensions':'()','type':'integer'}, _API_LOCAL)],active_call_list=True,phase_type=RUN_PHASE_NAME).match_dimensions(['ccpp_constant_one:horizontal_loop_extent'], ['horizontal_loop_begin:horizontal_loop_end','ccpp_constant_one:vertical_layer_dimension']) + (False, ['horizontal_loop_begin:horizontal_loop_end', 'vertical_layer_index'], ['horizontal_loop_begin:horizontal_loop_end', 'ccpp_constant_one:vertical_layer_dimension'], 'vertical_layer_index', None, 'missing vertical dimension') + >>> SuiteObject('foo', _API_CONTEXT,None,None,variables=[Var({'local_name':'beg','standard_name':'horizontal_loop_begin','units':'count','dimensions':'()','type':'integer'}, _API_LOCAL),Var({'local_name':'end','standard_name':'horizontal_loop_end','units':'count','dimensions':'()','type':'integer'}, _API_LOCAL),Var({'local_name':'lev','standard_name':'vertical_layer_dimension','units':'count','dimensions':'()','type':'integer'}, _API_LOCAL)],active_call_list=True,phase_type=RUN_PHASE_NAME).match_dimensions(['ccpp_constant_one:horizontal_loop_extent','ccpp_constant_one:vertical_layer_dimension'], ['horizontal_loop_begin:horizontal_loop_end','ccpp_constant_one:vertical_layer_dimension']) + (True, ['horizontal_loop_begin:horizontal_loop_end', 'ccpp_constant_one:vertical_layer_dimension'], ['horizontal_loop_begin:horizontal_loop_end', 'ccpp_constant_one:vertical_layer_dimension'], None, None, '') + >>> SuiteObject('foo', _API_CONTEXT,None,None,variables=[Var({'local_name':'beg','standard_name':'horizontal_loop_begin','units':'count','dimensions':'()','type':'integer'}, _API_LOCAL),Var({'local_name':'end','standard_name':'horizontal_loop_end','units':'count','dimensions':'()','type':'integer'}, 
_API_LOCAL),Var({'local_name':'lev','standard_name':'vertical_layer_dimension','units':'count','dimensions':'()','type':'integer'}, _API_LOCAL)],active_call_list=True,phase_type=RUN_PHASE_NAME).match_dimensions(['ccpp_constant_one:horizontal_loop_extent','ccpp_constant_one:vertical_layer_dimension'], ['ccpp_constant_one:vertical_layer_dimension','horizontal_loop_begin:horizontal_loop_end']) + (True, ['horizontal_loop_begin:horizontal_loop_end', 'ccpp_constant_one:vertical_layer_dimension'], ['ccpp_constant_one:vertical_layer_dimension', 'horizontal_loop_begin:horizontal_loop_end'], None, [1, 0], '') + """ + new_need_dims = [] + new_have_dims = list(have_dims) + perm = [] + match = True + missing_vert_dim = None + reason = '' + nlen = len(need_dims) + hlen = len(have_dims) + _, nvdim_index = Var.find_vertical_dimension(need_dims) + _, hvdim_index = Var.find_vertical_dimension(have_dims) + _, nhdim_index = Var.find_horizontal_dimension(need_dims) + _, hhdim_index = Var.find_horizontal_dimension(have_dims) + if hhdim_index < 0 <= nhdim_index: + match = False + nlen = 0 # To skip logic below + hlen = 0 # To skip logic below + reason = '{hname}{hctx} is missing a horizontal dimension ' + reason += 'required by {nname}{nctx}' + # end if + for nindex in range(nlen): + neddim = need_dims[nindex] + if nindex == nhdim_index: + # Look for a horizontal dimension match + vmatch = VarDictionary.loop_var_match(neddim) + hmatch = self.horiz_dim_match(neddim, have_dims[hhdim_index], + vmatch) + if hmatch: + perm.append(hhdim_index) + new_need_dims.append(hmatch) + new_have_dims[hhdim_index] = hmatch + found_ndim = True + else: + found_ndim = False + # end if + else: + # Find the first dimension in have_dims that matches neddim + found_ndim = False + if nvdim_index < 0 <= hvdim_index: + skip = hvdim_index + else: + skip = -1 + # end if + hdim_indices = [x for x in range(hlen) + if (x not in perm) and (x != skip)] + for hindex in hdim_indices: + if (hindex != hvdim_index) or (nvdim_index >= 0): + hmatch = self.dim_match(neddim, have_dims[hindex]) + if hmatch: + perm.append(hindex) + new_need_dims.append(hmatch) + new_have_dims[hindex] = hmatch + found_ndim = True + break + # end if + # end if + # end if + # end for + if not found_ndim: + match = False + reason = 'Could not find dimension, ' + neddim + ', in ' + reason += '{hname}{hctx}. Needed by {nname}{nctx}' + break + # end if (no else, we are still okay) + # end for + # Find a missing vertical dimension index, if necessary + if nvdim_index < 0 <= hvdim_index: + # We need to make a substitution for the vertical + # coordinate in have_dims + vvmatch = VarDictionary.loop_var_match(have_dims[hvdim_index]) + if vvmatch: + vmatch_dims = ':'.join(vvmatch.required_stdnames) + # See if the missing vertical dimensions exist + missing_vert_dim = None + for mstdname in vvmatch.required_stdnames: + mvdim = self.find_variable(standard_name=mstdname, + any_scope=True) + if not mvdim: + missing_vert_dim = vmatch_dims + match = False # Should trigger vertical loop action + reason = 'missing vertical dimension' + break + # end if + # end for + # While we have a missing vertical dimension which has been + # created, do NOT enter the substitution into have_dims. + # The supplied variable still has a vertical dimension. + # On the other hand, we *do* need to add the new vertical + # loop index to new_need_dims. Try to put it in the correct + # place for easy calling from the existing variable. 
+ # Also update perm to match the array access + if hvdim_index < len(new_need_dims): + # Insert the vertical loop dimension + if hvdim_index > 0: + before = new_need_dims[0:hvdim_index] + perm_before = perm[0:hvdim_index] + else: + before = [] + perm_before = [] + # end if + after = new_need_dims[hvdim_index:] + new_need_dims = before + [vmatch_dims] + after + perm = perm_before + [hvdim_index] + perm[hvdim_index:] + else: + new_need_dims.append(vmatch_dims) + perm.append(hvdim_index) + # end if + else: + emsg = "Unknown vertical dimension dimension, '{}'" + raise CCPPError(emsg.format(have_dims[hvdim_index])) + # end if + else: + missing_vert_dim = None + # end if + perm_test = list(range(hlen)) + # If no permutation is found, reset to None + if perm == perm_test: + perm = None + elif (not match) and (missing_vert_dim is None): + perm = None + # end if (else, return perm as is) + if new_have_dims == have_dims: + have_dims = None # Do not make any substitutions + # end if + return match, new_need_dims, new_have_dims, missing_vert_dim, perm, reason + + def find_variable(self, standard_name=None, source_var=None, + any_scope=True, clone=None, + search_call_list=False, loop_subst=False): + """Find a matching variable to , create a local clone (if + is True), or return None. + First search the SuiteObject's internal dictionary, then its + call list (unless is True, then any parent + dictionary (if is True). + can be a Var object or a standard_name string. + is not used by this version of . + """ + # First, search our local dictionary + if standard_name is None: + if source_var is None: + emsg = "One of or must be passed." + raise ParseInternalError(emsg) + # end if + standard_name = source_var.get_prop_value('standard_name') + elif source_var is not None: + stest = source_var.get_prop_value('standard_name') + if stest != standard_name: + emsg = (" and must match if " + + "both are passed.") + raise ParseInternalError(emsg) + # end if + # end if + scl = search_call_list + stdname = standard_name + # Don't clone yet, might find the variable further down + found_var = super(SuiteObject, + self).find_variable(standard_name=stdname, + source_var=source_var, + any_scope=False, clone=None, + search_call_list=scl, + loop_subst=loop_subst) + if (not found_var) and (self.call_list is not None) and scl: + # Don't clone yet, might find the variable further down + found_var = self.call_list.find_variable(standard_name=stdname, + source_var=source_var, + any_scope=False, + clone=None, + search_call_list=scl, + loop_subst=loop_subst) + # end if + loop_okay = VarDictionary.loop_var_okay(stdname, self.run_phase()) + if not loop_okay: + loop_subst = False + # end if + if (found_var is None) and any_scope and (self.parent is not None): + # We do not have the variable, look to parents. + found_var = self.parent.find_variable(standard_name=stdname, + source_var=source_var, + any_scope=True, + clone=clone, + search_call_list=scl, + loop_subst=loop_subst) + # end if + return found_var + + def match_variable(self, var, vstdname=None, vdims=None): + """Try to find a source for in this SuiteObject's dictionary + tree. 
Several items are returned: + found_var: True if a match was found + vert_dim: The vertical dimension in , or None + call_dims: How this variable should be called (or None if no match) + missing_vert: Vertical dim in parent but not in + perm: Permutation (XXgoldyXX: Not yet implemented) + """ + if vstdname is None: + vstdname = var.get_prop_value('standard_name') + # end if + if vdims is None: + vdims = var.get_dimensions() + # end if + if (not vdims) and self.run_phase(): + vmatch = VarDictionary.loop_var_match(vstdname) + else: + vmatch = None + # end if + found_var = False + missing_vert = None + new_vdims = list() + var_vdim = var.has_vertical_dimension(dims=vdims) + # Does this variable exist in the calling tree? + dict_var = self.find_variable(source_var=var, any_scope=True) + if dict_var is None: + # No existing variable but add loop var match to call tree + found_var = self.parent.add_variable_to_call_tree(dict_var, + vmatch=vmatch) + new_vdims = vdims + elif dict_var.source.type in _API_LOCAL_VAR_TYPES: + # We cannot change the dimensions of locally-declared variables + # Using a loop substitution is invalid because the loop variable + # value has not yet been set. + # Therefore, we have to use the declaration dimensions in the call. + found_var = True + new_vdims = dict_var.get_dimensions() + else: + # Check dimensions + dict_dims = dict_var.get_dimensions() + if vdims: + args = self.parent.match_dimensions(vdims, dict_dims) + match, new_vdims, new_dict_dims, missing_vert, perm, err = args + if perm is not None: + errmsg = "Permuted indices are not yet supported" + lname = var.get_prop_value('local_name') + dstr = ', '.join(vdims) + ctx = context_string(var.context) + errmsg += ", var = {}({}){}".format(lname, dstr, ctx) + raise CCPPError(errmsg) + # end if + else: + new_vdims = list() + new_dict_dims = dict_dims + match = True + # end if + # Add the variable to the parent call tree + if dict_dims == new_dict_dims: + sdict = {} + else: + sdict = {'dimensions':new_dict_dims} + # end if + found_var = self.parent.add_variable_to_call_tree(var, + subst_dict=sdict) + if not match: + found_var = False + if not missing_vert: + nctx = context_string(var.context) + nname = var.get_prop_value('local_name') + hctx = context_string(dict_var.context) + hname = dict_var.get_prop_value('local_name') + raise CCPPError(err.format(nname=nname, nctx=nctx, + hname=hname, hctx=hctx)) + # end if + # end if + # end if + # end if + return found_var, var_vdim, new_vdims, missing_vert + + def in_process_split(self): + """Find out if we are in a process-split region""" + proc_split = False + obj = self + while obj is not None: + if isinstance(obj, ProcessSplit): + proc_split = True + break + # end if + if isinstance(obj, TimeSplit): + break + # end if (other object types do not change status) + obj = obj.parent + # end while + return proc_split + + def part(self, index, error=True): + """Return one of this SuiteObject's parts raise an exception, or, + if is False, just return None""" + plen = len(self.__parts) + if (0 <= index < plen) or (abs(index) <= plen): + return self.__parts[index] + # end if + if error: + errmsg = 'No part {} in {} {}'.format(index, + self.__class__.__name__, + self.name) + raise ParseInternalError(errmsg) + # end if + return None + + def has_item(self, item_name): + """Return True iff item, , is already in this SuiteObject""" + has = False + for item in self.__parts: + if item.name == item_name: + has = True + else: + has = item.has_item(item_name) + # end if + if has: + break + # 
end if + # end for + return has + + @property + def name(self): + """Return the name of the element""" + return self.__name + + @name.setter + def name(self, value): + """Set the name of the element if it has not been set""" + if self.__name is None: + self.__name = value + else: + errmsg = 'Attempt to change name of {} to {}' + raise ParseInternalError(errmsg.format(self, value)) + # end if + + @property + def parent(self): + """This SuiteObject's parent (or none)""" + return self.__parent + + @property + def call_list(self): + """Return the SuiteObject's call_list""" + return self.__call_list + + @property + def phase_type(self): + """Return the phase_type of this suite_object""" + return self.__phase_type + + @property + def parts(self): + """Return a copy the component parts of this SuiteObject. + Returning a copy allows for the part list to be changed during + processing of the return value""" + return self.__parts[:] + + @property + def needs_vertical(self): + """Return the vertical dimension this SuiteObject is missing or None""" + return self.__needs_vertical + + @property + def context(self): + """Return the context of this SuiteObject""" + return self.__context + + @needs_vertical.setter + def needs_vertical(self, value): + """Reset the missing vertical dimension of this SuiteObject""" + if value is None: + self.__needs_vertical = value + elif self.__needs_vertical is not None: + if self.__needs_vertical != value: + errmsg = ('Attempt to change missing vertical dimension ' + 'from {} to {}') + raise ParseInternalError(errmsg.format(self.__needs_vertical, + value)) + # end if (no else, value is already correct) + else: + self.__needs_vertical = value + # end if + + def __repr__(self): + """Create a unique readable string for this Object""" + so_repr = super(SuiteObject, self).__repr__() + olmatch = _OBJ_LOC_RE.search(so_repr) + if olmatch is not None: + loc = ' at {}'.format(olmatch.group(1)) + else: + loc = "" + # end if + return '<{} {}{}>'.format(self.__class__.__name__, self.name, loc) + + def __format__(self, spec): + """Return a string representing the SuiteObject, including its children. + is used between subitems. + is the indent level for multi-line output. 
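#
# [Illustrative note, not from the patch] In the format spec handled below,
# the first character is the separator between sub-parts and the remainder is
# the starting indent level: "{}".format(obj) renders a newline-separated,
# indented tree, while "{:,1}".format(obj) joins the parts with ',' starting
# at indent level 1.
#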
+ """ + if spec: + sep = spec[0] + else: + sep = '\n' + # end if + try: + ind_level = int(spec[1:]) + except (ValueError, IndexError): + ind_level = 0 + # end try + if sep == '\n': + indent = " " + else: + indent = "" + # end if + if self.name == self.__class__.__name__: + # This object does not have separate name + nstr = self.name + else: + nstr = "{}: {}".format(self.__class__.__name__, self.name) + # end if + output = "{}<{}>".format(indent*ind_level, nstr) + subspec = "{}{}".format(sep, ind_level + 1) + substr = "{o}{s}{p:" + subspec + "}" + subout = "" + for part in self.parts: + subout = substr.format(o=subout, s=sep, p=part) + # end for + if subout: + output = "{}{}{}{}".format(output, subout, sep, + indent*ind_level, + self.__class__.__name__) + else: + output = "{}".format(output, self.__class__.__name__) + # end if + return output + +############################################################################### + +class Scheme(SuiteObject): + """A single scheme in a suite (e.g., init method)""" + + def __init__(self, scheme_xml, context, parent, logger): + """Initialize this physics Scheme""" + name = scheme_xml.text + self.__subroutine_name = None + self.__context = context + self.__version = scheme_xml.get('version', None) + self.__lib = scheme_xml.get('lib', None) + self.__has_vertical_dimension = False + self.__group = None + super(Scheme, self).__init__(name, context, parent, + logger, active_call_list=True) + + def update_group_call_list_variable(self, var): + """If is in our group's call list, update its intent. + Add to our group's call list unless: + - is in our group's call list + - is in our group's dictionary, + - is a suite variable""" + stdname = var.get_prop_value('standard_name') + my_group = self.__group + gvar = my_group.call_list.find_variable(standard_name=stdname, + any_scope=False) + if gvar: + gvar.adjust_intent(var) + else: + gvar = my_group.find_variable(standard_name=stdname, + any_scope=False) + if gvar is None: + # Check for suite variable + gvar = my_group.find_variable(standard_name=stdname, + any_scope=True) + if gvar and (not SuiteObject.is_suite_variable(gvar)): + gvar = None + # end if + if gvar is None: + my_group.add_call_list_variable(var) + # end if + # end if + + def is_local_variable(self, var): + """Return None as we never consider to be in our local + dictionary. 
+ This is an override of the SuiteObject version""" + return None + + def analyze(self, phase, group, scheme_library, suite_vars, level, logger): + """Analyze the scheme's interface to prepare for writing""" + self.__group = group + my_header = None + if self.name in scheme_library: + func = scheme_library[self.name] + if phase in func: + my_header = func[phase] + self.__subroutine_name = my_header.title + # end if + else: + estr = 'No schemes found for {}' + raise ParseInternalError(estr.format(self.name), + context=self.__context) + # end if + if my_header is None: + estr = 'No {} header found for scheme, {}' + raise ParseInternalError(estr.format(phase, self.name), + context=self.__context) + # end if + if my_header.module is None: + estr = 'No module found for subroutine, {}' + raise ParseInternalError(estr.format(self.subroutine_name), + context=self.__context) + # end if + scheme_mods = set() + scheme_mods.add((my_header.module, self.subroutine_name)) + for var in my_header.variable_list(): + vstdname = var.get_prop_value('standard_name') + def_val = var.get_prop_value('default_value') + vdims = var.get_dimensions() + vintent = var.get_prop_value('intent') + args = self.match_variable(var, vstdname=vstdname, vdims=vdims) + found, vert_dim, new_dims, missing_vert = args + if found: + if not self.has_vertical_dim: + self.__has_vertical_dimension = vert_dim is not None + # end if + # We have a match, make sure var is in call list + if new_dims == vdims: + self.add_call_list_variable(var, exists_ok=True) + self.update_group_call_list_variable(var) + else: + subst_dict = {'dimensions':new_dims} + clone = var.clone(subst_dict) + self.add_call_list_variable(clone, exists_ok=True) + self.update_group_call_list_variable(clone) + # end if + else: + if missing_vert is not None: + # This Scheme needs to be in a VerticalLoop + self.needs_vertical = missing_vert + break # Deal with this and come back + # end if + if vintent == 'out': + if self.__group is None: + errmsg = 'Group not defined for {}'.format(self.name) + raise ParseInternalError(errmsg) + # end if + # The Group will manage this variable + self.__group.manage_variable(var) + self.add_call_list_variable(var) + elif def_val and (vintent != 'out'): + if self.__group is None: + errmsg = 'Group not defined for {}'.format(self.name) + raise ParseInternalError(errmsg) + # end if + # The Group will manage this variable + self.__group.manage_variable(var) + # We still need it in our call list (the group uses a clone) + self.add_call_list_variable(var) + else: + errmsg = 'Input argument for {}, {}, not found.' + if self.find_variable(source_var=var) is not None: + # The variable exists, maybe it is dim mismatch + lname = var.get_prop_value('local_name') + emsg = '\nCheck for dimension mismatch in {}' + errmsg += emsg.format(lname) + # end if + if ((not self.run_phase()) and + (vstdname in CCPP_LOOP_VAR_STDNAMES)): + emsg = '\nLoop variables not allowed in {} phase.' 
+ errmsg += emsg.format(self.phase()) + # end if + raise CCPPError(errmsg.format(self.subroutine_name, + vstdname)) + # end if + # end if + # end for + if self.needs_vertical is not None: + self.parent.add_part(self, replace=True) # Should add a vloop + if isinstance(self.parent, VerticalLoop): + # Restart the loop analysis + scheme_mods = self.parent.analyze(phase, group, scheme_library, + suite_vars, level, logger) + # end if + # end if + return scheme_mods + + def write(self, outfile, logger, errflg, indent): + # Unused arguments are for consistent write interface + # pylint: disable=unused-argument + """Write code to call this Scheme to """ + # Dictionaries to try are our group, the group's call list, + # or our module + cldicts = [self.__group, self.__group.call_list] + cldicts.extend(self.__group.suite_dicts()) + my_args = self.call_list.call_string(cldicts=cldicts, + is_func_call=True, + subname=self.subroutine_name) + stmt = 'call {}({})' + outfile.write('if ({} == 0) then'.format(errflg), indent) + outfile.write(stmt.format(self.subroutine_name, my_args), indent+1) + outfile.write('end if', indent) + + def schemes(self): + """Return self as a list for consistency with subcycle""" + return [self] + + def variable_list(self, recursive=False, + std_vars=True, loop_vars=True, consts=True): + """Return a list of all variables for this Scheme. + Because Schemes do not have any variables, return a list + of this object's CallList variables instead. + Note that because of this, is not allowed.""" + if recursive: + raise ParseInternalError("recursive=True not allowed for Schemes") + # end if + return self.call_list.variable_list(recursive=recursive, + std_vars=std_vars, + loop_vars=loop_vars, consts=consts) + + @property + def subroutine_name(self): + """Return this scheme's actual subroutine name""" + return self.__subroutine_name + + @property + def has_vertical_dim(self): + """Return True if at least one of this Scheme's variables has + a vertical dimension (vertical_layer_dimension or + vertical_interface_dimension) + """ + return self.__has_vertical_dimension + + def __str__(self): + """Create a readable string for this Scheme""" + return ''.format(self.name, self.subroutine_name) + +############################################################################### + +class VerticalLoop(SuiteObject): + """Class to call a group of schemes or scheme collections in a + loop over a vertical dimension.""" + + def __init__(self, index_name, context, parent, logger, items=None): + """ is the standard name of the variable holding the + number of iterations (e.g., vertical_layer_dimension).""" + # self._dim_name is the standard name for the number of iterations + self._dim_name = VarDictionary.find_loop_dim_from_index(index_name) + if self._dim_name is None: + errmsg = 'No VerticalLoop dimension name for index = {}' + raise ParseInternalError(errmsg.format(index_name)) + # end if + if ':' in self._dim_name: + dims = self._dim_name.split(':') + if not dims[1]: + errmsg = 'Invalid loop dimension, {}' + raise ParseInternalError(errmsg.format(self._dim_name)) + # end if + self._dim_name = dims[1] + # end if + # self._local_dim_name is the variable name for self._dim_name + self._local_dim_name = None + super(VerticalLoop, self).__init__(index_name, context, parent, logger) + logger.debug("Adding VerticalLoop for '{}'".format(index_name)) + # Add any items + if not isinstance(items, list): + if items is None: + items = list() + else: + items = [items] + # end if + # end if + for item in items: + 
self.add_part(item) + # end for + + def analyze(self, phase, group, scheme_library, suite_vars, level, logger): + """Analyze the VerticalLoop's interface to prepare for writing""" + # Handle all the suite objects inside of this subcycle + scheme_mods = set() + # Create a variable for the loop index + newvar = Var({'local_name':self.name, 'standard_name':self.name, + 'type':'integer', 'units':'count', 'dimensions':'()'}, + _API_LOCAL) + # The Group will manage this variable + group.manage_variable(newvar) + # Find the loop-extent variable + dim_name = self._dim_name + local_dim = group.find_variable(standard_name=dim_name, any_scope=False) + if local_dim is None: + local_dim = group.call_list.find_variable(standard_name=dim_name, + any_scope=False) + # end if + if local_dim is None: + emsg = 'No variable found for vertical loop dimension {}' + raise ParseInternalError(emsg.format(self._dim_name)) + # end if + self._local_dim_name = local_dim.get_prop_value('local_name') + emsg = "VerticalLoop local name for '{}'".format(self.name) + emsg += " is '{}".format(self.dimension_name) + logger.debug(emsg) + # Analyze our internal items + for item in self.parts: + smods = item.analyze(phase, group, scheme_library, + suite_vars, level+1, logger) + for smod in smods: + scheme_mods.add(smod) + # end for + # end for + return scheme_mods + + def write(self, outfile, logger, errflg, indent): + """Write code for the vertical loop, including contents, to """ + outfile.write('do {} = 1, {}'.format(self.name, self.dimension_name), + indent) + # Note that 'scheme' may be a sybcycle or other construct + for item in self.parts: + item.write(outfile, logger, errflg, indent+1) + # end for + outfile.write('end do', 2) + + @property + def dimension_name(self): + """Return the vertical dimension over which this VerticalLoop loops""" + return self._local_dim_name + +############################################################################### + +class Subcycle(SuiteObject): + """Class to represent a subcycled group of schemes or scheme collections""" + + def __init__(self, sub_xml, context, parent, logger): + name = sub_xml.get('name', None) # Iteration count + loop_extent = sub_xml.get('loop', "1") # Number of iterations + # See if our loop variable is an interger or a variable + try: + loop_int = int(loop_extent) # pylint: disable=unused-variable + self._loop = loop_extent + self._loop_var_int = True + except ValueError: + self._loop_var_int = False + lvar = parent.find_variable(standard_name=self.loop, any_scope=True) + if lvar is None: + emsg = "Subcycle, {}, specifies {} iterations but {} not found" + raise CCPPError(emsg.format(name, self.loop, self.loop)) + # end if + parent.add_call_list_variable(lvar) + # end try + super(Subcycle, self).__init__(name, context, parent, logger) + for item in sub_xml: + new_item = new_suite_object(item, context, self, logger) + self.add_part(new_item) + # end for + + def analyze(self, phase, group, scheme_library, suite_vars, level, logger): + """Analyze the Subcycle's interface to prepare for writing""" + if self.name is None: + self.name = "subcycle_index{}".format(level) + # end if + # Create a variable for the loop index + self.add_variable(Var({'local_name':self.name, + 'standard_name':'loop_variable', + 'type':'integer', 'units':'count', + 'dimensions':'()'}, _API_SOURCE)) + # Handle all the suite objects inside of this subcycle + scheme_mods = set() + for item in self.parts: + smods = item.analyze(phase, group, scheme_library, + suite_vars, level+1, logger) + for smod 
in smods: + scheme_mods.add(smod) + # end for + # end for + return scheme_mods + + def write(self, outfile, logger, errflg, indent): + """Write code for the subcycle loop, including contents, to """ + outfile.write('do {} = 1, {}'.format(self.name, self.loop), indent) + # Note that 'scheme' may be a sybcycle or other construct + for item in self.parts: + item.write(outfile, logger, errflg, indent+1) + # end for + outfile.write('end do', 2) + + @property + def loop(self): + """Return the loop value or variable local_name""" + lvar = self.find_variable(standard_name=self.loop, any_scope=True) + if lvar is None: + emsg = "Subcycle, {}, specifies {} iterations but {} not found" + raise CCPPError(emsg.format(self.name, self.loop, self.loop)) + # end if + lname = lvar.get_prop_value('local_name') + return lname + +############################################################################### + +class TimeSplit(SuiteObject): + """Class to represent a group of processes to be computed in a time-split + manner -- each parameterization or other construct is called with an + state which has been updated from the previous step. + """ + + def __init__(self, sub_xml, context, parent, logger): + super(TimeSplit, self).__init__('TimeSplit', context, parent, logger) + for part in sub_xml: + new_item = new_suite_object(part, context, self, logger) + self.add_part(new_item) + # end for + + def analyze(self, phase, group, scheme_library, suite_vars, level, logger): + # Unused arguments are for consistent analyze interface + # pylint: disable=unused-argument + """Analyze the TimeSplit's interface to prepare for writing""" + # Handle all the suite objects inside of this group + scheme_mods = set() + for item in self.parts: + smods = item.analyze(phase, group, scheme_library, + suite_vars, level+1, logger) + for smod in smods: + scheme_mods.add(smod) + # end for + # end for + return scheme_mods + + def write(self, outfile, logger, errflg, indent): + """Write code for this TimeSplit section, including contents, + to """ + for item in self.parts: + item.write(outfile, logger, errflg, indent) + # end for + +############################################################################### + +class ProcessSplit(SuiteObject): + """Class to represent a group of processes to be computed in a + process-split manner -- all parameterizations or other constructs are + called with the same state. + NOTE: Currently a stub + """ + + def __init__(self, sub_xml, context, parent, logger): + # Unused arguments are for consistent __init__ interface + # pylint: disable=unused-argument + super(ProcessSplit, self).__init__('ProcessSplit', context, + parent, logger) + raise CCPPError('ProcessSplit not yet implemented') + + def analyze(self, phase, group, scheme_library, suite_vars, level, logger): + # Unused arguments are for consistent analyze interface + # pylint: disable=unused-argument + """Analyze the ProcessSplit's interface to prepare for writing""" + # Handle all the suite objects inside of this group + raise CCPPError('ProcessSplit not yet implemented') + + def write(self, outfile, logger, errflg, indent): + """Write code for this ProcessSplit section, including contents, + to """ + raise CCPPError('ProcessSplit not yet implemented') + +############################################################################### + +class Group(SuiteObject): + """Class to represent a grouping of schemes in a suite + A Group object is implemented as a subroutine callable by the API. + The main arguments to a group are the host model variables. 
+ Additional output arguments are generated from schemes with intent(out) + arguments. + Additional input or inout arguments are generated for inputs needed by + schemes which are produced (intent(out)) by other groups. + """ + + __subhead = ''' + subroutine {subname}({args}) +''' + + __subend = ''' + end subroutine {subname} + +! ======================================================================== +''' + + __thread_check = CodeBlock([('#ifdef _OPENMP', -1), + ('if (omp_get_thread_num() > 1) then', 1), + ('{errflg} = 1', 2), + (('{errmsg} = "Cannot call {phase} routine ' + 'from a threaded region"'), 2), + ('return', 2), + ('end if', 1), + ('#endif', -1)]) + + __process_types = [_API_TIMESPLIT_TAG, _API_PROCESSSPLIT_TAG] + + __process_xml = {} + for gptype in __process_types: + __process_xml[gptype] = '<{ptype}>'.format(ptype=gptype) + # end for + + def __init__(self, group_xml, transition, parent, context, logger): + """Initialize this Group object from . + is the group's phase, is the group's suite. + """ + name = parent.name + '_' + group_xml.get('name') + if transition not in CCPP_STATE_MACH.transitions(): + errmsg = "Bad transition argument to Group, '{}'" + raise ParseInternalError(errmsg.format(transition)) + # end if + # Initialize the dictionary of variables internal to group + super(Group, self).__init__(name, context, parent, + logger, active_call_list=True, + phase_type=transition) + # Add the items but first make sure we know the process type for + # the group (e.g., TimeSplit or ProcessSplit). + if (transition == RUN_PHASE_NAME) and ((not group_xml) or + (group_xml[0].tag not in + Group.__process_types)): + # Default is TimeSplit + tsxml = ET.fromstring(Group.__process_xml[_API_TIMESPLIT_TAG]) + time_split = new_suite_object(tsxml, context, self, logger) + add_to = time_split + self.add_part(time_split) + else: + add_to = self + # end if + # Add the sub objects either directly to the Group or to the TimeSplit + for item in group_xml: + new_item = new_suite_object(item, context, add_to, logger) + add_to.add_part(new_item) + # end for + self._local_schemes = set() + self._host_vars = None + self._host_ddts = None + self._loop_var_matches = list() + self._phase_check_stmts = list() + self._set_state = None + self._ddt_library = None + + def phase_match(self, scheme_name): + """If scheme_name matches the group phase, return the group and + function ID. Otherwise, return None + """ + fid, tid, _ = CCPP_STATE_MACH.transition_match(scheme_name, + transition=self.phase()) + if tid is not None: + return self, fid + # end if + return None, None + + def move_to_call_list(self, standard_name): + """Move a variable from the group internal dictionary to the call list. + This is done when the variable, , will be allocated by + the suite. + """ + gvar = self.find_variable(standard_name=standard_name, any_scope=False) + if gvar is None: + errmsg = "Group {}, cannot move {}, variable not found" + raise ParseInternalError(errmsg.format(self.name, standard_name)) + # end if + self.add_call_list_variable(gvar, exists_ok=True) + self.remove_variable(standard_name) + + def register_action(self, vaction): + """Register any recognized type for use during self.write. + Return True iff is handled. 
+ """ + if isinstance(vaction, VarLoopSubst): + self._loop_var_matches = vaction.add_to_list(self._loop_var_matches) + # Add the missing dim + vaction.add_local(self, _API_LOCAL) + return True + # end if + return False + + def manage_variable(self, newvar): + """Add to our local dictionary making necessary + modifications to the variable properties so that it is + allocated appropriately""" + # Need new prop dict to eliminate unwanted properties (e.g., intent) + vdims = newvar.get_dimensions() + # Look for dimensions where we have a loop substitution and replace + # with the correct size + if self.run_phase(): + hdims = [x.missing_stdname for x in self._loop_var_matches] + else: + # Do not do loop substitutions in full phases + hdims = list() + # end if + for index, dim in enumerate(vdims): + newdim = None + for subdim in dim.split(':'): + if subdim in hdims: + # We have a loop substitution, find and replace + hindex = hdims.index(subdim) + names = self._loop_var_matches[hindex].required_stdnames + newdim = ':'.join(names) + break + # end if + if ('vertical' in subdim) and ('index' in subdim): + # We have a vertical index, replace with correct dimension + errmsg = "vertical index replace not implemented" + raise ParseInternalError(errmsg) + # end if + # end for + if newdim is not None: + vdims[index] = newdim + # end if + # end for + if self.timestep_phase(): + persist = 'timestep' + else: + persist = 'run' + # end if + # Start with an official copy of 's prop_dict with + # corrected dimensions + subst_dict = {'dimensions':vdims} + prop_dict = newvar.copy_prop_dict(subst_dict=subst_dict) + # Add the allocatable items + prop_dict['allocatable'] = len(vdims) > 0 # No need to allocate scalar + prop_dict['persistence'] = persist + # This is a local variable + if 'intent' in prop_dict: + del prop_dict['intent'] + # end if + # Create a new variable, save the original context + local_var = Var(prop_dict, ParseSource(_API_SOURCE_NAME, + _API_LOCAL_VAR_NAME, + newvar.context)) + self.add_variable(local_var, exists_ok=True) + # Finally, make sure all dimensions are accounted for + emsg = self.add_variable_dimensions(local_var, _API_LOCAL_VAR_TYPES, + adjust_intent=True, + to_dict=self.call_list) + if emsg: + raise CCPPError(emsg) + # end if + + def analyze(self, phase, suite_vars, scheme_library, ddt_library, logger): + """Analyze the Group's interface to prepare for writing""" + self._ddt_library = ddt_library + # Sanity check for Group + if phase != self.phase(): + errmsg = 'Group {} has phase {} but analyze is phase {}' + raise ParseInternalError(errmsg.format(self.name, + self.phase(), phase)) + # end if + for item in self.parts: + # Items can be schemes, subcycles or other objects + # All have the same interface and return a set of module use + # statements (lschemes) + lschemes = item.analyze(phase, self, scheme_library, + suite_vars, 1, logger) + for lscheme in lschemes: + self._local_schemes.add(lscheme) + # end for + # end for + self._phase_check_stmts = Suite.check_suite_state(phase) + self._set_state = Suite.set_suite_state(phase) + logger.debug("{}".format(self)) + + def allocate_dim_str(self, dims, context): + """Create the dimension string for an allocate statement""" + rdims = list() + for dim in dims: + rdparts = list() + dparts = dim.split(':') + for dpart in dparts: + dvar = self.find_variable(standard_name=dpart, any_scope=False) + if dvar is None: + dvar = self.call_list.find_variable(standard_name=dpart, + any_scope=False) + if dvar is None: + emsg = "Dimension variable, '{}', 
not found{}" + lvar = self.find_local_name(dpart, any_scope=True) + if lvar is not None: + emsg += "\nBe sure to use standard names!" + # end if + ctx = context_string(context) + raise CCPPError(emsg.format(dpart, ctx)) + # end if + lname = dvar.get_prop_value('local_name') + rdparts.append(lname) + # end for + rdims.append(':'.join(rdparts)) + # end for + return ', '.join(rdims) + + def find_variable(self, standard_name=None, source_var=None, + any_scope=True, clone=None, + search_call_list=False, loop_subst=False): + """Find a matching variable to , create a local clone (if + is True), or return None. + This purpose of this special Group version is to record any constituent + variable found for processing during the write phase. + """ + fvar = super(Group, + self).find_variable(standard_name=standard_name, + source_var=source_var, + any_scope=any_scope, clone=clone, + search_call_list=search_call_list, + loop_subst=loop_subst) + if fvar and fvar.is_constituent(): + if fvar.source.type == ConstituentVarDict.constitutent_source_type(): + # We found this variable in the constituent dictionary, + # add it to our call list + self.add_call_list_variable(fvar, exists_ok=True) + # end if + # end if + return fvar + + def write(self, outfile, logger, host_arglist, indent, const_mod, + suite_vars=None, allocate=False, deallocate=False): + """Write code for this subroutine (Group), including contents, + to """ + # Unused arguments are for consistent write interface + # pylint: disable=unused-argument + # group type for (de)allocation + if self.timestep_phase(): + group_type = 'timestep' # Just allocate for the timestep + else: + group_type = 'run' # Allocate for entire run + # end if + # Collect information on local variables + subpart_vars = {} + allocatable_var_set = set() + for item in [self]:# + self.parts: + for var in item.declarations(): + lname = var.get_prop_value('local_name') + if lname in subpart_vars: + if subpart_vars[lname][0].compatible(var): + pass # We already are going to declare this variable + else: + errmsg = "Duplicate Group variable, {}" + raise ParseInternalError(errmsg.format(lname)) + # end if + else: + subpart_vars[lname] = (var, item) + dims = var.get_dimensions() + if (dims is not None) and dims: + allocatable_var_set.add(lname) + # end if + # end if + # end for + # end for + # First, write out the subroutine header + subname = self.name + call_list = self.call_list.call_string() + outfile.write(Group.__subhead.format(subname=subname, args=call_list), + indent) + # Write out any use statements + if self._local_schemes: + modmax = max([len(s[0]) for s in self._local_schemes]) + else: + modmax = 0 + # end if + # Write out the scheme use statements + scheme_use = 'use {},{} only: {}' + for scheme in self._local_schemes: + smod = scheme[0] + sname = scheme[1] + slen = ' '*(modmax - len(smod)) + outfile.write(scheme_use.format(smod, slen, sname), indent+1) + # end for + # Look for any DDT types + call_vars = self.call_list.variable_list() + self._ddt_library.write_ddt_use_statements(call_vars, outfile, + indent+1, pad=modmax) + decl_vars = [x[0] for x in subpart_vars.values()] + self._ddt_library.write_ddt_use_statements(decl_vars, outfile, + indent+1, pad=modmax) + outfile.write('', 0) + # Write out dummy arguments + outfile.write('! Dummy arguments', indent+1) + msg = 'Variables for {}: ({})' + logger.debug(msg.format(self.name, call_vars)) + self.call_list.declare_variables(outfile, indent+1, dummy=True) + if subpart_vars: + outfile.write('\n! 
Local Variables', indent+1) + # Write out local variables + for key in subpart_vars: + var = subpart_vars[key][0] + spdict = subpart_vars[key][1] + var.write_def(outfile, indent+1, spdict, + allocatable=(key in allocatable_var_set)) + # end for + outfile.write('', 0) + # Get error variable names + verrflg = self.find_variable(standard_name='ccpp_error_flag', + any_scope=True) + if verrflg is not None: + errflg = verrflg.get_prop_value('local_name') + else: + errmsg = "No ccpp_error_flag variable for group, {}" + raise CCPPError(errmsg.format(self.name)) + # end if + verrmsg = self.find_variable(standard_name='ccpp_error_message', + any_scope=True) + if verrmsg is not None: + errmsg = verrmsg.get_prop_value('local_name') + else: + errmsg = "No ccpp_error_message variable for group, {}" + raise CCPPError(errmsg.format(self.name)) + # end if + # Initialize error variables + outfile.write("{} = 0".format(errflg), 2) + outfile.write("{} = ''".format(errmsg), 2) + # Output threaded region check (except for run phase) + if not self.run_phase(): + Group.__thread_check.write(outfile, indent, + {'phase' : self.phase(), + 'errflg' : errflg, 'errmsg' : errmsg}) + # Check state machine + self._phase_check_stmts.write(outfile, indent, + {'errflg' : errflg, 'errmsg' : errmsg, + 'funcname' : self.name}) + # Allocate local arrays + alloc_stmt = "allocate({}({}))" + for lname in allocatable_var_set: + var = subpart_vars[lname][0] + dims = var.get_dimensions() + alloc_str = self.allocate_dim_str(dims, var.context) + outfile.write(alloc_stmt.format(lname, alloc_str), indent+1) + # end for + # Allocate suite vars + if allocate: + for svar in suite_vars.variable_list(): + dims = svar.get_dimensions() + if dims: + timestep_var = svar.get_prop_value('persistence') + if group_type == timestep_var: + alloc_str = self.allocate_dim_str(dims, svar.context) + lname = svar.get_prop_value('local_name') + outfile.write(alloc_stmt.format(lname, alloc_str), + indent+1) + # end if (do not allocate in this phase) + # end if dims (do not allocate scalars) + # end for + # end if + # Write any loop match calculations + for vmatch in self._loop_var_matches: + action = vmatch.write_action(self, dict2=self.call_list) + if action: + outfile.write(action, indent+1) + # end if + # end for + # Write the scheme and subcycle calls + for item in self.parts: + item.write(outfile, logger, errflg, indent + 1) + # end for + # Deallocate local arrays + for lname in allocatable_var_set: + outfile.write('deallocate({})'.format(lname), indent+1) + # end for + # Deallocate suite vars + if deallocate: + for svar in suite_vars.variable_list(): + dims = svar.get_dimensions() + if dims: + timestep_var = svar.get_prop_value('persistence') + if group_type == timestep_var: + lname = svar.get_prop_value('local_name') + outfile.write('deallocate({})'.format(lname), indent+1) + # end if + # end if (no else, do not deallocate scalars) + # end for + # end if + self._set_state.write(outfile, indent, {}) + # end if + outfile.write(Group.__subend.format(subname=subname), indent) + + @property + def suite(self): + """Return this Group's suite""" + return self.parent + + def suite_dicts(self): + """Return a list of this Group's Suite's dictionaries""" + return self.suite.suite_dicts() + +############################################################################### + +class Suite(VarDictionary): + """Class to hold, process, and output a CAP for an entire CCPP suite. 
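Group.write above allocates each local array with a dimension string built by allocate_dim_str, which resolves every standard-name dimension part to the corresponding local name. A toy sketch of that mapping follows; the lookup dictionary and variable names are invented stand-ins for the group's find_variable resolution.

```python
# Hypothetical local-name lookup standing in for find_variable(); the real
# method resolves each dimension part through the group and its call list.
local_names = {'ccpp_constant_one': '1',
               'horizontal_dimension': 'ncol',
               'vertical_layer_dimension': 'pver'}

dims = ['ccpp_constant_one:horizontal_dimension', 'vertical_layer_dimension']
alloc_str = ', '.join(':'.join(local_names[part] for part in dim.split(':'))
                      for dim in dims)
print('allocate({}({}))'.format('temp_tend', alloc_str))
# -> allocate(temp_tend(1:ncol, pver))
```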
+ The Suite includes initialization and finalization Group objects as + well as a Group for every suite part.""" + + __state_machine_initial_state = 'uninitialized' + __state_machine_var_name = 'ccpp_suite_state' + + __state_machine_init = ''' +character(len=16) :: {css_var_name} = '{state}' +''' + + # Note that these group names need to match CCPP_STATE_MACH + __initial_group_name = 'initialize' + + __final_group_name = 'finalize' + + __timestep_initial_group_name = 'timestep_initial' + + __timestep_final_group_name = 'timestep_final' + + __scheme_template = '{}' + + def __init__(self, filename, api, logger): + """Initialize this Suite object from the SDF, . + serves as the Suite's parent.""" + self.__logger = logger + self._name = None + self._sdf_name = filename + self._groups = list() + self._suite_init_group = None + self._suite_final_group = None + self._timestep_init_group = None + self._timestep_final_group = None + self.__context = None + self._host_arg_list_full = None + self._host_arg_list_noloop = None + self._module = None + self._ddt_library = None + # Full phases/groups are special groups where the entire state is passed + self._full_groups = {} + self._full_phases = {} + self._gvar_stdnames = {} # Standard names of group-created vars + # Initialize our dictionary + # Create a 'parent' to hold the constituent variables + # The parent for the constituent dictionary is the API. + temp_name = os.path.splitext(os.path.basename(filename))[0] + const_dict = ConstituentVarDict(temp_name+'_constituents', + parent_dict=api, + logger=logger) + super(Suite, self).__init__(self.sdf_name, parent_dict=const_dict, + logger=logger) + if not os.path.exists(self._sdf_name): + emsg = "Suite definition file {0} not found." + raise CCPPError(emsg.format(self._sdf_name)) + # end if + # Parse the SDF + self.parse() + + @property + def name(self): + """Get the name of the suite.""" + return self._name + + @property + def sdf_name(self): + """Get the name of the suite definition file.""" + return self._sdf_name + + @classmethod + def check_suite_state(cls, stage): + """Return a list of CCPP state check statements for """ + check_stmts = list() + if stage in CCPP_STATE_MACH.transitions(): + # We need to make sure we are an allowed previous state + prev_state = CCPP_STATE_MACH.initial_state(stage) + css = "trim({})".format(Suite.__state_machine_var_name) + prev_str = "({} /= '{}')".format(css, prev_state) + check_stmts.append(("if {} then".format(prev_str), 1)) + check_stmts.append(("{errflg} = 1", 2)) + errmsg_str = "write({errmsg}, '(3a)') " + errmsg_str += "\"Invalid initial CCPP state, '\", " + css + ', ' + errmsg_str += "\"' in {funcname}\"" + check_stmts.append((errmsg_str, 2)) + check_stmts.append(("return", 2)) + check_stmts.append(("end if", 1)) + else: + raise ParseInternalError("Unknown stage, '{}'".format(stage)) + # end if + return CodeBlock(check_stmts) + + @classmethod + def set_suite_state(cls, phase): + """Return the code string to set the current suite state to . + If the initial and final states of are identical, return blank. + """ + initial = CCPP_STATE_MACH.initial_state(phase) + final = CCPP_STATE_MACH.final_state(phase) + if initial == final: + stmt = '! 
Suite state does not change' + else: + stmt = "ccpp_suite_state = '{}'".format(final) + # end if + return CodeBlock([(stmt, 1)]) + + def new_group(self, group_string, transition): + """Create a new Group object from the a XML description""" + if isinstance(group_string, str): + gxml = ET.fromstring(group_string) + else: + gxml = group_string + # end if + group = Group(gxml, transition, self, self.__context, self.__logger) + for svar in CCPP_REQUIRED_VARS: + group.add_call_list_variable(svar) + # end for + if transition != RUN_PHASE_NAME: + self._full_groups[group.name] = group + self._full_phases[group.phase()] = group + # end if + return group + + def new_group_from_name(self, group_name): + '''Create an XML string for Group, , and use it to + create the corresponding group. + Note: must be the a transition string''' + group_xml = ''.format(group_name) + return self.new_group(group_xml, group_name) + + def parse(self): + """Parse the suite definition file.""" + success = True + + _, suite_xml = read_xml_file(self._sdf_name, self.__logger) + # We do not have line number information for the XML file + self.__context = ParseContext(filename=self._sdf_name) + # Validate the XML file + version = find_schema_version(suite_xml) + res = validate_xml_file(self._sdf_name, 'suite', version, self.__logger) + if not res: + emsg = "Invalid suite definition file, '{}'" + raise CCPPError(emsg.format(self._sdf_name)) + # end if + self._name = suite_xml.get('name') + self._module = 'ccpp_{}_cap'.format(self.name) + lmsg = "Reading suite definition file for '{}'" + self.__logger.info(lmsg.format(self.name)) + gname = Suite.__initial_group_name + self._suite_init_group = self.new_group_from_name(gname) + gname = Suite.__final_group_name + self._suite_final_group = self.new_group_from_name(gname) + gname = Suite.__timestep_initial_group_name + self._timestep_init_group = self.new_group_from_name(gname) + gname = Suite.__timestep_final_group_name + self._timestep_final_group = self.new_group_from_name(gname) + # Set up some groupings for later efficiency + self._beg_groups = [self._suite_init_group.name, + self._timestep_init_group.name] + self._end_groups = [self._suite_final_group.name, + self._timestep_final_group.name] + # Build hierarchical structure as in SDF + self._groups.append(self._suite_init_group) + self._groups.append(self._timestep_init_group) + for suite_item in suite_xml: + item_type = suite_item.tag.lower() + # Suite item is a group or a suite-wide init or final method + if item_type == 'group': + # Parse a group + self._groups.append(self.new_group(suite_item, RUN_PHASE_NAME)) + else: + match_trans = CCPP_STATE_MACH.function_match(item_type) + if match_trans is None: + emsg = "Unknown CCPP suite component tag type, '{}'" + raise CCPPError(emsg.format(item_type)) + # end if + if match_trans in self._full_phases: + # Parse a suite-wide initialization scheme + scheme = Scheme(suite_item, self.__context, + self, self.__logger) + self._full_phases[match_trans].add_item(scheme) + else: + emsg = "Unhandled CCPP suite component tag type, '{}'" + raise ParseInternalError(emsg.format(match_trans)) + # end if + # end for + self._groups.append(self._timestep_final_group) + self._groups.append(self._suite_final_group) + return success + + def suite_dicts(self): + """Return a list of this Suite's dictionaries. 
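The parse method above walks the suite definition file (SDF) and turns each group element into a run-phase Group, while any other top-level tag is matched against the CCPP state machine. Below is a minimal, hypothetical SDF of the shape it consumes; the suite, group, and scheme names are invented for illustration.

```python
# Hypothetical SDF snippet; names are invented for illustration only.
import xml.etree.ElementTree as ET

sdf_text = """
<suite name="demo" version="1.0">
  <group name="physics">
    <subcycle loop="2">
      <scheme>demo_scheme</scheme>
    </subcycle>
  </group>
</suite>
"""
suite_xml = ET.fromstring(sdf_text)
print(suite_xml.get('name'))        # -> demo
for suite_item in suite_xml:
    # 'group' tags become run-phase Group objects; anything else is matched
    # against CCPP_STATE_MACH (initialize, timestep_initial, ...) as above.
    print(suite_item.tag, suite_item.get('name'))
# -> group physics
```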
+ A Suite's dictionaries are itself plus its constituent dictionary""" + return [self, self.parent] + + @property + def module(self): + """Get the list of the module generated for this suite.""" + return self._module + + @property + def groups(self): + """Get the list of groups in this suite.""" + return self._groups + + def find_variable(self, standard_name=None, source_var=None, + any_scope=True, clone=None, + search_call_list=False, loop_subst=False): + """Attempt to return the variable matching . + if is None, the standard name from is used. + It is an error to pass both and if + the standard name of is not the same as . + If is True, search parent scopes if not in current scope. + If the variable is not found this Suite's groups are searched for + a matching output variable. If found that variable is promoted to be a + Suite module variable and that variable is returned. + If the variable is not found and is not None, add a clone of + to this dictionary. + If the variable is not found and is None, return None. + """ + # First, see if the variable is already in our path + srch_clist = search_call_list + var = super(Suite, self).find_variable(standard_name=standard_name, + source_var=source_var, + any_scope=any_scope, + clone=None, + search_call_list=srch_clist, + loop_subst=loop_subst) + if var is None: + # No dice? Check for a group variable which can be promoted + if standard_name in self._gvar_stdnames: + group = self._gvar_stdnames[standard_name] + var = group.find_variable(standard_name=standard_name, + source_var=source_var, + any_scope=False, + search_call_list=srch_clist, + loop_subst=loop_subst) + if var is not None: + # Promote variable to suite level + # Remove this entry to avoid looping back here + del self._gvar_stdnames[standard_name] + # Let everyone know this is now a Suite variable + var.source = ParseSource(_API_SOURCE_NAME, + _API_SUITE_VAR_NAME, + var.context) + self.add_variable(var) + # Remove the variable from the group + group.remove_variable(standard_name) + else: + emsg = ("Group, {}, claimed it had created {} " + "but variable was not found") + raise CCPPError(emsg.format(group.name, standard_name)) + # end if + # end if + # end if + if (var is None) and (clone is not None): + # Guess it is time to clone a different variable + var = super(Suite, self).find_variable(standard_name=standard_name, + source_var=source_var, + any_scope=any_scope, + clone=clone) + # end if + return var + + def analyze(self, host_model, scheme_library, ddt_library, logger): + """Collect all information needed to write a suite file + >>> CCPP_STATE_MACH.transition_match('init') + 'initialize' + >>> CCPP_STATE_MACH.transition_match('init', transition='finalize') + + >>> CCPP_STATE_MACH.transition_match('INIT') + 'initialize' + >>> CCPP_STATE_MACH.transition_match('initial') + 'initialize' + >>> CCPP_STATE_MACH.transition_match('timestep_initial') + 'timestep_initial' + >>> CCPP_STATE_MACH.transition_match('timestep_initialize') + 'timestep_initial' + >>> CCPP_STATE_MACH.transition_match('timestep_init') + 'timestep_initial' + >>> CCPP_STATE_MACH.transition_match('initialize') + 'initialize' + >>> CCPP_STATE_MACH.transition_match('initialize')[0:4] + 'init' + >>> CCPP_STATE_MACH.transition_match('initize') + + >>> CCPP_STATE_MACH.transition_match('run') + 'run' + >>> CCPP_STATE_MACH.transition_match('finalize') + 'finalize' + >>> CCPP_STATE_MACH.transition_match('finalize')[0:5] + 'final' + >>> CCPP_STATE_MACH.transition_match('final') + 'finalize' + >>> 
CCPP_STATE_MACH.transition_match('finalize_bar') + + >>> CCPP_STATE_MACH.function_match('foo_init') + ('foo', 'init', 'initialize') + >>> CCPP_STATE_MACH.function_match('foo_init', transition='finalize') + (None, None, None) + >>> CCPP_STATE_MACH.function_match('FOO_INIT') + ('FOO', 'INIT', 'initialize') + >>> CCPP_STATE_MACH.function_match('foo_initial') + ('foo', 'initial', 'initialize') + >>> CCPP_STATE_MACH.function_match('foo_initialize') + ('foo', 'initialize', 'initialize') + >>> CCPP_STATE_MACH.function_match('foo_initialize')[1][0:4] + 'init' + >>> CCPP_STATE_MACH.function_match('foo_initize') + (None, None, None) + >>> CCPP_STATE_MACH.function_match('foo_timestep_initial') + ('foo', 'timestep_initial', 'timestep_initial') + >>> CCPP_STATE_MACH.function_match('foo_timestep_init') + ('foo', 'timestep_init', 'timestep_initial') + >>> CCPP_STATE_MACH.function_match('foo_timestep_initialize') + ('foo', 'timestep_initialize', 'timestep_initial') + >>> CCPP_STATE_MACH.function_match('foo_run') + ('foo', 'run', 'run') + >>> CCPP_STATE_MACH.function_match('foo_finalize') + ('foo', 'finalize', 'finalize') + >>> CCPP_STATE_MACH.function_match('foo_finalize')[1][0:5] + 'final' + >>> CCPP_STATE_MACH.function_match('foo_final') + ('foo', 'final', 'finalize') + >>> CCPP_STATE_MACH.function_match('foo_finalize_bar') + (None, None, None) + >>> CCPP_STATE_MACH.function_match('foo_timestep_final') + ('foo', 'timestep_final', 'timestep_final') + >>> CCPP_STATE_MACH.function_match('foo_timestep_finalize') + ('foo', 'timestep_finalize', 'timestep_final') + """ + self._ddt_library = ddt_library + # Collect all relevant schemes + # For all groups, find associated init and final methods + scheme_set = set() + for group in self.groups: + for scheme in group.schemes(): + scheme_set.add(scheme.name) + # end for + # end for + no_scheme_entries = {} # Skip schemes that are not in this suite + for module in scheme_library: + if module in scheme_set: + scheme_entries = scheme_library[module] + else: + scheme_entries = no_scheme_entries + # end if + for phase in self._full_phases: + if phase in scheme_entries: + header = scheme_entries[phase] + # Add this scheme's init or final routine + pgroup = self._full_phases[phase] + if not pgroup.has_item(header.title): + sstr = Suite.__scheme_template.format(module) + sxml = ET.fromstring(sstr) + scheme = Scheme(sxml, self.__context, pgroup, + self.__logger) + pgroup.add_part(scheme) + # end if (no else, scheme is already in group) + # end if (no else, phase not in scheme set) + # end for + # end for + # Grab the host model argument list + self._host_arg_list_full = host_model.argument_list() + self._host_arg_list_noloop = host_model.argument_list(loop_vars=False) + # First pass, create init, run, and finalize sequences + for item in self.groups: + if item.name in self._full_groups: + phase = self._full_groups[item.name].phase() + else: + phase = RUN_PHASE_NAME + # end if + lmsg = "Group {}, schemes = {}" + self.__logger.debug(lmsg.format(item.name, + [x.name for x in item.schemes()])) + item.analyze(phase, self, scheme_library, ddt_library, logger) + # Look for group variables that need to be promoted to the suite + # We need to promote any variable used later to the suite, however, + # we do not yet know if it will be used. 
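The comment above, together with the loop that follows, records which group created each new variable so that a later Suite.find_variable call can promote that variable to the suite. A stripped-down sketch of the bookkeeping with invented group and variable names:

```python
# Invented names; the real code stores Group objects keyed by standard name
# in self._gvar_stdnames.
gvar_stdnames = {}
group_outputs = {'demo_physics': ['cloud_fraction'],
                 'demo_chemistry': ['ozone_tendency']}
for group_name, produced in group_outputs.items():
    for stdname in produced:
        if stdname not in gvar_stdnames:
            gvar_stdnames[stdname] = group_name
        # end if
    # end for
# end for
# A later request for 'cloud_fraction' finds 'demo_physics', promotes the
# variable to the suite dictionary, and deletes the entry to avoid loops.
print(gvar_stdnames['cloud_fraction'])   # -> demo_physics
```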
+ # Add new group-created variables + gvars = item.variable_list() + for gvar in gvars: + stdname = gvar.get_prop_value('standard_name') + if not stdname in self._gvar_stdnames: + self._gvar_stdnames[stdname] = item + # end if + # end for + # end for + + def is_run_group(self, group): + """Method to separate out run-loop groups from special initial + and final groups + """ + return ((group.name not in self._beg_groups) and + (group.name not in self._end_groups)) + + def max_part_len(self): + """What is the longest suite subroutine name?""" + maxlen = 0 + for spart in self.groups: + if self.is_run_group(spart): + maxlen = max(maxlen, len(spart.name)) + # end if + # end for + return maxlen + + def part_list(self): + """Return list of run phase parts (groups)""" + parts = list() + for spart in self.groups: + if self.is_run_group(spart): + parts.append(spart.name[len(self.name)+1:]) + # end if + # end for + return parts + + def phase_group(self, phase): + """Return the (non-run) group specified by """ + if phase in self._full_phases: + return self._full_phases[phase] + # end if + raise ParseInternalError("Incorrect phase, '{}'".format(phase)) + + def constituent_dictionary(self): + """Return the constituent dictionary for this suite""" + return self.parent + + def write(self, output_dir, logger): + """Create caps for all groups in the suite and for the entire suite + (calling the group caps one after another)""" + # Set name of module and filename of cap + filename = '{module_name}.F90'.format(module_name=self.module) + logger.debug('Writing CCPP suite file, {}'.format(filename)) + # Retrieve the name of the constituent module for Group use statements + const_mod = self.parent.constituent_module_name() + # Init + output_file_name = os.path.join(output_dir, filename) + with FortranWriter(output_file_name, 'w', + "CCPP Suite Cap for {}".format(self.name), + self.module) as outfile: + # Write module 'use' statements here + outfile.write('use {}'.format(KINDS_MODULE), 1) + # Look for any DDT types + self._ddt_library.write_ddt_use_statements(self.values(), + outfile, 1) + # Write out constituent module use statement(s) + const_dict = self.constituent_dictionary() + const_dict.write_suite_use(outfile, 1) + outfile.write_preamble() + outfile.write('! Suite interfaces', 1) + line = Suite.__state_machine_init + var_name = Suite.__state_machine_var_name + var_state = Suite.__state_machine_initial_state + outfile.write(line.format(css_var_name=var_name, + state=var_state), 1) + for group in self._groups: + outfile.write('public :: {}'.format(group.name), 1) + # end for + # Declare constituent public interfaces + const_dict.declare_public_interfaces(outfile, 1) + # Declare constituent private suite interfaces and data + const_dict.declare_private_data(outfile, 1) + outfile.write('\n! 
Private suite variables', 1) + for svar in self.keys(): + self[svar].write_def(outfile, 1, self, allocatable=True) + # end for + outfile.end_module_header() + for group in self._groups: + if group.name in self._beg_groups: + group.write(outfile, logger, self._host_arg_list_noloop, 1, + const_mod, suite_vars=self, allocate=True) + elif group.name in self._end_groups: + group.write(outfile, logger, self._host_arg_list_noloop, 1, + const_mod, suite_vars=self, deallocate=True) + else: + group.write(outfile, logger, self._host_arg_list_full, 1, + const_mod) + # end if + # end for + err_vars = self.find_error_variables(any_scope=True, + clone_as_out=True) + # Write the constituent properties interface + const_dict.write_constituent_routines(outfile, 1, + self.name, err_vars) + # end with + return output_file_name + +############################################################################### + +class API(VarDictionary): + """Class representing the API for the CCPP framework. + The API class organizes the suites for which CAPS will be generated""" + + __suite_fname = 'ccpp_physics_suite_list' + __part_fname = 'ccpp_physics_suite_part_list' + __vars_fname = 'ccpp_physics_suite_variables' + + __file_desc = "API for {host_model} calls to CCPP suites" + + __preamble = ''' +{module_use} +''' + + __sub_name_template = 'ccpp_physics' + + __subhead = 'subroutine {subname}({api_call_list})' + + __subfoot = 'end subroutine {subname}\n' + + # Note, we cannot add these vars to our dictionary as we do not want + # them showing up in group dummy arg lists + __suite_name = Var({'local_name':'suite_name', + 'standard_name':'suite_name', + 'intent':'in', 'type':'character', + 'kind':'len=*', 'units':'', + 'dimensions':'()'}, _API_SOURCE) + + __suite_part = Var({'local_name':'suite_part', + 'standard_name':'suite_part', + 'intent':'in', 'type':'character', + 'kind':'len=*', 'units':'', + 'dimensions':'()'}, _API_SOURCE) + + def __init__(self, sdfs, host_model, scheme_headers, logger): + """Initialize this API""" + self.__module = 'ccpp_physics_api' + self.__host = host_model + self.__suites = list() + super(API, self).__init__(self.module, parent_dict=self.host_model, + logger=logger) + # Create a usable library out of scheme_headers + # Structure is dictionary of dictionaries + # Top-level dictionary is keyed by function name + # Secondary level is by phase + scheme_library = {} + # First, process DDT headers + self._ddt_lib = DDTLibrary('{}_api'.format(self.host_model.name), + ddts=[d for d in scheme_headers + if d.header_type == 'ddt'], + logger=logger) + for header in [d for d in scheme_headers if d.header_type != 'ddt']: + if header.header_type != 'scheme': + errmsg = "{} is an unknown CCPP API metadata header type, {}" + raise CCPPError(errmsg.format(header.title, header.header_type)) + # end if + func_id, _, match_trans = CCPP_STATE_MACH.function_match(header.title) + if func_id not in scheme_library: + scheme_library[func_id] = {} + # end if + func_entry = scheme_library[func_id] + if match_trans not in func_entry: + func_entry[match_trans] = header + else: + errmsg = "Duplicate scheme entry, {}" + raise CCPPError(errmsg.format(header.title)) + # end if + # end for + # Turn the SDF files into Suites + for sdf in sdfs: + suite = Suite(sdf, self, logger) + suite.analyze(self.host_model, scheme_library, self._ddt_lib, logger) + self.__suites.append(suite) + # end for + # We will need the correct names for errmsg and errflg + evar = self.host_model.find_variable(standard_name='ccpp_error_message') + 
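The loop above turns the flat list of scheme metadata headers into scheme_library, a two-level dictionary keyed first by scheme (function) name and then by state-machine phase. A hypothetical illustration of the resulting layout; the scheme names are for illustration and the string values stand in for parsed metadata header objects.

```python
# Shapes only; values stand in for the header objects stored by the loop above.
scheme_library = {
    'kessler': {
        'initialize': '<metadata header for kessler_init>',
        'run':        '<metadata header for kessler_run>',
    },
    'held_suarez': {
        'run': '<metadata header for held_suarez_run>',
    },
}
# Suite.analyze looks up a phase entry for each scheme used by the suite:
header = scheme_library.get('kessler', {}).get('run')
print(header)   # -> <metadata header for kessler_run>
```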
subst_dict = {'intent':'out'} + if evar is not None: + self._errmsg_var = evar.clone(subst_dict) + else: + raise CCPPError('Required variable, ccpp_error_message, not found') + # end if + evar = self.host_model.find_variable(standard_name='ccpp_error_flag') + if evar is not None: + self._errflg_var = evar.clone(subst_dict) + else: + raise CCPPError('Required variable, ccpp_error_flag, not found') + # end if + # We need a call list for every phase + self.__call_lists = {} + for phase in CCPP_STATE_MACH.transitions(): + self.__call_lists[phase] = CallList('API_' + phase, logger=logger) + self.__call_lists[phase].add_variable(self.suite_name_var) + if phase == RUN_PHASE_NAME: + self.__call_lists[phase].add_variable(self.suite_part_var) + # end if + for suite in self.__suites: + for group in suite.groups: + if group.phase() == phase: + self.__call_lists[phase].add_vars(group.call_list, + gen_unique=True) + # end if + # end for + # end for + # end for + + @classmethod + def interface_name(cls, phase): + 'Return the name of an API interface function' + return "{}_{}".format(cls.__sub_name_template, phase) + + def call_list(self, phase): + "Return the appropriate API call list variables" + if phase in self.__call_lists: + return self.__call_lists[phase] + # end if + raise ParseInternalError("Illegal phase, '{}'".format(phase)) + + def write(self, output_dir, logger): + """Write CCPP API module""" + if not self.suites: + raise CCPPError("No suite specified for generating API") + # end if + api_filenames = list() + # Write out the suite files + for suite in self.suites: + out_file_name = suite.write(output_dir, logger) + api_filenames.append(out_file_name) + # end for + return api_filenames + + @classmethod + def declare_inspection_interfaces(cls, ofile): + """Declare the API interfaces for the suite inquiry functions""" + ofile.write("public :: {}".format(API.__suite_fname), 1) + ofile.write("public :: {}".format(API.__part_fname), 1) + ofile.write("public :: {}".format(API.__vars_fname), 1) + + def get_errinfo_names(self): + """Return a tuple of error output local names""" + errmsg_name = self._errmsg_var.get_prop_value('local_name') + errflg_name = self._errflg_var.get_prop_value('local_name') + return (errmsg_name, errflg_name) + + @staticmethod + def write_var_set_loop(ofile, varlist_name, var_list, indent, + add_allocate=True, start_index=1, start_var=None): + """Write code to allocate (if is True) and set + to . Elements of are set + beginning at . 
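A sketch of the code that write_var_set_loop emits for a short list, with a plain print function standing in for the Fortran writer and arbitrary variable names; the optional start_var offset handled by the real method is omitted here.

```python
from __future__ import print_function

# Stand-in for the helper above; the real method calls ofile.write(line, indent).
def sketch_var_set_loop(write, varlist_name, var_list, add_allocate=True,
                        start_index=1):
    if add_allocate:
        write('allocate({}({}))'.format(varlist_name, len(var_list)))
    # end if
    for ind, var in enumerate(var_list):
        write("{}({}) = '{}'".format(varlist_name, ind + start_index, var))
    # end for

sketch_var_set_loop(print, 'suites', ['suite_a', 'suite_b'])
# allocate(suites(2))
# suites(1) = 'suite_a'
# suites(2) = 'suite_b'
```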
+ """ + if add_allocate: + ofile.write("allocate({}({}))".format(varlist_name, len(var_list)), + indent) + # end if + for ind, var in enumerate(var_list): + if start_var: + ind_str = "{} + {}".format(start_var, ind + start_index) + else: + ind_str = "{}".format(ind + start_index) + # end if + ofile.write("{}({}) = '{}'".format(varlist_name, ind_str, var), + indent) + # end for + + def write_inspection_routines(self, ofile): + """Write the list_suites and list_suite_parts subroutines""" + errmsg_name, errflg_name = self.get_errinfo_names() + ofile.write("subroutine {}(suites)".format(API.__suite_fname), 1) + nsuites = len(self.suites) + oline = "character(len=*), allocatable, intent(out) :: suites(:)" + ofile.write(oline, 2) + ofile.write("\nallocate(suites({}))".format(nsuites), 2) + for ind, suite in enumerate(self.suites): + ofile.write("suites({}) = '{}'".format(ind+1, suite.name), 2) + # end for + ofile.write("end subroutine {}".format(API.__suite_fname), 1) + # Write out the suite part list subroutine + oline = "suite_name, part_list, {errmsg}, {errflg}" + inargs = oline.format(errmsg=errmsg_name, errflg=errflg_name) + ofile.write("\nsubroutine {}({})".format(API.__part_fname, inargs), 1) + oline = "character(len=*), intent(in) :: suite_name" + ofile.write(oline, 2) + oline = "character(len=*), allocatable, intent(out) :: part_list(:)" + ofile.write(oline, 2) + self._errmsg_var.write_def(ofile, 2, self) + self._errflg_var.write_def(ofile, 2, self) + else_str = '' + ename = self._errflg_var.get_prop_value('local_name') + ofile.write("{} = 0".format(ename), 2) + ename = self._errmsg_var.get_prop_value('local_name') + ofile.write("{} = ''".format(ename), 2) + for suite in self.suites: + oline = "{}if(trim(suite_name) == '{}') then" + ofile.write(oline.format(else_str, suite.name), 2) + API.write_var_set_loop(ofile, 'part_list', suite.part_list(), 3) + else_str = 'else ' + # end for + ofile.write("else", 2) + emsg = "write({errmsg}, '(3a)')".format(errmsg=errmsg_name) + emsg += "'No suite named ', trim(suite_name), ' found'" + ofile.write(emsg, 3) + ofile.write("{errflg} = 1".format(errflg=errflg_name), 3) + ofile.write("end if", 2) + ofile.write("end subroutine {}".format(API.__part_fname), 1) + # Write out the suite required variable subroutine + oline = "suite_name, variable_list, {errmsg}, {errflg}" + oline += ", input_vars, output_vars, struct_elements" + inargs = oline.format(errmsg=errmsg_name, errflg=errflg_name) + ofile.write("\nsubroutine {}({})".format(API.__vars_fname, inargs), 1) + ofile.write("! Dummy arguments", 2) + oline = "character(len=*), intent(in) :: suite_name" + ofile.write(oline, 2) + oline = "character(len=*), allocatable, intent(out) :: variable_list(:)" + ofile.write(oline, 2) + self._errmsg_var.write_def(ofile, 2, self, extra_space=22) + self._errflg_var.write_def(ofile, 2, self, extra_space=22) + oline = "logical, optional, intent(in) :: input_vars" + ofile.write(oline, 2) + oline = "logical, optional, intent(in) :: output_vars" + ofile.write(oline, 2) + oline = "logical, optional, intent(in) :: struct_elements" + ofile.write(oline, 2) + ofile.write("! 
Local variables", 2) + ofile.write("logical {}:: input_vars_use".format(' '*34), 2) + ofile.write("logical {}:: output_vars_use".format(' '*34), 2) + ofile.write("logical {}:: struct_elements_use".format(' '*34), 2) + ofile.write("integer {}:: num_vars".format(' '*34), 2) + ofile.write("", 0) + ename = self._errflg_var.get_prop_value('local_name') + ofile.write("{} = 0".format(ename), 2) + ename = self._errmsg_var.get_prop_value('local_name') + ofile.write("{} = ''".format(ename), 2) + ofile.write("if (present(input_vars)) then", 2) + ofile.write("input_vars_use = input_vars", 3) + ofile.write("else", 2) + ofile.write("input_vars_use = .true.", 3) + ofile.write("end if", 2) + ofile.write("if (present(output_vars)) then", 2) + ofile.write("output_vars_use = output_vars", 3) + ofile.write("else", 2) + ofile.write("output_vars_use = .true.", 3) + ofile.write("end if", 2) + ofile.write("if (present(struct_elements)) then", 2) + ofile.write("struct_elements_use = struct_elements", 3) + ofile.write("else", 2) + ofile.write("struct_elements_use = .true.", 3) + ofile.write("end if", 2) + else_str = '' + for suite in self.suites: + parent = suite.parent + # Collect all the suite variables + oline = "{}if(trim(suite_name) == '{}') then" + input_vars = [set(), set(), set()] # leaves, arrrays, leaf elements + inout_vars = [set(), set(), set()] # leaves, arrrays, leaf elements + output_vars = [set(), set(), set()] # leaves, arrrays, leaf elements + for part in suite.groups: + for var in part.call_list.variable_list(): + stdname = var.get_prop_value("standard_name") + intent = var.get_prop_value("intent") + protected = var.get_prop_value("protected") + if (parent is not None) and (not protected): + pvar = parent.find_variable(standard_name=stdname) + if pvar is not None: + protected = pvar.get_prop_value("protected") + # end if + # end if + elements = var.intrinsic_elements(check_dict=self.parent) + if (intent == 'in') and (not protected): + if isinstance(elements, list): + input_vars[1].add(stdname) + input_vars[2].update(elements) + else: + input_vars[0].add(stdname) + # end if + elif intent == 'inout': + if isinstance(elements, list): + inout_vars[1].add(stdname) + inout_vars[2].update(elements) + else: + inout_vars[0].add(stdname) + # end if + elif intent == 'out': + if isinstance(elements, list): + output_vars[1].add(stdname) + output_vars[2].update(elements) + else: + output_vars[0].add(stdname) + # end if + # end if + # end for + # end for + # Figure out how many total variables to return and allocate + # variable_list to that size + ofile.write(oline.format(else_str, suite.name), 2) + ofile.write("if (input_vars_use .and. 
output_vars_use) then", 3) + have_elems = input_vars[2] or inout_vars[2] or output_vars[2] + if have_elems: + ofile.write("if (struct_elements_use) then", 4) + numvars = len(input_vars[0] | input_vars[2] | inout_vars[0] | + inout_vars[2] | output_vars[0] | output_vars[2]) + ofile.write("num_vars = {}".format(numvars), 5) + ofile.write("else", 4) + # end if + numvars = len(input_vars[0] | input_vars[1] | inout_vars[0] | + inout_vars[1] | output_vars[0] | output_vars[1]) + ofile.write("num_vars = {}".format(numvars), 5 if have_elems else 4) + if have_elems: + ofile.write("end if", 4) + # end if + ofile.write("else if (input_vars_use) then", 3) + have_elems = input_vars[2] or inout_vars[2] + if have_elems: + ofile.write("if (struct_elements_use) then", 4) + numvars = len(input_vars[0] | input_vars[2] | + inout_vars[0] | inout_vars[2]) + ofile.write("num_vars = {}".format(numvars), 5) + ofile.write("else", 4) + # end if + numvars = len(input_vars[0] | input_vars[1] | + inout_vars[0] | inout_vars[1]) + ofile.write("num_vars = {}".format(numvars), 5 if have_elems else 4) + if have_elems: + ofile.write("end if", 4) + # end if + ofile.write("else if (output_vars_use) then", 3) + have_elems = inout_vars[2] or output_vars[2] + if have_elems: + ofile.write("if (struct_elements_use) then", 4) + numvars = len(inout_vars[0] | inout_vars[2] | + output_vars[0] | output_vars[2]) + ofile.write("num_vars = {}".format(numvars), 5) + ofile.write("else", 4) + # end if + numvars = len(inout_vars[0] | inout_vars[1] | + output_vars[0] | output_vars[1]) + ofile.write("num_vars = {}".format(numvars), 5 if have_elems else 4) + if have_elems: + ofile.write("end if", 4) + # end if + ofile.write("else", 3) + ofile.write("num_vars = 0", 4) + ofile.write("end if", 3) + ofile.write("allocate(variable_list(num_vars))", 3) + # Now, fill in the variable_list array + # Start with inout variables + elem_start = 1 + leaf_start = 1 + leaf_written_set = inout_vars[0].copy() + elem_written_set = inout_vars[0].copy() + leaf_list = sorted(inout_vars[0]) + if inout_vars[0] or inout_vars[1] or inout_vars[2]: + ofile.write("if (input_vars_use .or. output_vars_use) then", 3) + API.write_var_set_loop(ofile, 'variable_list', leaf_list, 4, + add_allocate=False, + start_index=leaf_start) + # end if + leaf_start += len(leaf_list) + elem_start += len(leaf_list) + # elements which have not been written out + elem_list = sorted(inout_vars[2] - elem_written_set) + elem_written_set = elem_written_set | inout_vars[2] + leaf_list = sorted(inout_vars[1] - leaf_written_set) + leaf_written_set = leaf_written_set | inout_vars[1] + if elem_list or leaf_list: + ofile.write("if (struct_elements_use) then", 4) + API.write_var_set_loop(ofile, 'variable_list', elem_list, 5, + add_allocate=False, + start_index=elem_start) + elem_start += len(elem_list) + ofile.write("num_vars = {}".format(elem_start - 1), 5) + ofile.write("else", 4) + API.write_var_set_loop(ofile, 'variable_list', leaf_list, 5, + add_allocate=False, + start_index=leaf_start) + leaf_start += len(leaf_list) + ofile.write("num_vars = {}".format(leaf_start - 1), 5) + ofile.write("end if", 4) + else: + ofile.write("num_vars = {}".format(len(leaf_written_set)), + 4 if leaf_written_set else 3) + # end if + if inout_vars[0] or inout_vars[1] or inout_vars[2]: + ofile.write("end if", 3) + # end if + # Write input variables + leaf_list = sorted(input_vars[0] - leaf_written_set) + # Are there any output variables which are also input variables + # (e.g., for a different part (group) of the suite)? 
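The num_vars computations above are unions over the per-intent sets so that a standard name requested through more than one intent (or both as an array and through its elements) is only counted once. A toy example with invented standard names:

```python
# Invented standard names; positions [0]/[1]/[2] mirror the
# leaves / arrays / leaf-elements split used above.
input_vars = [{'air_temperature', 'surface_pressure'}, set(), set()]
inout_vars = [{'air_temperature'}, set(), set()]        # overlaps the inputs
output_vars = [{'precipitation_flux'}, set(), set()]

num_vars = len(input_vars[0] | input_vars[1] |
               inout_vars[0] | inout_vars[1] |
               output_vars[0] | output_vars[1])
print(num_vars)   # -> 3, not 4: 'air_temperature' is counted once
```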
+ # We need to collect them now in case is selected + # but not . + leaf_cross_set = output_vars[0] & input_vars[0] + simp_cross_set = (output_vars[1] & input_vars[1]) - leaf_cross_set + elem_cross_set = (output_vars[2] & input_vars[2]) - leaf_cross_set + # Subtract the variables which have already been written out + leaf_cross_list = sorted(leaf_cross_set - leaf_written_set) + simp_cross_list = sorted(simp_cross_set - leaf_written_set) + elem_cross_list = sorted(elem_cross_set - elem_written_set) + # Next move back to processing the input variables + leaf_written_set = leaf_written_set | input_vars[0] + elem_list = sorted(input_vars[2] - elem_written_set) + elem_written_set = elem_written_set | input_vars[0] | input_vars[2] + have_inputs = elem_list or leaf_list + if have_inputs: + ofile.write("if (input_vars_use) then", 3) + # elements which have not been written out + # end if + API.write_var_set_loop(ofile, 'variable_list', leaf_list, 4, + add_allocate=False, start_var="num_vars", + start_index=1) + if leaf_list: + ofile.write("num_vars = num_vars + {}".format(len(leaf_list)), + 4) + # end if + leaf_start += len(leaf_list) + elem_start += len(leaf_list) + leaf_list = input_vars[1].difference(leaf_written_set) + leaf_written_set.union(input_vars[1]) + if elem_list or leaf_list: + ofile.write("if (struct_elements_use) then", 4) + API.write_var_set_loop(ofile, 'variable_list', elem_list, 5, + add_allocate=False, + start_index=elem_start) + elem_start += len(elem_list) - 1 + ofile.write("num_vars = {}".format(elem_start), 5) + ofile.write("else", 4) + API.write_var_set_loop(ofile, 'variable_list', leaf_list, 5, + add_allocate=False, + start_index=leaf_start) + leaf_start += len(leaf_list) - 1 + ofile.write("num_vars = {}".format(leaf_start), 5) + ofile.write("end if", 4) + # end if + if have_inputs: + ofile.write("end if", 3) + # end if + # Write output variables + leaf_list = sorted(output_vars[0].difference(leaf_written_set)) + leaf_written_set = leaf_written_set.union(output_vars[0]) + elem_written_set = elem_written_set.union(output_vars[0]) + elem_list = sorted(output_vars[2].difference(elem_written_set)) + elem_written_set = elem_written_set.union(output_vars[2]) + have_outputs = elem_list or leaf_list + if have_outputs: + ofile.write("if (output_vars_use) then", 3) + # end if + leaf_start = 1 + API.write_var_set_loop(ofile, 'variable_list', leaf_list, 4, + add_allocate=False, start_var="num_vars", + start_index=leaf_start) + leaf_start += len(leaf_list) + elem_start = leaf_start + leaf_list = output_vars[1].difference(leaf_written_set) + leaf_written_set.union(output_vars[1]) + if elem_list or leaf_list: + ofile.write("if (struct_elements_use) then", 4) + API.write_var_set_loop(ofile, 'variable_list', elem_list, 5, + add_allocate=False, start_var="num_vars", + start_index=elem_start) + elem_start += len(elem_list) + ofile.write("else", 4) + API.write_var_set_loop(ofile, 'variable_list', leaf_list, 5, + add_allocate=False, start_var="num_vars", + start_index=leaf_start) + leaf_start += len(leaf_list) + ofile.write("end if", 4) + # end if + if leaf_cross_list or elem_cross_list: + ofile.write("if (.not. 
input_vars_use) then", 4) + API.write_var_set_loop(ofile, 'variable_list', leaf_cross_list, + 5, add_allocate=False, + start_var="num_vars", + start_index=leaf_start) + leaf_start += len(leaf_cross_list) + elem_start += len(leaf_cross_list) + if elem_cross_list or simp_cross_list: + ofile.write("if (struct_elements_use) then", 5) + API.write_var_set_loop(ofile, 'variable_list', + elem_cross_list, 6, + add_allocate=False, + start_var="num_vars", + start_index=elem_start) + elem_start += len(elem_list) + ofile.write("else", 5) + API.write_var_set_loop(ofile, 'variable_list', + leaf_cross_list, 6, + add_allocate=False, + start_var="num_vars", + start_index=leaf_start) + leaf_start += len(leaf_list) + ofile.write("end if", 5) + # end if + ofile.write("end if", 4) + if have_outputs: + ofile.write("end if", 3) + # end if + else_str = 'else ' + # end for + ofile.write("else", 2) + emsg = "write({errmsg}, '(3a)')".format(errmsg=errmsg_name) + emsg += "'No suite named ', trim(suite_name), ' found'" + ofile.write(emsg, 3) + ofile.write("{errflg} = 1".format(errflg=errflg_name), 3) + ofile.write("end if", 2) + ofile.write("end subroutine {}".format(API.__vars_fname), 1) + + @property + def module(self): + """Return the module name of the API.""" + return self.__module + + @property + def host_model(self): + """Return the host model which will use this API.""" + return self.__host + + @property + def suite_name_var(self): + "Return the name of the variable specifying the suite to run" + return self.__suite_name + + @property + def suite_part_var(self): + "Return the name of the variable specifying the suite group to run" + return self.__suite_part + + @property + def suites(self): + "Return the list of this API's suites" + return self.__suites + +############################################################################### +if __name__ == "__main__": + # pylint: disable=ungrouped-imports + from parse_tools import init_log, set_log_to_null + LOGGING = init_log('ccpp_suite') + set_log_to_null(LOGGING) + try: + # First, run doctest + import doctest + doctest.testmod() + # Goal: Replace this test with a suite from unit tests + FRAME_ROOT = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) + CAM = os.path.dirname(FRAME_ROOT) + KESSLER = os.path.join(CAM, 'src', 'physics', 'ncar_ccpp', + 'suite_kessler.xml') + if os.path.exists(KESSLER): + _ = Suite(KESSLER, VarDictionary('Kessler'), LOGGING) + else: + print("Cannot find test file, '{}', skipping test".format(KESSLER)) + except CCPPError as suite_error: + print("{}".format(suite_error)) +# end if (no else) diff --git a/scripts/code_block.py b/scripts/code_block.py new file mode 100644 index 00000000..eaf03e3d --- /dev/null +++ b/scripts/code_block.py @@ -0,0 +1,124 @@ +#!/usr/bin/env python +# + +"""Class and methods to create a code block which can then be written +to a file.""" + +# Python library imports +import re +# CCPP framework imports +from parse_tools import ParseContext, ParseSource, context_string +from parse_tools import ParseInternalError + +class CodeBlock(object): + """Class to store a block of code and a method to write it to a file + >>> CodeBlock([]) #doctest: +ELLIPSIS + <__main__.CodeBlock object at 0x...> + >>> CodeBlock(['hi mom']) #doctest: +IGNORE_EXCEPTION_DETAIL + Traceback (most recent call last): + ParseInternalError: Each element of must contain exactly two items, a code string and a relative indent + >>> CodeBlock([('hi mom')]) #doctest: +IGNORE_EXCEPTION_DETAIL + Traceback (most recent call last): + 
ParseInternalError: Each element of must contain exactly two items, a code string and a relative indent + >>> CodeBlock([('hi mom', 'x')]) #doctest: +IGNORE_EXCEPTION_DETAIL + Traceback (most recent call last): + ParseInternalError: Each element of must contain exactly two items, a code string and a relative indent + >>> CodeBlock([('hi mom', 1)]) #doctest: +ELLIPSIS + <__main__.CodeBlock object at 0x...> + >>> CodeBlock([('hi mom', 1)]).write(outfile, 1, {}) + + >>> CodeBlock([('hi {greet} mom', 1)]).write(outfile, 1, {}) #doctest: +IGNORE_EXCEPTION_DETAIL + Traceback (most recent call last): + ParseInternalError: 'greet' missing from + >>> CodeBlock([('hi {{greet}} mom', 1)]).write(outfile, 1, {}) + >>> CodeBlock([('{greet} there mom', 1)]).write(outfile, 1, {'greet':'hi'}) + """ + + __var_re = re.compile(r"[{][ ]*([A-Za-z][A-Za-z0-9_]*)[ ]*[}]") + + __fmt_msg = ('Each element of must contain exactly two ' + 'items, a code string and a relative indent') + + def __init__(self, code_list): + """Initialize object with a list of statements. + Capture and store all variables required for output. + Each statement is a tuple consisting of a string and an indent level. + Non-negative indents will be added to a current indent at write time + while negative indents are written with no indentation. + """ + self.__code_block = code_list + self.__write_vars = list() + for line in self.__code_block: + if len(line) != 2: + raise ParseInternalError(CodeBlock.__fmt_msg.format(code_list)) + # end if + stmt = line[0] + if not isinstance(stmt, str): + raise ParseInternalError(CodeBlock.__fmt_msg.format(code_list)) + # end if + if not isinstance(line[1], int): + raise ParseInternalError(CodeBlock.__fmt_msg.format(code_list)) + # end if + beg = 0 + end = len(stmt) + while beg < end: + # Ignore double curly braces + open_double_curly = stmt.find('{{', beg) + close_double_curly = stmt.find('}}', + max(open_double_curly, beg)) + if 0 <= open_double_curly < close_double_curly: + beg = close_double_curly + 2 + else: + match = CodeBlock.__var_re.search(stmt[beg:]) + if match: + self.__write_vars.append(match.group(1)) + beg = stmt.index('}', beg) + 1 + else: + beg = end + 1 + # end if + # end if + # end while + # end for + + def write(self, outfile, indent_level, var_dict): + """Write this object's code block to using + as a basic offset. + Format each line using the variables from . 
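A small usage sketch to complement the doctests above. DemoWriter is a stand-in for the FortranWriter objects CodeBlock normally writes to (anything with a write(statement, indent) method works), and the sketch assumes the scripts directories are on PYTHONPATH so code_block is importable.

```python
from __future__ import print_function
from code_block import CodeBlock

class DemoWriter(object):
    """Minimal stand-in for FortranWriter, used only for this sketch."""
    def write(self, statement, indent):
        print('  ' * indent + statement)

block = CodeBlock([('if ({errflg} /= 0) then', 1),
                   ('return', 2),
                   ('end if', 1)])
# Relative indents (1, 2, 1) are added to the base indent passed to write();
# '{errflg}' must be supplied through the dictionary argument.
block.write(DemoWriter(), 1, {'errflg': 'errflg'})
```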
+ It is an error for to not contain any variable + indicated in the code block.""" + + for line in self.__code_block: + stmt = line[0] + if indent_level >= 0: + indent = indent_level + line[1] + else: + indent = 0 + # end if + # Check that contains all required items + errmsg = '' + sep = '' + for var in self.__write_vars: + if var not in var_dict: + errmsg += "'{}' missing from ".format(sep, var) + sep = '\n' + # end if + # end for + if errmsg: + raise ParseInternalError(errmsg) + # end if + outfile.write(stmt.format(**var_dict), indent) + # end for + +############################################################################### +if __name__ == "__main__": + # pylint: disable=ungrouped-imports + import doctest + import os + from fortran_tools import FortranWriter + outfile_name = "__code_block_temp.F90" + with FortranWriter(outfile_name, 'w', 'test file', 'test_mod') as outfile: + doctest.testmod() + # end with + if os.path.exists(outfile_name): + os.remove(outfile_name) + # end if diff --git a/scripts/common.py b/scripts/common.py index 5de12ce0..166cb9be 100755 --- a/scripts/common.py +++ b/scripts/common.py @@ -19,6 +19,7 @@ CCPP_STAGES['run'] = 'run' CCPP_STAGES['finalize'] = 'final' +CCPP_CONSTANT_ONE = 'ccpp_constant_one' CCPP_ERROR_FLAG_VARIABLE = 'ccpp_error_flag' CCPP_ERROR_MSG_VARIABLE = 'ccpp_error_message' CCPP_LOOP_COUNTER = 'ccpp_loop_counter' diff --git a/scripts/constituents.py b/scripts/constituents.py new file mode 100644 index 00000000..747b64a3 --- /dev/null +++ b/scripts/constituents.py @@ -0,0 +1,578 @@ +#!/usr/bin/env python + +""" +Class and supporting code to hold all information on CCPP constituent +variables. A constituent variable is defined and maintained by the CCPP +Framework instead of the host model. +The ConstituentVarDict class contains methods to generate the necessary code +to implement this support. +""" + +# Python library imports +from __future__ import print_function +import os +# CCPP framework imports +from file_utils import KINDS_MODULE +from fortran_tools import FortranWriter +from parse_tools import ParseInternalError +from metavar import Var, VarDictionary + +######################################################################## + +CONST_DDT_NAME = "ccpp_model_constituents_t" +CONST_DDT_MOD = "ccpp_constituent_prop_mod" +CONST_PROP_TYPE = "ccpp_constituent_properties_t" + +######################################################################## + +class ConstituentVarDict(VarDictionary): + """A class to hold all the constituent variables for a CCPP Suite. + Also contains methods to generate the necessary code for runtime + allocation and support for these variables. + """ + + __const_prop_array_name = "ccpp_constituent_array" + __const_prop_init_name = "ccpp_constituents_initialized" + __const_prop_init_consts = "ccpp_create_constituent_array" + __const_prop_type_name = "ccpp_constituent_properties_t" + __constituent_type = "suite" + + def __init__(self, name, parent_dict, variables=None, logger=None): + """Create a specialized VarDictionary for constituents. + The main difference is functionality to allocate and support + these variables with special functions for the host model. + The main reason for a separate dictionary is that these are not + proper Suite variables but will belong to the host model at run time. + The feature of the VarDictionary class is required + because this dictionary must be connected to a host model. 
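When find_variable below creates a constituent on demand, it first widens any horizontal loop dimensions to whole-domain dimensions so the host-model array covers every column. A standalone sketch of that substitution; the input dimension list is a hypothetical example.

```python
# The mapping mirrors the if/elif chain in find_variable below; the input
# dimensions are an invented example.
loop_to_const = {'horizontal_loop_extent': 'horizontal_dimension',
                 'horizontal_loop_end': 'horizontal_dimension',
                 'horizontal_loop_begin': 'ccpp_constant_one'}

dims = ['horizontal_loop_begin:horizontal_loop_end',
        'vertical_layer_dimension']
newdims = [':'.join(loop_to_const.get(part, part) for part in dim.split(':'))
           for dim in dims]
print(newdims)
# -> ['ccpp_constant_one:horizontal_dimension', 'vertical_layer_dimension']
```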
+ """ + super(ConstituentVarDict, self).__init__(name, variables=variables, + parent_dict=parent_dict, + logger=logger) + + def find_variable(self, standard_name=None, source_var=None, + any_scope=True, clone=None, + search_call_list=False, loop_subst=False): + """Attempt to return the variable matching . + if is None, the standard name from is used. + It is an error to pass both and if + the standard name of is not the same as . + If is True, search parent scopes if not in current scope. + Note: Unlike the version of this method, the case for + CCPP_CONSTANT_VARS is not handled -- it should have been handled + by a lower level. + If the variable is not found but is a constituent variable type, + create the variable in this dictionary + Note that although the argument is accepted for consistency, + cloning is not handled at this level. + If the variable is not found and is not a constituent + variable, return None. + """ + if standard_name is None: + if source_var is None: + emsg = "One of or must be passed." + raise ParseInternalError(emsg) + # end if + standard_name = source_var.get_prop_value('standard_name') + elif source_var is not None: + stest = source_var.get_prop_value('standard_name') + if stest != standard_name: + emsg = ("Only one of or may " + + "be passed.") + raise ParseInternalError(emsg) + # end if + # end if + if standard_name in self: + var = self[standard_name] + elif any_scope and (self._parent_dict is not None): + srch_clist = search_call_list + var = self._parent_dict.find_variable(standard_name=standard_name, + source_var=source_var, + any_scope=any_scope, + clone=None, + search_call_list=srch_clist, + loop_subst=loop_subst) + else: + var = None + # end if + if (var is None) and source_var and source_var.is_constituent(): + # If we did not find the variable and it is a constituent type, + # add a clone of to our dictionary. + # First, maybe do a loop substitution + dims = source_var.get_dimensions() + newdims = list() + for dim in dims: + dstdnames = dim.split(':') + new_dnames = list() + for dstdname in dstdnames: + if dstdname == 'horizontal_loop_extent': + new_dnames.append('horizontal_dimension') + elif dstdname == 'horizontal_loop_end': + new_dnames.append('horizontal_dimension') + elif dstdname == 'horizontal_loop_begin': + new_dnames.append('ccpp_constant_one') + else: + new_dnames.append(dstdname) + # end if + # end for + newdims.append(':'.join(new_dnames)) + # end for + var = source_var.clone({'dimensions' : newdims}, remove_intent=True, + source_type=self.__constituent_type) + self.add_variable(var) + return var + + def declare_public_interfaces(self, outfile, indent): + """Declare the public constituent interfaces. + Declarations are written to at indent, .""" + outfile.write("! Public interfaces for handling constituents", indent) + outfile.write("! Return the number of constituents for this suite", + indent) + outfile.write("public :: {}".format(self.num_consts_funcname()), indent) + outfile.write("! Return the name of a constituent", indent) + outfile.write("public :: {}".format(self.const_name_subname()), indent) + outfile.write("! Copy the data for a constituent", indent) + outfile.write("public :: {}".format(self.copy_const_subname()), indent) + + def declare_private_data(self, outfile, indent): + """Declare private suite module variables and interfaces + to with indent, .""" + outfile.write("! 
Private constituent module data", indent) + if self: + stmt = "type({}), private, allocatable :: {}(:)" + outfile.write(stmt.format(self.constituent_prop_type_name(), + self.constituent_prop_array_name()), + indent) + # end if + stmt = "logical, private :: {} = .false." + outfile.write(stmt.format(self.constituent_prop_init_name()), indent) + outfile.write("! Private interface for constituents", indent) + stmt = "private :: {}" + outfile.write(stmt.format(self.constituent_prop_init_consts()), indent) + + def _write_init_check(self, outfile, indent, suite_name, + errvar_names, use_errflg): + """Write a check to to make sure the constituent properties + are initialized. Write code to initialize the error variables and/or + set them to error values.""" + outfile.write('', 0) + if use_errflg: + outfile.write("errflg = 0", indent+1) + outfile.write("errmsg = ''", indent+1) + else: + raise ParseInternalError("Alternative to errflg not implemented") + # end if + outfile.write("! Make sure that our constituent array is initialized", + indent+1) + stmt = "if (.not. {}) then" + outfile.write(stmt.format(self.constituent_prop_init_name()), indent+1) + if use_errflg: + outfile.write("errflg = 1", indent+2) + stmt = 'errmsg = "constituent properties not ' + stmt += 'initialized for suite, {}"' + outfile.write(stmt.format(suite_name), indent+2) + outfile.write("end if", indent+1) + # end if (no else until an alternative error mechanism supported) + + def _write_index_check(self, outfile, indent, suite_name, + errvar_names, use_errflg): + """Write a check to to make sure the "index" input + is in bounds. Write code to set error variables if index is + out of bounds.""" + if use_errflg: + if self: + outfile.write("if (index < 1) then", indent+1) + outfile.write("errflg = 1", indent+2) + stmt = "write(errmsg, '(a,i0,a)') 'ERROR: index (',index,') " + stmt += "too small, must be >= 1'" + outfile.write(stmt, indent+2) + stmt = "else if (index > SIZE({})) then" + outfile.write(stmt.format(self.constituent_prop_array_name()), + indent+1) + outfile.write("errflg = 1", indent+2) + stmt = "write(errmsg, '(2(a,i0))') 'ERROR: index (',index,') " + stmt += "too large, must be <= ', SIZE({})" + outfile.write(stmt.format(self.constituent_prop_array_name()), + indent+2) + outfile.write("end if", indent+1) + else: + outfile.write("errflg = 1", indent+1) + stmt = "write(errmsg, '(a,i0,a)') 'ERROR: suite, {}, " + stmt += "has no constituents'" + outfile.write(stmt, indent+1) + # end if + else: + raise ParseInternalError("Alternative to errflg not implemented") + # end if + + def write_constituent_routines(self, outfile, indent, suite_name, err_vars): + """Write the subroutine that, when called allocates and defines the + suite-cap module variable describing the constituent species for + this suite. + Code is written to starting at indent, .""" + # Format our error variables + errvar_names = [x.get_prop_value('local_name') for x in err_vars] + use_errflg = ('errflg' in errvar_names) and ('errmsg' in errvar_names) + errvar_alist = ", ".join([x for x in errvar_names]) + errvar_alist2 = ", {}".format(errvar_alist) if errvar_alist else "" + errvar_call = ", ".join(["{}={}".format(x,x) for x in errvar_names]) + errvar_call2 = ", {}".format(errvar_call) if errvar_call else "" + # Allocate and define constituents + stmt = "subroutine {}({})".format(self.constituent_prop_init_consts(), + errvar_alist) + outfile.write(stmt, indent) + outfile.write("! 
Allocate and fill the constituent property array", + indent + 1) + outfile.write("! for this suite", indent+1) + outfile.write("! Dummy arguments", indent+1) + for evar in err_vars: + evar.write_def(outfile, indent+1, self, dummy=True) + # end for + if self: + outfile.write("! Local variables", indent+1) + outfile.write("integer :: index", indent+1) + stmt = "allocate({}({}))" + outfile.write(stmt.format(self.constituent_prop_array_name(), + len(self)), indent+1) + outfile.write("index = 0", indent+1) + # end if + for std_name, var in self.items(): + outfile.write("index = index + 1", indent+1) + dims = var.get_dim_stdnames() + if 'vertical_layer_dimension' in dims: + vertical_dim = 'vertical_layer_dimension' + elif 'vertical_interface_dimension' in dims: + vertical_dim = 'vertical_interface_dimension' + else: + vertical_dim = '' + # end if + advect_str = self.TF_string(var.get_prop_value('advected')) + stmt = 'call {}(index)%initialize("{}", "{}", {}{})' + outfile.write(stmt.format(self.constituent_prop_array_name(), + std_name, vertical_dim, advect_str, + errvar_call2), indent+1) + # end for + outfile.write("{} = .true.".format(self.constituent_prop_init_name()), + indent+1) + stmt = "end subroutine {}".format(self.constituent_prop_init_consts()) + outfile.write(stmt, indent) + outfile.write("", 0) + outfile.write("\n! {}\n".format("="*72), 1) + # Return number of constituents + fname = self.num_consts_funcname() + outfile.write("integer function {}({})".format(fname, errvar_alist), + indent) + outfile.write("! Return the number of constituents for this suite", + indent+1) + outfile.write("! Dummy arguments", indent+1) + for evar in err_vars: + evar.write_def(outfile, indent+1, self, dummy=True) + # end for + outfile.write("! Make sure that our constituent array is initialized", + indent+1) + stmt = "if (.not. {}) then" + outfile.write(stmt.format(self.constituent_prop_init_name()), indent+1) + outfile.write("call {}({})".format(self.constituent_prop_init_consts(), + errvar_call), indent+2) + outfile.write("end if", indent+1) + outfile.write("{} = {}".format(fname, len(self)), indent+1) + outfile.write("end function {}".format(fname), indent) + outfile.write("\n! {}\n".format("="*72), 1) + # Return the name of a constituent given an index + stmt = "subroutine {}(index, name_out{})" + outfile.write(stmt.format(self.const_name_subname(), errvar_alist2), + indent) + outfile.write("! Return the name of constituent, ", indent+1) + outfile.write("! Dummy arguments", indent+1) + outfile.write("integer, intent(in) :: index", indent+1) + outfile.write("character(len=*), intent(out) :: name_out", indent+1) + for evar in err_vars: + evar.write_def(outfile, indent+1, self, dummy=True) + # end for + self._write_init_check(outfile, indent, suite_name, + errvar_names, use_errflg) + self._write_index_check(outfile, indent, suite_name, + errvar_names, use_errflg) + if self: + stmt = "call {}(index)%standard_name(name_out{})" + outfile.write(stmt.format(self.constituent_prop_array_name(), + errvar_call2), indent+1) + # end if + outfile.write("end subroutine {}".format(self.const_name_subname()), + indent) + outfile.write("\n! {}\n".format("="*72), 1) + # Copy a consitituent's properties + stmt = "subroutine {}(index, cnst_out{})" + fname = self.copy_const_subname() + outfile.write(stmt.format(fname, errvar_alist2), indent) + outfile.write("! Copy the data for a constituent", indent+1) + outfile.write("! 
Dummy arguments", indent+1) + outfile.write("integer, intent(in) :: index", indent+1) + stmt = "type({}), intent(out) :: cnst_out" + outfile.write(stmt.format(self.constituent_prop_type_name()), indent+1) + for evar in err_vars: + evar.write_def(outfile, indent+1, self, dummy=True) + # end for + self._write_init_check(outfile, indent, suite_name, + errvar_names, use_errflg) + self._write_index_check(outfile, indent, suite_name, + errvar_names, use_errflg) + if self: + stmt = "cnst_out = {}(index)" + outfile.write(stmt.format(self.constituent_prop_array_name()), + indent+1) + # end if + outfile.write("end subroutine {}".format(fname), indent) + + def constituent_module_name(self): + """Return the name of host model constituent module""" + if not ((self.parent is not None) and + hasattr(self.parent.parent, "constituent_module")): + emsg = "ConstituentVarDict parent not HostModel?" + emsg += "\nparent is '{}'".format(type(self.parent.parent)) + raise ParseInternalError(emsg) + # end if + return self.parent.parent.constituent_module + + def num_consts_funcname(self): + """Return the name of the function which returns the number of + constituents for this suite.""" + return "{}_num_consts".format(self.name) + + def const_name_subname(self): + """Return the name of the routine that returns a constituent's + given an index""" + return "{}_const_name".format(self.name) + + def copy_const_subname(self): + """Return the name of the routine that returns a copy of a + constituent's metadata given an index""" + return "{}_copy_const".format(self.name) + + @staticmethod + def constituent_index_name(standard_name): + """Return the index name associated with """ + return "index_of_{}".format(standard_name) + + @staticmethod + def write_constituent_use_statements(cap, suite_list, indent): + """Write the suite use statements needed by the constituent + initialization routines.""" + maxmod = max([len(s.module) for s in suite_list]) + smod = len(CONST_DDT_MOD) + maxmod = max(maxmod, smod) + use_str = "use {},{} only: {}" + spc = ' '*(maxmod - smod) + cap.write(use_str.format(CONST_DDT_MOD, spc, CONST_PROP_TYPE), indent) + cap.write('! Suite constituent interfaces', indent) + for suite in suite_list: + const_dict = suite.constituent_dictionary() + smod = suite.module + spc = ' '*(maxmod - len(smod)) + fname = const_dict.num_consts_funcname() + cap.write(use_str.format(smod, spc, fname), indent) + fname = const_dict.const_name_subname() + cap.write(use_str.format(smod, spc, fname), indent) + fname = const_dict.copy_const_subname() + cap.write(use_str.format(smod, spc, fname), indent) + # end for + + @staticmethod + def write_host_routines(cap, host, reg_funcname, num_const_funcname, + copy_in_funcname, copy_out_funcname, const_obj_name, + const_names_name, const_indices_name, + suite_list, err_vars): + """Write out the host model routine which will + instantiate constituent fields for all the constituents in . + is a list of the host model's error variables. + Also write out the following routines: + : Number of constituents + : Collect constituent fields for host + : Update constituent fields from host + Output is written to . 
+ """ +# XXgoldyXX: v need to generalize host model error var type support + err_callstr = "errflg=errflg, errmsg=errmsg" +# XXgoldyXX: ^ need to generalize host model error var type support + err_names = [x.get_prop_value('local_name') for x in err_vars] + errvar_str = ', '.join(err_names) + # First up, the registration routine + substmt = "subroutine {}".format(reg_funcname) + stmt = "{}(suite_list, ncols, num_layers, num_interfaces, {})" + stmt = stmt.format(substmt, errvar_str) + cap.write(stmt, 1) + cap.write("! Create constituent object for suites in ", 2) + cap.write("", 0) + ConstituentVarDict.write_constituent_use_statements(cap, suite_list, 2) + cap.write("", 0) + cap.write("! Dummy arguments", 2) + cap.write("character(len=*), intent(in) :: suite_list(:)", 2) + cap.write("integer, intent(in) :: ncols", 2) + cap.write("integer, intent(in) :: num_layers", 2) + cap.write("integer, intent(in) :: num_interfaces", 2) + for evar in err_vars: + evar.write_def(cap, 2, host, dummy=True, add_intent="out") + # end for + cap.write("! Local variables", 2) + spc = ' '*37 + cap.write("integer{} :: num_suite_consts".format(spc), 2) + cap.write("integer{} :: num_consts".format(spc), 2) + cap.write("integer{} :: index".format(spc), 2) + cap.write("integer{} :: field_ind".format(spc), 2) + cap.write("type({}), pointer :: const_prop".format(CONST_PROP_TYPE), 2) + cap.write("", 0) + cap.write("num_consts = 0", 2) + for suite in suite_list: + const_dict = suite.constituent_dictionary() + funcname = const_dict.num_consts_funcname() + cap.write("! Number of suite constants for {}".format(suite.name), + 2) + cap.write("num_suite_consts = {}({})".format(funcname, + errvar_str), 2) + cap.write("num_consts = num_consts + num_suite_consts", 2) + # end for + cap.write("if (errflg == 0) then", 2) + cap.write("! Initialize constituent data and field object", 3) + stmt = "call {}%initialize_table(num_consts)" + cap.write(stmt.format(const_obj_name), 3) + cap.write("end if", 2) + for suite in suite_list: + cap.write("if (errflg == 0) then", 2) + cap.write("! Add {} constituent metadata".format(suite.name), 3) + const_dict = suite.constituent_dictionary() + funcname = const_dict.num_consts_funcname() + cap.write("num_suite_consts = {}({})".format(funcname, + errvar_str), 3) + cap.write("end if", 2) + funcname = const_dict.copy_const_subname() + cap.write("do index = 1, num_suite_consts", 2) + cap.write("allocate(const_prop, stat=errflg)", 3) + cap.write("if (errflg /= 0) then", 3) + cap.write('errmsg = "ERROR allocating const_prop"', 4) + cap.write("end if", 3) + cap.write("if (errflg == 0) then", 3) + stmt = "call {}(index, const_prop, {})" + cap.write(stmt.format(funcname, err_callstr), 4) + cap.write("end if", 3) + cap.write("if (errflg == 0) then", 3) + stmt = "call {}%new_field(const_prop, {})" + cap.write(stmt.format(const_obj_name, err_callstr), 4) + cap.write("end if", 3) + cap.write("nullify(const_prop)", 3) + cap.write("if (errflg /= 0) then", 3) + cap.write("exit", 4) + cap.write("end if", 3) + cap.write("end do", 2) + cap.write("", 0) + # end for + cap.write("if (errflg == 0) then", 2) + stmt = "call {}%lock_table(ncols, num_layers, num_interfaces, {})" + cap.write(stmt.format(const_obj_name, err_callstr), 3) + cap.write("end if", 2) + cap.write("! 
Set the index for each active constituent", 2) + cap.write("do index = 1, SIZE({})".format(const_indices_name), 2) + stmt = "field_ind = {}%field_index({}(index), {})" + cap.write(stmt.format(const_obj_name, const_names_name, err_callstr), 3) + cap.write("if (field_ind > 0) then", 3) + cap.write("{}(index) = field_ind".format(const_indices_name), 4) + cap.write("else", 3) + cap.write("errflg = 1", 4) + stmt = "errmsg = 'No field index for '//trim({}(index))" + cap.write(stmt.format(const_names_name), 4) + cap.write("end if", 3) + cap.write("if (errflg /= 0) then", 3) + cap.write("exit", 4) + cap.write("end if", 3) + cap.write("end do", 2) + cap.write("end {}".format(substmt), 1) + # Next, write num_consts routine + substmt = "function {}".format(num_const_funcname) + cap.write("", 0) + cap.write("integer {}({})".format(substmt, errvar_str), 1) + cap.write("! Return the number of constituent fields for this run", 2) + cap.write("", 0) + cap.write("! Dummy arguments", 2) + for evar in err_vars: + evar.write_def(cap, 2, host, dummy=True, add_intent="out") + # end for + cap.write("", 0) + cap.write("{} = {}%num_constituents({})".format(num_const_funcname, + const_obj_name, + err_callstr), 2) + cap.write("end {}".format(substmt), 1) + # Next, write copy_in routine + substmt = "subroutine {}".format(copy_in_funcname) + cap.write("", 0) + cap.write("{}(const_array, {})".format(substmt, errvar_str), 1) + cap.write("! Copy constituent field info into ", 2) + cap.write("", 0) + cap.write("! Dummy arguments", 2) + cap.write("real(kind_phys), intent(out) :: const_array(:,:,:)", 2) + for evar in err_vars: + evar.write_def(cap, 2, host, dummy=True, add_intent="out") + # end for + cap.write("", 0) + cap.write("call {}%copy_in(const_array, {})".format(const_obj_name, + err_callstr), 2) + cap.write("end {}".format(substmt), 1) + # Next, write copy_out routine + substmt = "subroutine {}".format(copy_out_funcname) + cap.write("", 0) + cap.write("{}(const_array, {})".format(substmt, errvar_str), 1) + cap.write("! Update constituent field info from ", 2) + cap.write("", 0) + cap.write("! Dummy arguments", 2) + cap.write("real(kind_phys), intent(in) :: const_array(:,:,:)", 2) + for evar in err_vars: + evar.write_def(cap, 2, host, dummy=True, add_intent="out") + # end for + cap.write("", 0) + cap.write("call {}%copy_out(const_array, {})".format(const_obj_name, + err_callstr), 2) + cap.write("end {}".format(substmt), 1) + + @staticmethod + def constitutent_source_type(): + """Return the source type for constituent species""" + return ConstituentVarDict.__constituent_type + + @staticmethod + def constituent_prop_array_name(): + """Return the name of the constituent properties array for this suite""" + return ConstituentVarDict.__const_prop_array_name + + @staticmethod + def constituent_prop_init_name(): + """Return the name of the array initialized flag for this suite""" + return ConstituentVarDict.__const_prop_init_name + + @staticmethod + def constituent_prop_init_consts(): + """Return the name of the routine to initialize the constituent + properties array for this suite""" + return ConstituentVarDict.__const_prop_init_consts + + @staticmethod + def constituent_prop_type_name(): + """Return the name of the derived type which holds constituent + properties.""" + return ConstituentVarDict.__const_prop_type_name + + @staticmethod + def write_suite_use(outfile, indent): + """Write use statements for any modules needed by the suite cap. + The statements are written to at indent, . 
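The suite-level interfaces written above follow a fixed naming convention based on the suite name; for example (the suite and standard names are illustrative):

    suite_name = 'cld_suite'
    num_consts_func = "{}_num_consts".format(suite_name)   # cld_suite_num_consts
    const_name_sub = "{}_const_name".format(suite_name)    # cld_suite_const_name
    copy_const_sub = "{}_copy_const".format(suite_name)    # cld_suite_copy_const
    index_var = "index_of_{}".format('cloud_liquid_water_mixing_ratio')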
+ """ + omsg = "use ccpp_constituent_prop_mod, only: {}" + cpt_name = ConstituentVarDict.constituent_prop_type_name() + outfile.write(omsg.format(cpt_name), indent) + + @staticmethod + def TF_string(tf_val): + """Return a string of the Fortran equivalent of """ + if tf_val: + tf_str = ".true." + else: + tf_str = ".false." + # end if + return tf_str diff --git a/scripts/ddt_library.py b/scripts/ddt_library.py new file mode 100644 index 00000000..a569e1f8 --- /dev/null +++ b/scripts/ddt_library.py @@ -0,0 +1,325 @@ +#!/usr/bin/env python +# +# Class +# + +"""Module to implement DDT support in the CCPP Framework. +VarDDT is a class to hold all information on a CCPP DDT metadata variable +""" + +# Python library imports +from __future__ import print_function +# CCPP framework imports +from parse_tools import ParseInternalError, CCPPError, context_string +from metavar import Var +from metadata_table import MetadataSection + +############################################################################### + +class VarDDT(Var): + """A class to store a variable that is a component of a DDT (at any + DDT nesting level). + """ + + def __init__(self, new_field, var_ref, logger=None, recur=False): + """Initialize a new VarDDT object. + is the DDT component. + is a Var or VarDDT whose root originates in a model + dictionary. + The structure of the VarDDT object is: + The super class Var object is a copy of the model root Var. + The (a Var) ends up at the end of a VarDDT chain. + """ + self.__field = None + # Grab the info from the root of + source = var_ref.source + super(VarDDT, self).__init__(var_ref, source, context=source.context, + logger=logger) + # Find the correct place for + if isinstance(var_ref, Var): + # We are at a top level DDT var, set our field + self.__field = new_field + else: + # Recurse to find correct (tail) location for + self.__field = VarDDT(new_field, var_ref.field, + logger=logger, recur=True) + # End if + if (not recur) and (logger is not None): + logger.debug('Adding DDT field, {}'.format(self)) + # End if + + def is_ddt(self): + """Return True iff is a DDT type.""" + return True + + def get_parent_prop(self, name): + """Return the Var property value for the parent Var object. + """ + return super(VarDDT, self).get_prop_value(name) + + def get_prop_value(self, name): + """Return the Var property value for the leaf Var object. + """ + if self.field is None: + pvalue = super(VarDDT, self).get_prop_value(name) + else: + pvalue = self.field.get_prop_value(name) + # End if + return pvalue + + def intrinsic_elements(self, check_dict=None): + """Return the Var intrinsic elements for the leaf Var object. + See Var.intrinsic_elem for details + """ + if self.field is None: + pvalue = super(VarDDT, self).intrinsic_elements(check_dict=check_dict) + else: + pvalue = self.field.intrinsic_elements(check_dict=check_dict) + # End if + return pvalue + + def clone(self, subst_dict, source_name=None, source_type=None, + context=None): + """Create a clone of this VarDDT object's leaf Var with properties + from overriding this variable's properties. + may also be a string in which case only the local_name + property is changed (to the value of the string). + The optional , , and inputs + allow the clone to appear to be coming from a designated source, + by default, the source and type are the same as this Var (self). 
+ """ + if self.field is None: + clone_var = super(VarDDT, self).clone(subst_dict, + source_name=source_name, + source_type=source_type, + context=context) + else: + clone_var = self.field.clone(subst_dict, + source_name=source_name, + source_type=source_type, + context=context) + # End if + return clone_var + + def call_string(self, var_dict, loop_vars=None): + """Return a legal call string of this VarDDT's local name sequence. + """ + # XXgoldyXX: Need to add dimensions to this + call_str = super(VarDDT, self).get_prop_value('local_name') + if self.field is not None: + call_str += '%' + self.field.call_string(var_dict, + loop_vars=loop_vars) + # End if + return call_str + + def write_def(self, outfile, indent, ddict, allocatable=False, dummy=False): + """Write the definition line for this DDT. + The type of this declaration is the type of the Var at the + end of the chain of references.""" + if self.field is None: + super(VarDDT, self).write_def(outfile, indent, ddict, + allocatable=allocatable, dummy=dummy) + else: + self.field.write_def(outfile, indent, ddict, + allocatable=allocatable, dummy=dummy) + # End if + + @staticmethod + def __var_rep(var, prefix=""): + """Internal helper function for creating VarDDT representations + Create a call string from the local_name and dimensions of . + Optionally, prepend %. + """ + lname = var.get_prop_value('local_name') + ldims = var.get_prop_value('dimensions') + if ldims: + if prefix: + lstr = '{}%{}({})'.format(prefix, lname, ', '.join(ldims)) + else: + lstr = '{}({})'.format(lname, ', '.join(ldims)) + # End if + else: + if prefix: + lstr = '{}%{}'.format(prefix, lname) + else: + lstr = '{}'.format(lname) + # End if + # End if + return lstr + + def __repr__(self): + """Print representation for VarDDT objects""" + # Note, recursion would be messy because of formatting issues + lstr = "" + sep = "" + field = self + while field is not None: + if isinstance(field, VarDDT): + lstr += sep + self.__var_rep(field.var) + field = field.field + elif isinstance(field, Var): + lstr = self.__var_rep(field, prefix=lstr) + field = None + # End if + sep = '%' + # End while + return "".format(lstr) + + def __str__(self): + """Print string for VarDDT objects""" + return self.__repr__() + + @property + def var(self): + "Return this VarDDT's Var object" + return super(VarDDT, self) + + @property + def field(self): + "Return this objects field object, or None" + return self.__field + +############################################################################### +class DDTLibrary(dict): + """DDTLibrary is a collection of DDT definitions, broken down into + individual fields with metadata. It provides efficient ways to find + the field corresponding to any standard-named field contained in + any of the (potentially nested) included DDT definitions. + The dictionary holds known standard names. 
+ """ + + def __init__(self, name, ddts=None, logger=None): + "Our dict is DDT definition headers, key is type" + self._name = '{}_ddt_lib'.format(name) + self._ddt_fields = {} # DDT field to DDT access map + self._max_mod_name_len = 0 + super(DDTLibrary, self).__init__() + if ddts is None: + ddts = list() + elif not isinstance(ddts, list): + ddts = [ddts] + # End if + # Add all the DDT headers, then process + for ddt in ddts: + if not isinstance(ddt, MetadataSection): + errmsg = 'Invalid DDT metadata type, {}' + raise ParseInternalError(errmsg.format(type(ddt))) + # End if + if not ddt.header_type == 'ddt': + errmsg = 'Metadata table header is for a {}, should be DDT' + raise ParseInternalError(errmsg.format(ddt.header_type)) + # End if + if ddt.title in self: + errmsg = "Duplicate DDT, {}, found{}, original{}" + ctx = context_string(ddt.source.context) + octx = context_string(self[ddt.title].source.context) + raise CCPPError(errmsg.format(ddt.title, ctx, octx)) + # End if + if logger is not None: + lmsg = 'Adding DDT {} to {}' + logger.debug(lmsg.format(ddt.title, self.name)) + # End if + self[ddt.title] = ddt + dlen = len(ddt.module) + if dlen > self._max_mod_name_len: + self._max_mod_name_len = dlen + # End if + # End for + + def check_ddt_type(self, var, header, lname=None): + """If is a DDT, check to make sure it is in this DDT library. + If not, raise an exception. + """ + if var.is_ddt(): + # Make sure we know this DDT type + vtype = var.get_prop_value('type') + if vtype not in self: + if lname is None: + lname = var.get_prop_value('local_name') + # End if + errmsg = 'Variable {} is of unknown type ({}) in {}' + ctx = context_string(var.context) + raise CCPPError(errmsg.format(lname, vtype, header.title, ctx)) + # End if + # End if (no else needed) + + def collect_ddt_fields(self, var_dict, var, ddt=None): + """Add all the reachable fields from DDT variable of type, + to . Each field is added as a VarDDT. + """ + if ddt is None: + vtype = var.get_prop_value('type') + if vtype in self: + ddt = self[vtype] + else: + lname = var.get_prop_value('local_name') + ctx = context_string(var.context) + errmsg = "Variable, {}, is not a known DDT{}" + raise ParseInternalError(errmsg.format(lname, ctx)) + # End if + # End if + for dvar in ddt.variable_list(): + subvar = VarDDT(dvar, var) + dvtype = dvar.get_prop_value('type') + if (dvar.is_ddt()) and (dvtype in self): + # If DDT in our library, we need to add sub-fields recursively. + subddt = self[dvtype] + self.collect_ddt_fields(var_dict, subvar, subddt) + else: + # add_variable only checks the current dictionary. For a + # DDT, the variable also cannot be in our parent dictionaries. + stdname = dvar.get_prop_value('standard_name') + pvar = var_dict.find_variable(standard_name=stdname, + any_scope=True) + if pvar: + emsg = "Attempt to add duplicate DDT sub-variable, {}{}." + emsg += "\nVariable originally defined{}" + ntx = context_string(dvar.context) + ctx = context_string(pvar.context) + raise CCPPError(emsg.format(stdname, ntx, ctx)) + # end if + # Add this intrinsic to + var_dict.add_variable(subvar) + # End for + + def ddt_modules(self, variable_list, ddt_mods=None): + """Collect information for module use statements. + Add module use information (module name, DDT name) for any variable + in which is a DDT in this library. 
+ """ + if ddt_mods is None: + ddt_mods = set() # Need a new set for every call + # End if + for var in variable_list: + vtype = var.get_prop_value('type') + if vtype in self: + module = self[vtype].module + ddt_mods.add((module, vtype)) + # End if + # End for + return ddt_mods + + def write_ddt_use_statements(self, variable_list, outfile, indent, pad=0): + """Write the use statements for all ddt modules needed by + """ + pad = max(pad, self._max_mod_name_len) + ddt_mods = self.ddt_modules(variable_list) + for ddt_mod in ddt_mods: + dmod = ddt_mod[0] + dtype = ddt_mod[1] + slen = ' '*(pad - len(dmod)) + ustring = 'use {},{} only: {}' + outfile.write(ustring.format(dmod, slen, dtype), indent) + # End for + + @property + def name(self): + "Return the name of this DDT library" + return self._name + +############################################################################### +if __name__ == "__main__": + import doctest + doctest.testmod() diff --git a/scripts/file_utils.py b/scripts/file_utils.py new file mode 100644 index 00000000..8742ef6a --- /dev/null +++ b/scripts/file_utils.py @@ -0,0 +1,317 @@ +#!/usr/bin/env python + +""" +Utilities for checking and manipulating file status +""" + +# Python library imports +from __future__ import absolute_import +from __future__ import unicode_literals + +import filecmp +import glob +import os +# CCPP framework imports +from parse_tools import CCPPError, ParseInternalError +#XXgoldyXX: v Crap required to support python 2 +import sys +# Find python version +PY3 = sys.version_info[0] > 2 +#XXgoldyXX: ^ Crap required to support python 2 + +# Standardize name of generated kinds file and module +KINDS_MODULE = 'ccpp_kinds' +KINDS_FILENAME = '{}.F90'.format(KINDS_MODULE) + +############################################################################### +def check_for_existing_file(filename, description, readable=True): +############################################################################### + """Check for file existence and access. + Return a list of error strings in case + does not exist or does not have read access and is True""" + errors = list() + if os.path.exists(filename): + if readable: + if not os.access(filename, os.R_OK): + errmsg = "No read access to {}, '{}'" + errors.append(errmsg.format(description, filename)) + # end if (no else, everything is fine) + # end if (no else, everything is fine) + else: + errors.append("{}, '{}', must exist".format(description, filename)) + # end if + return errors + +############################################################################### +def check_for_writeable_file(filename, description): +############################################################################### + """If exists but not writable, raise an error. + If does not exist and its directory is not writable, raise + an error. is a description of .""" + if os.path.exists(filename) and not os.access(filename, os.W_OK): + raise CCPPError("Cannot write {}, '{}'".format(description, filename)) + # end if + if not os.access(os.path.dirname(filename), os.W_OK): + raise CCPPError("Cannot write {}, '{}'".format(description, filename)) + # end if (else just return) + +############################################################################### +def add_unique_files(filepath, pdesc, master_list, logger): +############################################################################### + """Add any new files indicated by to . + Check each file for readability. 
+ Log duplicate files + Return a list of errors found + Wildcards in are expanded""" + errors = list() + for file in glob.glob(filepath): + errs = check_for_existing_file(file, pdesc) + if errs: + errors.extend(errs) + elif file in master_list: + lmsg = "WARNING: Ignoring duplicate file, {}" + logger.warning(lmsg.format(file)) + else: + master_list.append(file) + # end if + # end for + return errors + +############################################################################### +def read_pathnames_from_file(pathsfile, file_type): +############################################################################### + """Read and return path names from . + Convert relative pathnames to use 's directory as root. + Also return a list of any errors encountered + """ + # We want to end up with absolute paths, treat as root location + root_path = os.path.dirname(os.path.abspath(pathsfile)) + file_list = list() + pdesc = '{} pathsnames file'.format(file_type) + errors = check_for_existing_file(pathsfile, pdesc) + pdesc = '{} pathname in {}'.format(file_type, pathsfile) + if not errors: + with open(pathsfile, 'r') as infile: + for line in infile.readlines(): + path = line.strip() + # Skip blank lines & lines which appear to start with a comment. + if path and (path[0] not in ['#', '!']): + # Check for an absolute path + if not os.path.isabs(path): + path = os.path.normpath(os.path.join(root_path, path)) + # end if + file_list.append(path) + # end if (else skip blank or comment line) + # end for + # end with open + # end if (no else, we already have the errors) + return file_list, errors + +############################################################################### +def _create_file_list_int(files, suffices, file_type, logger, + txt_files, pathname, root_path, master_list): +############################################################################### + """Create and return a master list of files from . + is a list of pathnames which may include wildcards. + is a list of allowed file types. Filenames in + with an allowed suffix will be added to the master list. + Filenames with a '.txt' suffix will be parsed to look for allowed + filenames. + is a description of the allowed file types. + is a logger used to print warnings (unrecognized filename types) + and debug messages. + is a list of previously-encountered text files (to prevent + infinite recursion). + is the text file name from which was read (if any). 
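The pathname normalization above reduces to a few lines; a runnable sketch with an illustrative root directory and file contents:

    import os

    root_path = '/home/user/ccpp'          # directory containing the pathnames file
    entries = ['# comment', '', 'schemes/kessler.meta', '/abs/path/mp.meta']
    paths = []
    for entry in entries:
        path = entry.strip()
        if path and path[0] not in ('#', '!'):    # skip blanks and comments
            if not os.path.isabs(path):
                path = os.path.normpath(os.path.join(root_path, path))
            paths.append(path)
    # paths == ['/home/user/ccpp/schemes/kessler.meta', '/abs/path/mp.meta']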
+ is the list of files which have already been collected + A list of error strings is also returned + """ + errors = list() + if pathname: + pdesc = '{} pathname file, found in {}'.format(file_type, pathname) + else: + pdesc = '{} pathnames file'.format(file_type) + # end if + if not isinstance(files, list): + raise ParseInternalError("'{}' is not a list".format(files)) + # end if + for filename in files: + # suff is filename's extension + suff = os.path.splitext(filename)[1] + if suff: + suff = suff[1:] + # end if + if not os.path.isabs(filename): + filename = os.path.normpath(os.path.join(root_path, filename)) + # end if + if os.path.isdir(filename): + for suff_type in suffices: + file_type = os.path.join(filename, '*.{}'.format(suff_type)) + errs = add_unique_files(file_type, pdesc, master_list, logger) + errors.extend(errs) + # end for + elif suff in suffices: + errs = add_unique_files(filename, pdesc, master_list, logger) + errors.extend(errs) + elif suff == 'txt': + tfiles = glob.glob(filename) + if tfiles: + for file in tfiles: + if file in txt_files: + lmsg = "WARNING: Ignoring duplicate '.txt' file, {}" + logger.warning(lmsg.format(filename)) + else: + lmsg = 'Reading .{} filenames from {}' + logger.debug(lmsg.format(', .'.join(suffices), + file)) + flist, errs = read_pathnames_from_file(file, file_type) + errors.extend(errs) + txt_files.append(file) + root = os.path.dirname(file) + _, errs = _create_file_list_int(flist, suffices, + file_type, logger, + txt_files, file, + root, master_list) + errors.extend(errs) + # end if + # end for + else: + emsg = "{} pathnames file, '{}', does not exist" + errors.append(emsg.format(file_type, filename)) + # end if + else: + lmsg = 'WARNING: Not reading {}, only reading .{} or .txt files' + logger.warning(lmsg.format(filename, ', .'.join(suffices))) + # end if + # end for + + return master_list, errors + +############################################################################### +def create_file_list(files, suffices, file_type, logger, root_path=None): +############################################################################### + """Create and return a master list of files from . + is either a comma-separated string of pathnames or a list. + If a pathname is a directory, all files with extensions in + are included. + Wildcards in a pathname are expanded. + is a list of allowed file types. Filenames in + with an allowed suffix will be added to the master list. + Filenames with a '.txt' suffix will be parsed to look for allowed + filenames. + is a description of the allowed file types. + is a logger used to print warnings (unrecognized filename types) + and debug messages. + If is not None, it is used to create absolute paths for + , otherwise, the current working directory is used. 
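The dispatch on file suffix described above can be summarized as follows (the suffix list and filename are illustrative):

    import os

    suffices = ['meta']
    filename = 'suite_files.txt'
    suff = os.path.splitext(filename)[1].lstrip('.')
    if suff in suffices:
        action = 'add the file to the master list'
    elif suff == 'txt':
        action = 'read further pathnames from the file'
    else:
        action = 'log a warning and skip the file'
    # action == 'read further pathnames from the file'
    # (a directory is instead globbed for one pattern per allowed suffix)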
+ """ + master_list = list() + txt_files = list() # Already processed txt files + pathname = None + if isinstance(files, str): + file_list = [x.strip() for x in files.split(',')] + elif isinstance(files, (list, tuple)): + file_list = files + else: + raise ParseInternalError("Bad input, = {}".format(files)) + # end if + if root_path is None: + root_path = os.getcwd() + # end if + master_list, errors = _create_file_list_int(file_list, suffices, file_type, + logger, txt_files, pathname, + root_path, master_list) + if errors: + emsg = 'Error processing list of {} files:\n {}' + raise CCPPError(emsg.format(file_type, '\n '.join(errors))) + # end if + return master_list + +############################################################################### +def replace_paths(dir_list, src_dir, dest_dir): +############################################################################### + """For every path in , replace instances of with + """ + for index, path in enumerate(dir_list): + dir_list[index] = path.replace(src_dir, dest_dir) + # end for + +############################################################################### +def remove_dir(src_dir, force=False): +############################################################################### + """Remove and its children. This operation can only succeed if + contains no files or if is True.""" + currdir = os.getcwd() + src_parent = os.path.split(src_dir)[0] + src_rel = os.path.relpath(src_dir, src_parent) + os.chdir(src_parent) # Prevent removing the parent of src_dir + if force: + leaf_dirs = set() + for root, dirs, files in os.walk(src_rel): + for file in files: + os.remove(os.path.join(root, file)) + # end for + if not dirs: + leaf_dirs.add(root) + # end if + # end for + for ldir in leaf_dirs: + os.removedirs(ldir) + # end for + # end if (no else, always try to remove top level + try: + os.removedirs(src_rel) + except OSError: + pass # Ignore error, fail silently + # end try + os.chdir(currdir) + +############################################################################### +def move_modified_files(src_dir, dest_dir, overwrite=False, remove_src=False): +############################################################################### + """For each file in , move it to if that file is + different in the two locations. + if is True, move all files to , even if unchanged. 
+ If is True, remove when complete.""" + src_files = {} # All files in + if os.path.normpath(src_dir) != os.path.normpath(dest_dir): + for root, _, files in os.walk(src_dir): + for file in files: + src_path = os.path.join(root, file) + if file in src_files: + # We do not allow two files with the same name + emsg = "Duplicate CCPP file found, '{}', original is '{}'" + raise CCPPError(emsg.format(src_path, src_files[file])) + # end if + src_files[file] = src_path + # end for + # end for + for file in src_files: + src_path = src_files[file] + src_file = os.path.relpath(src_path, start=src_dir) + dest_path = os.path.join(dest_dir, src_file) + if os.path.exists(dest_path): + if overwrite: + fmove = True + else: + fmove = filecmp.cmp(src_path, dest_path, shallow=False) + # end if + else: + fmove = True + # end if + if fmove: +#XXgoldyXX: v Crap required to support python 2 + if PY3: + os.replace(src_path, dest_path) + else: + os.rename(src_path, dest_path) + # end if +#XXgoldyXX: ^ Crap required to support python 2 + else: + os.remove(src_path) + # end if + # end for + if remove_src: + remove_dir(src_dir, force=True) + # end if + # end if (no else, take no action if the directories are identical) diff --git a/scripts/fortran_tools/__init__.py b/scripts/fortran_tools/__init__.py index df4d555b..904581b4 100644 --- a/scripts/fortran_tools/__init__.py +++ b/scripts/fortran_tools/__init__.py @@ -1,11 +1,19 @@ """Public API for the fortran_parser library """ +from __future__ import absolute_import +import sys +import os.path +sys.path.insert(0, os.path.dirname(__file__)) + +# pylint: disable=wrong-import-position +from parse_fortran_file import parse_fortran_file +from parse_fortran import parse_fortran_var_decl, fortran_type_definition +from fortran_write import FortranWriter +# pylint: enable=wrong-import-position __all__ = [ 'fortran_type_definition', 'parse_fortran_file', - 'parse_fortran_var_decl' + 'parse_fortran_var_decl', + 'FortranWriter' ] - -from parse_fortran_file import parse_fortran_file -from parse_fortran import parse_fortran_var_decl, fortran_type_definition diff --git a/scripts/fortran_tools/fortran_write.py b/scripts/fortran_tools/fortran_write.py new file mode 100644 index 00000000..ac9a483e --- /dev/null +++ b/scripts/fortran_tools/fortran_write.py @@ -0,0 +1,311 @@ +#!/usr/bin/env python +# + +"""Code to write Fortran code +""" + +# Python library imports +from __future__ import print_function +# CCPP framework imports + +class FortranWriter(object): + """Class to turn output into properly continued and indented Fortran code + >>> FortranWriter("foo.F90", 'r', 'test', 'mod_name') #doctest: +IGNORE_EXCEPTION_DETAIL + Traceback (most recent call last): + ValueError: Read mode not allowed in FortranWriter object + >>> FortranWriter("foo.F90", 'wb', 'test', 'mod_name') #doctest: +IGNORE_EXCEPTION_DETAIL + Traceback (most recent call last): + ValueError: Binary mode not allowed in FortranWriter object + """ + + ########################################################################### + # Class variables + ########################################################################### + __INDENT = 3 # Spaces per indent level + + __CONTINUE_INDENT = 5 # Extra spaces on continuation line + + __LINE_FILL = 97 # Target line length + + __LINE_MAX = 130 # Max line length + + # CCPP copyright statement to be included in all generated Fortran files + __COPYRIGHT = '''! +! This work (Common Community Physics Package Framework), identified by +! 
NOAA, NCAR, CU/CIRES, is free of known copyright restrictions and is +! placed in the public domain. +! +! THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +! IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +! FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL +! THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +! IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +! CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +''' + + __MOD_HEADER = ''' +!> +!! @brief Auto-generated {file_desc} +!! +! +module {module} +''' + + __MOD_PREAMBLE = ["implicit none", "private"] + + __CONTAINS = ''' +CONTAINS''' + + __MOD_FOOTER = ''' +end module {module}''' + + ########################################################################### + + def indent(self, level=0, continue_line=False): + 'Return an indent string for any level' + indent = self._indent * level + if continue_line: + indent = indent + self._continue_indent + # End if + return indent*' ' + + ########################################################################### + + def find_best_break(self, choices, last=None): + """Find the best line break point given . + If is present, use it as a target line length.""" + if last is None: + last = self._line_fill + # End if + # Find largest good break + possible = [x for x in choices if x < last] + if not possible: + best = self._line_max + 1 + else: + best = max(possible) + # End if + if (best > self._line_max) and (last < self._line_max): + best = self.find_best_break(choices, last=self._line_max) + # End if + return best + + ########################################################################### + + def write(self, statement, indent_level, continue_line=False): + """Write to the open file, indenting to + (see self.indent). 
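The indentation arithmetic above is a simple linear formula; with the class defaults, a continued statement at level 2 starts after 11 blanks:

    INDENT = 3             # spaces per indent level (class default above)
    CONTINUE_INDENT = 5    # extra spaces on a continuation line

    def indent(level, continue_line=False):
        return ' ' * (INDENT * level + (CONTINUE_INDENT if continue_line else 0))

    # len(indent(2))                     -> 6
    # len(indent(2, continue_line=True)) -> 11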
+ If is True, treat this line as a continuation of + a previous statement.""" + if isinstance(statement, list): + for stmt in statement: + self.write(stmt, indent_level, continue_line) + # End for + elif '\n' in statement: + for stmt in statement.split('\n'): + self.write(stmt, indent_level, continue_line) + # End for + else: + istr = self.indent(indent_level, continue_line) + outstr = istr + statement.strip() + line_len = len(outstr) + if line_len > self._line_fill: + # Collect pretty break points + spaces = list() + commas = list() + sptr = len(istr) + in_single_char = False + in_double_char = False + while sptr < line_len: + if in_single_char: + if outstr[sptr] == "'": + in_single_char = False + # End if (no else, just copy stuff in string) + elif in_double_char: + if outstr[sptr] == '"': + in_double_char = False + # End if (no else, just copy stuff in string) + elif outstr[sptr] == "'": + in_single_char = True + elif outstr[sptr] == '"': + in_double_char = True + elif outstr[sptr] == '!': + # Comment in non-character context, suck in rest of line + spaces.append(sptr-1) + sptr = line_len - 1 + elif outstr[sptr] == ' ': + # Non-quote spaces are where we can break + spaces.append(sptr) + elif outstr[sptr] == ',': + # Non-quote commas are where we can break + commas.append(sptr) + elif outstr[sptr:sptr+2] == '//': + # Non-quote commas are where we can break + commas.append(sptr + 1) + # End if (no else, other characters will be ignored) + sptr = sptr + 1 + # End while + best = self.find_best_break(spaces) + if best >= self._line_fill: + best = self.find_best_break(commas) + # End if + if best > self._line_max: + # This is probably a bad situation that might not + # compile, just write the line and hope for the best. + line_continue = False + elif len(outstr) > best: + # If next line is just comment, do not use continue + # NB: Is this a Fortran issue or just a gfortran issue? + line_continue = outstr[best+1:].lstrip()[0] != '!' + else: + line_continue = True + # End if + if line_continue: + fill = "{}&".format((self._line_fill - best)*' ') + else: + fill = '' + # End if + self._file.write("{}{}\n".format(outstr[0:best+1], fill)) + statement = outstr[best+1:] + self.write(statement, indent_level, continue_line=line_continue) + else: + self._file.write("{}\n".format(outstr)) + # End if + # End if + + ########################################################################### + + def __init__(self, filename, mode, file_description, module_name, + indent=None, continue_indent=None, + line_fill=None, line_max=None): + """Initialize thie FortranWriter object. 
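The break-point scan above only records blanks and commas that fall outside character literals. A simplified sketch for single-quoted strings (the sample line is illustrative):

    line = "call foo(a, 'b, c', d)"
    breaks = []
    in_quote = False
    for col, char in enumerate(line):
        if char == "'":
            in_quote = not in_quote
        elif (not in_quote) and char in (' ', ','):
            breaks.append(col)
    # The blank and comma inside 'b, c' are excluded; the line may only be
    # continued (with a trailing '&') at separators outside the literal.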
+ Some boilerplate is written automatically.""" + self.__file_desc = file_description + self.__module = module_name + # We only handle writing situations (for now) and only text + if 'r' in mode: + raise ValueError('Read mode not allowed in FortranWriter object') + # end if + if 'b' in mode: + raise ValueError('Binary mode not allowed in FortranWriter object') + # End if + self._file = open(filename, mode) + if indent is None: + self._indent = FortranWriter.__INDENT + else: + self._indent = indent + # End if + if continue_indent is None: + self._continue_indent = FortranWriter.__CONTINUE_INDENT + else: + self._continue_indent = continue_indent + # End if + if line_fill is None: + self._line_fill = FortranWriter.__LINE_FILL + else: + self._line_fill = line_fill + # End if + if line_max is None: + self._line_max = FortranWriter.__LINE_MAX + else: + self._line_max = line_max + # End if + + ########################################################################### + + def write_preamble(self): + """Write the module boilerplate that goes between use statements + and module declarations.""" + self.write("", 0) + for stmt in FortranWriter.__MOD_PREAMBLE: + self.write(stmt, 1) + # end for + self.write("", 0) + + ########################################################################### + + def end_module_header(self): + """Write the module contains statement.""" + self.write(FortranWriter.__CONTAINS, 0) + + ########################################################################### + + def __enter__(self, *args): + self.write(FortranWriter.__COPYRIGHT, 0) + self.write(self.module_header(), 0) + return self + + ########################################################################### + + def __exit__(self, *args): + self.write(FortranWriter.__MOD_FOOTER.format(module=self.__module), 0) + self._file.close() + return False + + ########################################################################### + + def module_header(self): + """Return the standard Fortran module header for and + """ + return FortranWriter.__MOD_HEADER.format(file_desc=self.__file_desc, + module=self.__module) + + ########################################################################### + + @classmethod + def copyright(cls): + """Return the standard Fortran file copyright string""" + return cls.__COPYRIGHT + +############################################################################### +if __name__ == "__main__": + # First, run doctest + import doctest + doctest.testmod() + # Make sure we can write a file + import sys + import os + import os.path + sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) + NAME = 'foo' + while os.path.exists(NAME+'.F90'): + NAME = NAME + 'xo' + # End while + NAME = NAME + '.F90' + if os.access(os.getcwd(), os.W_OK): + _CHECK = FortranWriter.copyright().split('\n') + with FortranWriter(NAME, 'w', 'doctest', 'foo') as foo: + foo.write_preamble() + foo.end_module_header() + foo.write(("subroutine foo(long_argument1, long_argument2, " + "long_argument3, long_argument4, long_argument5)"), 2) + foo.write("end subroutine foo", 2) + _CHECK.extend(foo.module_header().rstrip().split('\n')) + # End with + _CHECK.extend(["", "", " implicit none", " private", + "", "", "CONTAINS"]) + _CHECK.extend([(' subroutine foo(long_argument1, long_argument2, ' + 'long_argument3, long_argument4, &'), + ' long_argument5)', + ' end subroutine foo', '', + 'end module foo']) + # Check file + with open(NAME, 'r') as foo: + _STATEMENTS = foo.readlines() + if len(_STATEMENTS) != len(_CHECK): + EMSG 
= "ERROR: File has {} statements, should have {}" + print(EMSG.format(len(_STATEMENTS), len(_CHECK))) + else: + for _line_num, _statement in enumerate(_STATEMENTS): + if _statement.rstrip() != _CHECK[_line_num]: + EMSG = "ERROR: Line {} does not match" + print(EMSG.format(_line_num+1)) + print("{}".format(_statement.rstrip())) + print("{}".format(_CHECK[_line_num])) + # End if + # End for + # End with + os.remove(NAME) + else: + print("WARNING: Unable to write test file, '{}'".format(NAME)) + # End if +# No else diff --git a/scripts/fortran_tools/parse_fortran.py b/scripts/fortran_tools/parse_fortran.py old mode 100644 new mode 100755 index 4f130b14..1bcd9dd3 --- a/scripts/fortran_tools/parse_fortran.py +++ b/scripts/fortran_tools/parse_fortran.py @@ -1,15 +1,23 @@ #!/usr/bin/env python +"""Types and code for parsing Fortran source code. +""" + +# pylint: disable=wrong-import-position if __name__ == '__main__' and __package__ is None: import sys import os.path sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) import re from parse_tools import ParseSyntaxError, ParseInternalError -from parse_tools import ParseContext, ParseSource, context_string -from parse_tools import check_fortran_intrinsic, check_fortran_type -from parse_tools import check_balanced_paren +from parse_tools import ParseContext, context_string +from parse_tools import check_fortran_intrinsic +from parse_tools import check_balanced_paren, unique_standard_name +#pylint: disable=unused-import +from parse_tools import ParseSource # Used in doctest +#pylint: enable=unused-import from metavar import Var +# pylint: enable=wrong-import-position # A collection of types and tools for parsing Fortran code to support # CCPP metadata parsing. The purpose of this code is limited to type @@ -19,15 +27,15 @@ ######################################################################## # Fortran ID specifier (do not want a group like FORTRAN_ID from parse_tools) -_fortran_id_ = r"(?:[A-Za-z][A-Za-z0-9_]*)" +_FORTRAN_ID = r"(?:[A-Za-z][A-Za-z0-9_]*)" # Regular expression for a dimension specifier -_dimid_ = r"(?:"+_fortran_id_+r"|[0-9]+)" -_dimcolon_ = r"(?:\s*:\s*"+_dimid_+r"?\s*)" -_dimcolons_ = r"(?:"+_dimid_+r"?"+_dimcolon_+_dimcolon_+r"?)" -_dimspec_ = r"(?:"+_dimid_+r"|"+_dimcolons_+r")" -_dims_list_ = _dimspec_+r"(?:\s*,\s*"+_dimspec_+r"){0,6}" +_DIMID = r"(?:"+_FORTRAN_ID+r"|[0-9]+)" +_DIMCOLON = r"(?:\s*:\s*"+_DIMID+r"?\s*)" +_DIMCOLONS = r"(?:"+_DIMID+r"?"+_DIMCOLON+_DIMCOLON+r"?)" +_DIMSPEC = r"(?:"+_DIMID+r"|"+_DIMCOLONS+r")" +_dims_list_ = _DIMSPEC+r"(?:\s*,\s*"+_DIMSPEC+r"){0,6}" # Regular expression for a variable name with optional dimensions -_var_id_re_ = re.compile(r"("+_fortran_id_+r")\s*(\(\s*"+_dims_list_+r"\s*\))?$") +_VAR_ID_RE = re.compile(r"("+_FORTRAN_ID+r")\s*(\(\s*"+_dims_list_+r"\s*\))?$") ######################################################################## @@ -62,108 +70,144 @@ class Ftype(object): # Note that "character" is not in intrinsic_types even though it is a # Fortran intrinsic. This is because character has its own type. 
- __intrinsic_types__ = [ r"integer", r"real", r"logical", - r"double\s*precision", r"complex" ] - - itype_re = re.compile(r"(?i)({})\s*(\([A-Za-z0-9,=_\s]+\))?".format(r"|".join(__intrinsic_types__))) - kind_re = re.compile(r"(?i)kind\s*(\()?\s*([\'\"])?(.+?)([\'\"])?\s*(\))?") + __intrinsic_types__ = [r"integer", r"real", r"logical", + r"double\s*precision", r"complex"] - __attr_spec__ = ['allocatable', 'asynchronous', 'dimension', 'external', - 'intent', 'intrinsic', 'bind', 'optional', 'parameter', - 'pointer', 'private', 'protected', 'public', 'save', - 'target', 'value', 'volatile'] + __itype_re = re.compile(r"(?i)({})\s*(\([A-Za-z0-9,=_\s]+\))?".format(r"|".join(__intrinsic_types__))) + __kind_re = re.compile(r"(?i)kind\s*(\()?\s*([\'\"])?(.+?)([\'\"])?\s*(\))?") - __sname_num__ = 0 # Counter for unique standard names + __attr_spec = ['allocatable', 'asynchronous', 'dimension', 'external', + 'intent', 'intrinsic', 'bind', 'optional', 'parameter', + 'pointer', 'private', 'protected', 'public', 'save', + 'target', 'value', 'volatile'] - def __init__(self, typestr_in=None, kind_in=None, line_in=None, context=None): + def __init__(self, typestr_in=None, kind_in=None, match_len_in=None, + line_in=None, context=None): + """Initialize this FType object, either using and + , OR using line_in.""" if context is None: - self._context = ParseContext() + self.__context = ParseContext() else: - self._context = ParseContext(context=context) + self.__context = ParseContext(context=context) + # end if # We have to distinguish which type of initialization we have + self.__typestr = typestr_in if typestr_in is not None: if line_in is not None: - raise ParseInternalError("typestr_in and line_in cannot both be used in a single call", self._context) - # End if - self._typestr = typestr_in - self.default_kind = kind_in is None + emsg = "Cannot pass both typestr_in and line_in as arguments" + raise ParseInternalError(emsg, self.__context) + # end if + self.__default_kind = kind_in is None if kind_in is None: - self._kind = None + self.__kind = None elif kind_in[0] == '(': # Parse an explicit kind declaration - self._kind = self.parse_kind_selector(kind_in) + self.__kind = self.parse_kind_selector(kind_in) else: # The kind has already been parsed for us (e.g., by character) - self._kind = kind_in + self.__kind = kind_in + # end if + if match_len_in is not None: + self.__match_len = match_len_in + else: + self.__match_len = len(self.typestr) + if kind_in is not None: + self.__match_len += len(self.__kind) + 2 + # end if + # end if elif kind_in is not None: - raise ParseInternalError("kind_in cannot be passed without typestr_in", self._context) + emsg = "kind_in cannot be passed without typestr_in" + raise ParseInternalError(emsg, self.__context) elif line_in is not None: match = Ftype.type_match(line_in) - self._match_len = len(match.group(0)) if match is None: - raise ParseSyntaxError("type declaration", token=line_in, context=self._context) - elif check_fortran_intrinsic(match.group(1)): - self._typestr = match.group(1) + emsg = "type declaration" + raise ParseSyntaxError(emsg, token=line_in, + context=self.__context) + # end if + if match_len_in is not None: + self.__match_len = match_len_in + else: + self.__match_len = len(match.group(0)) + # end if + if check_fortran_intrinsic(match.group(1)): + self.__typestr = match.group(1) if match.group(2) is not None: # Parse kind section - self._kind = self.parse_kind_selector(match.group(2).strip()) + kmatch = match.group(2).strip() + self.__kind = 
self.parse_kind_selector(kmatch) else: - self._kind = None - # End if - self.default_kind = self._kind is None + self.__kind = None + # end if + self.__default_kind = self.__kind is None else: - raise ParseSyntaxError("type declaration", token=line_in, context=self._context) + raise ParseSyntaxError("type declaration", + token=line_in, context=self.__context) + # end if else: - raise ParseInternalError("At least one of typestr_in or line must be passed", self._context) + emsg = "At least one of typestr_in or line_in must be passed" + raise ParseInternalError(emsg, self.__context) + # end if def parse_kind_selector(self, kind_selector, context=None): + """Find and return the 'kind' value from + '(foo)' and '(kind=foo)' both return 'foo'""" if context is None: if hasattr(self, 'context'): - context = self._context + context = self.__context else: context = ParseContext() - # End if + # end if kind = None if (kind_selector[0] == '(') and (kind_selector[-1] == ')'): args = kind_selector[1:-1].split('=') else: args = kind_selector.split('=') - # End if + # end if if (len(args) > 2) or (len(args) < 1): - raise ParseSyntaxError("kind_selector", token=kind_selector, context=context) - elif len(args) == 1: + raise ParseSyntaxError("kind_selector", + token=kind_selector, context=context) + # end if + if len(args) == 1: kind = args[0].strip() elif args[0].strip().lower() != 'kind': # We have two args, the first better be kind - raise ParseSyntaxError("kind_selector", token=kind_selector, context=context) - else: + raise ParseSyntaxError("kind_selector", + token=kind_selector, context=context) + # end if + if kind is None: # We have two args and the second is our kind string kind = args[1].strip() - # End if + # end if # One last check for missing right paren - match = Ftype.kind_re.search(kind) + match = Ftype.__kind_re.search(kind) if match is not None: if match.group(2) is not None: if match.group(2) != match.group(4): - raise ParseSyntaxError("kind_selector", token=kind_selector, context=context) - elif (match.group(1) is None) and (match.group(5) is not None): - raise ParseSyntaxError("kind_selector", token=kind_selector, context=context) - elif (match.group(1) is not None) and (match.group(5) is None): - raise ParseSyntaxError("kind_selector", token=kind_selector, context=context) - else: - pass + raise ParseSyntaxError("kind_selector", + token=kind_selector, context=context) + # end if + if (match.group(1) is None) and (match.group(5) is not None): + raise ParseSyntaxError("kind_selector", + token=kind_selector, context=context) + # end if + if (match.group(1) is not None) and (match.group(5) is None): + raise ParseSyntaxError("kind_selector", + token=kind_selector, context=context) + # end if else: pass elif kind[0:4].lower() == "kind": - raise ParseSyntaxError("kind_selector", token=kind_selector, context=context) - else: - pass + # Got 'something' == 'kind'?? 
+ raise ParseSyntaxError("kind_selector", + token=kind_selector, context=context) + # end if return kind @classmethod def type_match(cls, line): """Return an RE match if represents an Ftype declaration""" - match = Ftype.itype_re.match(line.strip()) + match = Ftype.__itype_re.match(line.strip()) return match @classmethod @@ -176,32 +220,32 @@ def reassemble_parens(cls, propstr, errstr, context, splitstr=','): >>> Ftype.reassemble_parens("dimension(size(Grid%xlon,1),NSPC1), intent(in)", 'spec', ParseContext()) ['dimension(size(Grid%xlon,1),NSPC1)', 'intent(in)'] """ - vars = list() + var_list = list() proplist = propstr.split(splitstr) while len(proplist) > 0: var = proplist.pop(0) while var.count('(') != var.count(')'): if len(proplist) == 0: raise ParseSyntaxError(errstr, token=propstr, context=context) - # End if + # end if var = var + ',' + proplist.pop(0) - # End while + # end while var = var.strip() if len(var) > 0: - vars.append(var) - # End if - # End while - return vars + var_list.append(var) + # end if + # end while + return var_list @classmethod def parse_attr_specs(cls, propstring, context): - 'Return a list of variable properties' + """Return a list of variable properties""" properties = list() # Remove leading comma propstring = propstring.strip() - if (len(propstring) > 0) and (propstring[0] == ','): + if propstring and (propstring[0] == ','): propstring = propstring[1:].lstrip() - # End if + # end if proplist = cls.reassemble_parens(propstring, 'attr_spec', context) for prop in proplist: prop = prop.strip().lower() @@ -210,79 +254,84 @@ def parse_attr_specs(cls, propstring, context): pval = prop[0:prop.index('(')].strip() else: pval = prop - # End if - if pval not in cls.__attr_spec__: + # end if + if pval not in cls.__attr_spec: raise ParseSyntaxError('attr_spec', token=prop, context=context) - # End if + # end if properties.append(prop) - # End for + # end for return properties - @classmethod - def unique_standard_name(cls): - cls.__sname_num__ = cls.__sname_num__ + 1 - return 'enter_standard_name_{}'.format(cls.__sname_num__) - @property def typestr(self): - return self._typestr + """ Return this FType object's type string""" + return self.__typestr @property + def default_kind(self): + """Return True iff this FType object is of default kind.""" + return self.__default_kind + def kind(self): - return self._kind + """ Return this FType's kind string""" + return self.__kind @property def type_len(self): - return self._match_len + """ Return the length of this FType's kind string""" + return self.__match_len def __str__(self): """Return a string of the declaration of the type""" if self.default_kind: return self.typestr - elif check_fortran_intrinsic(self.typestr): - return "{}(kind={})".format(self.typestr, self._kind) - else: - # Derived type - return "{}({})".format(self.typestr, self._kind) + # end if + if check_fortran_intrinsic(self.typestr): + return "{}(kind={})".format(self.typestr, self.__kind) + # end if + # Derived type + return "{}({})".format(self.typestr, self.__kind) ######################################################################## -class Ftype_character(Ftype): - """Ftype_character is a type that represents character types - >>> Ftype_character.type_match('character') #doctest: +ELLIPSIS - <_sre.SRE_Match object at 0x...> - >>> Ftype_character.type_match('CHARACTER') #doctest: +ELLIPSIS - <_sre.SRE_Match object at 0x...> - >>> Ftype_character.type_match('chaRActer (len=*)') #doctest: +ELLIPSIS - <_sre.SRE_Match object at 0x...> - >>> 
Ftype_character.type_match('integer') - - >>> Ftype_character('character', ParseContext(169, 'foo.F90')).__str__() +class FtypeCharacter(Ftype): + """FtypeCharacter is a type that represents character types + >>> FtypeCharacter.type_match('character') #doctest: +ELLIPSIS + + >>> FtypeCharacter.type_match('CHARACTER') #doctest: +ELLIPSIS + + >>> FtypeCharacter.type_match('chaRActer (len=*)') #doctest: +ELLIPSIS + + >>> FtypeCharacter.type_match('integer') + + >>> FtypeCharacter('character', ParseContext(169, 'foo.F90')).__str__() Traceback (most recent call last): - ParseSyntaxError: Invalid character declaration, 'character', at foo.F90:170 - >>> Ftype_character('character ::', ParseContext(171, 'foo.F90')).__str__() + parse_source.ParseSyntaxError: Invalid character declaration, 'character', at foo.F90:170 + >>> FtypeCharacter('character ::', ParseContext(171, 'foo.F90')).__str__() 'character(len=1)' - >>> Ftype_character('CHARACTER(len=*)', ParseContext(174, 'foo.F90')).__str__() + >>> FtypeCharacter('CHARACTER(len=*)', ParseContext(174, 'foo.F90')).__str__() 'CHARACTER(len=*)' - >>> Ftype_character('CHARACTER(len=:)', None).__str__() + >>> FtypeCharacter('CHARACTER(len=:)', None).__str__() 'CHARACTER(len=:)' - >>> Ftype_character('character(*)', None).__str__() + >>> FtypeCharacter('Character(len=512)', None).__str__() + 'Character(len=512)' + >>> FtypeCharacter('character(*)', None).__str__() 'character(len=*)' - >>> Ftype_character('character*7', None).__str__() #doctest: +IGNORE_EXCEPTION_DETAIL + >>> FtypeCharacter('character*7', None).__str__() #doctest: +IGNORE_EXCEPTION_DETAIL Traceback (most recent call last): ParseSyntaxError: Invalid character declaration, 'character*7', at :1 - >>> Ftype_character('character*7,', None).__str__() + >>> FtypeCharacter('character*7,', None).__str__() 'character(len=7)' - >>> Ftype_character("character (kind=kind('a')", None).__str__() #doctest: +IGNORE_EXCEPTION_DETAIL + >>> FtypeCharacter("character (kind=kind('a')", None).__str__() #doctest: +IGNORE_EXCEPTION_DETAIL Traceback (most recent call last): ParseSyntaxError: Invalid kind_selector, 'kind=kind('a'', at :1 - >>> Ftype_character("character (kind=kind('a'))", None).__str__() + >>> FtypeCharacter("character (kind=kind('a'))", None).__str__() "character(len=1, kind=kind('a'))" - >>> Ftype_character("character (13, kind=kind('a'))", None).__str__() + >>> FtypeCharacter("character (13, kind=kind('a'))", None).__str__() "character(len=13, kind=kind('a'))" - >>> Ftype_character("character (len=13, kind=kind('a'))", None).__str__() + >>> FtypeCharacter("character (len=13, kind=kind('a'))", None).__str__() "character(len=13, kind=kind('a'))" - >>> Ftype_character("character (kind=kind('b'), len=15)", None).__str__() + >>> FtypeCharacter("character (kind=kind('b'), len=15)", None).__str__() "character(len=15, kind=kind('b'))" """ @@ -294,12 +343,13 @@ class Ftype_character(Ftype): @classmethod def type_match(cls, line): - """Return an RE match if represents an Ftype_character declaration""" + """Return an RE match if represents an FtypeCharacter + declaration""" # Try old style first to eliminate as a possibility - match = Ftype_character.oldchar_re.match(line.strip()) + match = FtypeCharacter.oldchar_re.match(line.strip()) if match is None: - match = Ftype_character.char_re.match(line.strip()) - # End if + match = FtypeCharacter.char_re.match(line.strip()) + # end if return match def __init__(self, line, context): @@ -307,109 +357,132 @@ def __init__(self, line, context): clen = None kind = None 
# This will be interpreted as default kind - match = Ftype_character.type_match(line) + match = FtypeCharacter.type_match(line) if match is None: raise ParseSyntaxError("character declaration", token=line, context=context) - elif len(match.groups()) == 3: - self._match_len = len(match.group(0)) + # end if + match_len = len(match.group(0)) + if len(match.groups()) == 3: # We have an old style character declaration if match.group(2) != '*': raise ParseSyntaxError("character declaration", token=line, context=context) - elif Ftype_character.oldchartrail_re.match(line.strip()[len(match.group(0)):]) is None: - raise ParseSyntaxError("character declaration", token=line, context=context) - else: - clen = match.group(3) - # End if + # end if + if FtypeCharacter.oldchartrail_re.match(line.strip()[len(match.group(0)):]) is None: + raise ParseSyntaxError("character declaration", + token=line, context=context) + # end if + clen = match.group(3) elif match.group(2) is not None: - self._match_len = len(match.group(0)) # Parse attributes (strip off parentheses) - attrs = [ x.strip() for x in match.group(2)[1:-1].split(',') ] - if len(attrs) == 0: + attrs = [x.strip() for x in match.group(2)[1:-1].split(',')] + if not attrs: # Empty parentheses is not allowed - raise ParseSyntaxError("char_selector", token=match.group(2), context=context) + raise ParseSyntaxError("char_selector", + token=match.group(2), context=context) + # end if if len(attrs) > 2: # Too many attributes! - raise ParseSyntaxError("char_selector", token=match.group(2), context=context) - elif attrs[0][0:4].lower() == "kind": + raise ParseSyntaxError("char_selector", + token=match.group(2), context=context) + # end if + if attrs[0][0:4].lower() == "kind": # The first arg is kind, try to parse it kind = self.parse_kind_selector(attrs[0], context=context) # If there is a second arg, it must be of form len= if len(attrs) == 2: - clen = self.parse_len_select(attrs[1], context, len_optional=False) + clen = self.parse_len_select(attrs[1], + context, len_optional=False) elif len(attrs) == 2: # We have both a len and a kind, len first - clen = self.parse_len_select(attrs[0], context, len_optional=True) + clen = self.parse_len_select(attrs[0], + context, len_optional=True) kind = self.parse_kind_selector(attrs[1], context) else: # We just a len argument - clen = self.parse_len_select(attrs[0], context, len_optional=True) - # End if + clen = self.parse_len_select(attrs[0], + context, len_optional=True) + # end if else: - self._match_len = len(match.group(0)) # We had better check the training characters - if Ftype_character.chartrail_re.match(line.strip()[len(match.group(0)):]) is None: - raise ParseSyntaxError("character declaration", token=line, context=context) - # End if + if FtypeCharacter.chartrail_re.match(line.strip()[len(match.group(0)):]) is None: + raise ParseSyntaxError("character declaration", + token=line, context=context) + # end if + # end if if clen is None: clen = 1 - # End if + # end if self.lenstr = "{}".format(clen) - super(Ftype_character, self).__init__(typestr_in=match.group(1), kind_in=kind, context=context) + super(FtypeCharacter, self).__init__(typestr_in=match.group(1), + kind_in=kind, + match_len_in=match_len, + context=context) def parse_len_token(self, token, context): """Check to make sure token is a valid length identifier""" - match = Ftype_character.len_token_re.match(token) + match = FtypeCharacter.len_token_re.match(token) if match is not None: return match.group(1) - else: - raise ParseSyntaxError("length 
type-param-value", token=token, context=context) + # end if + raise ParseSyntaxError("length type-param-value", + token=token, context=context) + # end if def parse_len_select(self, lenselect, context, len_optional=True): """Parse a character type length_selector""" - largs = [ x.strip() for x in lenselect.split('=') ] + largs = [x.strip() for x in lenselect.split('=')] if len(largs) > 2: raise ParseSyntaxError("length_selector", token=lenselect, context=context) - elif (not len_optional) and ((len(largs) != 2) or (largs[0].lower() != 'len')): + # end if + if (not len_optional) and ((len(largs) != 2) or (largs[0].lower() != 'len')): raise ParseSyntaxError("length_selector when len= is required", token=lenselect, context=context) - elif len(largs) == 2: + # end if + if len(largs) == 2: if largs[0].lower() != 'len': raise ParseSyntaxError("length_selector", token=lenselect, context=context) - else: - return self.parse_len_token(largs[1], context) + # end if + return self.parse_len_token(largs[1], context) elif len_optional: return self.parse_len_token(largs[0], context) else: raise ParseSyntaxError("length_selector when len= is required", token=lenselect, context=context) + # end if + + def kind(self): + """Return a kind metadata declaration if this Ftype object is of + a non-default kind. + Otherwise, return an empty string.""" + if self.default_kind: + kind_str = "" + else: + kind_str = ", kind={}".format(super(FtypeCharacter, self).kind()) + # end if + return "len={}{}".format(self.lenstr, kind_str) def __str__(self): """Return a string of the declaration of the type For characters, we will always print an explicit len modifier """ - if self.default_kind: - kind_str = "" - else: - kind_str = ", kind={}".format(self.kind) - # End if - return "{}(len={}{})".format(self.typestr, self.lenstr, kind_str) + return "{}({})".format(self.typestr, self.kind()) ######################################################################## -class Ftype_type_decl(Ftype): - """Ftype_type_decl is a type that represents derived Fortran type +class FtypeTypeDecl(Ftype): + """FtypeTypeDecl is a type that represents derived Fortran type declarations. - >>> Ftype_type_decl.type_match('character') + >>> FtypeTypeDecl.type_match('character') - >>> Ftype_type_decl.type_match('type(foo)') #doctest: +ELLIPSIS - <_sre.SRE_Match object at 0x...> - >>> Ftype_type_decl.type_def_line('type GFS_statein_type') + >>> FtypeTypeDecl.type_match('type(foo)') #doctest: +ELLIPSIS + + >>> FtypeTypeDecl.type_def_line('type GFS_statein_type') ['GFS_statein_type', None, None] - >>> Ftype_type_decl.type_def_line('type GFS_statein_type (n, m) ') + >>> FtypeTypeDecl.type_def_line('type GFS_statein_type (n, m) ') ['GFS_statein_type', None, '(n, m)'] - >>> Ftype_type_decl.type_def_line('type, public, extends(foo) :: GFS_statein_type') + >>> FtypeTypeDecl.type_def_line('type, public, extends(foo) :: GFS_statein_type') ['GFS_statein_type', ['public', 'extends(foo)'], None] - >>> Ftype_type_decl.type_def_line('type(foo) :: bar') + >>> FtypeTypeDecl.type_def_line('type(foo) :: bar') - >>> Ftype_type_decl.type_def_line('type foo ! this is a comment') + >>> FtypeTypeDecl.type_def_line('type foo ! 
This is a comment') ['foo', None, None] """ @@ -419,21 +492,22 @@ class Ftype_type_decl(Ftype): def __init__(self, line, context): """Initialize an extended type from a declaration line""" - match = Ftype_type_decl.type_match(line) + match = FtypeTypeDecl.type_match(line) if match is None: - raise ParseSyntaxError("type declaration", token=line, context=context) - else: - self._match_len = len(match.group(0)) - self._class = match.group(1) - self._typestr = match.group(2) - self._kind = self.typestr - # End if + raise ParseSyntaxError("type declaration", + token=line, context=context) + # end if + super(FtypeTypeDecl, self).__init__(typestr_in=match.group(2), + kind_in=match.group(2), + match_len_in=len(match.group(0)), + context=context) + self.__class = match.group(1) @classmethod def type_match(cls, line): - """Return an RE match if represents an Ftype_type_decl declaration""" - match = Ftype_type_decl.__type_decl_re__.match(line.strip()) - # End if + """Return an RE match if represents an FtypeTypeDecl declaration""" + match = FtypeTypeDecl.__type_decl_re__.match(line.strip()) + # end if return match @classmethod @@ -446,7 +520,7 @@ def type_def_line(cls, line): sline = line[0:line.index('!')].strip() else: sline = line.strip() - # End if + # end if if sline.lower()[0:4] == 'type': if '::' in sline: elements = sline.split('::') @@ -456,35 +530,36 @@ def type_def_line(cls, line): # Plain type decl type_name = sline.split(' ', 1)[1].strip() type_props = None - # End if + # end if if '(' in type_name: tnstr = type_name.split('(') type_name = tnstr[0].strip() type_params = '(' + tnstr[1].rstrip() else: type_params = None - # End if + # end if type_def = [type_name, type_props, type_params] - # End if - # End if + # end if + # end if return type_def def __str__(self): - return '{}({})'.format(self._class, self.typestr) + """Return a printable string for this Ftype object""" + return '{}({})'.format(self.__class, self.typestr) ######################################################################## -def Ftype_factory(line, context): +def ftype_factory(line, context): ######################################################################## "Return an appropriate type object if there is a match, otherwise None" # We have to cut off the line at the end of any possible type info # Strip comments first (might have an = character) if '!' 
in line: line = line[0:line.index('!')].rstrip() - # End if + # end if ppos = line.find('(') cpos = line.find(',') if ppos >= 0: - if (cpos >= 0) and (cpos < ppos): + if 0 <= cpos < ppos: # Whatever parentheses there are, they are not part of type line = line[0:cpos] else: @@ -497,102 +572,115 @@ def Ftype_factory(line, context): depth = depth + 1 elif line[pepos] == ')': depth = depth - 1 - # End if + # end if pepos = pepos + 1 - # End while + # end while line = line[0:pepos+1] - # End if + # end if elif cpos >= 0: line = line[0:cpos] - # End if + # end if tmatch = Ftype.type_match(line) if tmatch is None: tobj = None else: tobj = Ftype(line_in=line, context=context) - # End if + # end if if tmatch is None: - tmatch = Ftype_character.type_match(line) + tmatch = FtypeCharacter.type_match(line) if tmatch is not None: - tobj = Ftype_character(line, context) - # End if - # End if + tobj = FtypeCharacter(line, context) + # end if + # end if if tmatch is None: - tmatch = Ftype_type_decl.type_match(line) + tmatch = FtypeTypeDecl.type_match(line) if tmatch is not None: - tobj = Ftype_type_decl(line, context) - # End if - # End if + tobj = FtypeTypeDecl(line, context) + # end if + # end if return tobj ######################################################################## def fortran_type_definition(line): ######################################################################## - return Ftype_type_decl.type_def_line(line) + """Return a type information if represents the start + of a type definition""" + return FtypeTypeDecl.type_def_line(line) ######################################################################## def parse_fortran_var_decl(line, source, logger=None): ######################################################################## """Parse a Fortran variable declaration line and return a list of Var objects representing the variables declared on . 
- >>> _var_id_re_.match('foo') #doctest: +ELLIPSIS - <_sre.SRE_Match object at 0x...> - >>> _var_id_re_.match("foo()") + >>> _VAR_ID_RE.match('foo') #doctest: +ELLIPSIS + + >>> _VAR_ID_RE.match("foo()") - >>> _var_id_re_.match('foo').group(1) + >>> _VAR_ID_RE.match('foo').group(1) 'foo' - >>> _var_id_re_.match('foo').group(2) + >>> _VAR_ID_RE.match('foo').group(2) - >>> _var_id_re_.match("foo(bar)").group(1) + >>> _VAR_ID_RE.match("foo(bar)").group(1) 'foo' - >>> _var_id_re_.match("foo(bar)").group(2) + >>> _VAR_ID_RE.match("foo(bar)").group(2) '(bar)' - >>> _var_id_re_.match("foo(bar)").group(2) + >>> _VAR_ID_RE.match("foo(bar)").group(2) '(bar)' - >>> _var_id_re_.match("foo(bar, baz)").group(2) + >>> _VAR_ID_RE.match("foo(bar, baz)").group(2) '(bar, baz)' - >>> _var_id_re_.match("foo(bar : baz)").group(2) + >>> _VAR_ID_RE.match("foo(bar : baz)").group(2) '(bar : baz)' - >>> _var_id_re_.match("foo(bar:)").group(2) + >>> _VAR_ID_RE.match("foo(bar:)").group(2) '(bar:)' - >>> _var_id_re_.match("foo(: baz)").group(2) + >>> _VAR_ID_RE.match("foo(: baz)").group(2) '(: baz)' - >>> _var_id_re_.match("foo(:, :,:)").group(2) + >>> _VAR_ID_RE.match("foo(:, :,:)").group(2) '(:, :,:)' - >>> _var_id_re_.match("foo(8)").group(2) + >>> _VAR_ID_RE.match("foo(8)").group(2) '(8)' - >>> _var_id_re_.match("foo(::,a:b,a:,:b)").group(2) + >>> _VAR_ID_RE.match("foo(::,a:b,a:,:b)").group(2) '(::,a:b,a:,:b)' - >>> parse_fortran_var_decl("integer :: foo", ParseSource('foo.F90', 'MODULE', ParseContext()))[0].get_prop_value('local_name') + >>> parse_fortran_var_decl("integer :: foo", ParseSource('foo.F90', 'module', ParseContext()))[0].get_prop_value('local_name') 'foo' - >>> parse_fortran_var_decl("integer :: foo = 0", ParseSource('foo.F90', 'MODULE', ParseContext()))[0].get_prop_value('local_name') + >>> parse_fortran_var_decl("integer :: foo = 0", ParseSource('foo.F90', 'module', ParseContext()))[0].get_prop_value('local_name') 'foo' - >>> parse_fortran_var_decl("integer :: foo", ParseSource('foo.F90', 'MODULE', ParseContext()))[0].get_prop_value('optional') - False - >>> parse_fortran_var_decl("integer, optional :: foo", ParseSource('foo.F90', 'MODULE', ParseContext()))[0].get_prop_value('optional') + >>> parse_fortran_var_decl("integer :: foo", ParseSource('foo.F90', 'module', ParseContext()))[0].get_prop_value('optional') + + >>> parse_fortran_var_decl("integer, optional :: foo", ParseSource('foo.F90', 'module', ParseContext()))[0].get_prop_value('optional') 'True' - >>> parse_fortran_var_decl("integer, dimension(:) :: foo", ParseSource('foo.F90', 'MODULE', ParseContext()))[0].get_prop_value('dimensions') + >>> parse_fortran_var_decl("integer, dimension(:) :: foo", ParseSource('foo.F90', 'module', ParseContext()))[0].get_prop_value('dimensions') '(:)' - >>> parse_fortran_var_decl("integer, dimension(:) :: foo(bar)", ParseSource('foo.F90', 'MODULE', ParseContext()))[0].get_prop_value('dimensions') + >>> parse_fortran_var_decl("integer, dimension(:) :: foo(bar)", ParseSource('foo.F90', 'module', ParseContext()))[0].get_prop_value('dimensions') '(bar)' - >>> parse_fortran_var_decl("integer, dimension(:) :: foo(:,:), baz", ParseSource('foo.F90', 'MODULE', ParseContext()))[0].get_prop_value('dimensions') + >>> parse_fortran_var_decl("integer, dimension(:) :: foo(:,:), baz", ParseSource('foo.F90', 'module', ParseContext()))[0].get_prop_value('dimensions') '(:,:)' - >>> parse_fortran_var_decl("integer, dimension(:) :: foo(:,:), baz", ParseSource('foo.F90', 'MODULE', ParseContext()))[1].get_prop_value('dimensions') + 
>>> parse_fortran_var_decl("integer, dimension(:) :: foo(:,:), baz", ParseSource('foo.F90', 'module', ParseContext()))[1].get_prop_value('dimensions') '(:)' - >>> parse_fortran_var_decl("real (kind=kind_phys), pointer :: phii (:,:) => null() !< interface geopotential height", ParseSource('foo.F90', 'MODULE', ParseContext()))[0].get_prop_value('dimensions') + >>> parse_fortran_var_decl("real (kind=kind_phys), pointer :: phii (:,:) => null() !< interface geopotential height", ParseSource('foo.F90', 'module', ParseContext()))[0].get_prop_value('dimensions') '(:,:)' - >>> parse_fortran_var_decl("real(kind=kind_phys), dimension(im, levs, ntrac), intent(in) :: qgrs", ParseSource('foo.F90', 'MODULE', ParseContext()))[0].get_prop_value('dimensions') + >>> parse_fortran_var_decl("real(kind=kind_phys), dimension(im, levs, ntrac), intent(in) :: qgrs", ParseSource('foo.F90', 'scheme', ParseContext()))[0].get_prop_value('dimensions') '(im, levs, ntrac)' - >>> parse_fortran_var_decl("character(len=*), intent(out) :: errmsg", ParseSource('foo.F90', 'MODULE', ParseContext()))[0].get_prop_value('local_name') + >>> parse_fortran_var_decl("character(len=*), intent(out) :: errmsg", ParseSource('foo.F90', 'scheme', ParseContext()))[0].get_prop_value('local_name') 'errmsg' + >>> parse_fortran_var_decl("character(len=512), intent(out) :: errmsg", ParseSource('foo.F90', 'scheme', ParseContext()))[0].get_prop_value('kind') + 'len=512' + >>> parse_fortran_var_decl("real(kind_phys), intent(out) :: foo(8)", ParseSource('foo.F90', 'scheme', ParseContext()))[0].get_prop_value('dimensions') + '(8)' + >>> parse_fortran_var_decl("real(kind_phys), intent(out) :: foo(size(bar))", ParseSource('foo.F90', 'scheme', ParseContext()))[0].get_prop_value('dimensions') + '(size(bar))' + >>> parse_fortran_var_decl("real(kind_phys), intent(out) :: foo(8)", ParseSource('foo.F90', 'scheme', ParseContext()))[0].get_dimensions() + ['8'] + >>> parse_fortran_var_decl("character(len=*), intent(out) :: errmsg", ParseSource('foo.F90', 'module', ParseContext()))[0].get_prop_value('local_name') #doctest: +IGNORE_EXCEPTION_DETAIL + Traceback (most recent call last): + ParseSyntaxError: Invalid variable declaration, character(len=*), intent(out) :: errmsg, intent not allowed in module variable, in """ context = source.context sline = line.strip() # Strip comments first if '!' 
in sline: sline = sline[0:sline.index('!')].rstrip() - # End if - tobject = Ftype_factory(sline, context) + # end if + tobject = ftype_factory(sline, context) newvars = list() if tobject is not None: varprops = sline[tobject.type_len:].strip() @@ -605,25 +693,39 @@ def parse_fortran_var_decl(line, source, logger=None): varprops = Ftype.parse_attr_specs(elements[0].strip(), context) for prop in varprops: if prop[0:6] == 'intent': - intent = prop[6:].strip()[1:-1].strip() + if source.type != 'scheme': + typ = source.type + errmsg = 'Invalid variable declaration, {}, intent' + errmsg = errmsg + ' not allowed in {} variable' + if logger is not None: + ctx = context_string(context) + errmsg = "WARNING: " + errmsg + "{}" + logger.warning(errmsg.format(sline, typ, ctx)) + else: + raise ParseSyntaxError(errmsg.format(sline, typ), + context=context) + # end if + else: + intent = prop[6:].strip()[1:-1].strip() + # end if elif prop[0:9:] == 'dimension': dimensions = prop[9:].strip() - # End if - # End for + # end if + # end for else: # No attr_specs varlist = varprops varprops = list() - # End if + # end if # Create Vars from these pieces # We may need to reassemble multi-dimensional specs - vars = Ftype.reassemble_parens(varlist, 'variable_list', context) - for var in vars: + var_list = Ftype.reassemble_parens(varlist, 'variable_list', context) + for var in var_list: prop_dict = {} if '=' in var: # We do not care about initializers var = var[0:var.rindex('=')].rstrip() - # End if + # end if # Scan and gather variable pieces inchar = None # Character context var_len = len(var) @@ -637,48 +739,57 @@ def parse_fortran_var_decl(line, source, logger=None): if (begin < 0) or (end < 0): if logger is not None: ctx = context_string(context) - logger.warning("WARNING: Invalid variable declaration, {}{}".format(var, ctx)) + errmsg = "WARNING: Invalid variable declaration, {}{}" + logger.warning(errmsg.format(var, ctx)) else: - raise ParseSyntaxError('variable declaration', token=var, context=context) - # End if + raise ParseSyntaxError('variable declaration', + token=var, context=context) + # end if else: dimspec = var[begin:end+1] - # End if - # End if + # end if + # end if prop_dict['local_name'] = varname - prop_dict['standard_name'] = Ftype.unique_standard_name() + prop_dict['standard_name'] = unique_standard_name() prop_dict['units'] = '' - prop_dict['type'] = tobject.typestr - if tobject.kind is not None: - prop_dict['kind'] = tobject.kind - # End if + if isinstance(tobject, FtypeTypeDecl): + prop_dict['ddt_type'] = tobject.typestr + else: + prop_dict['type'] = tobject.typestr + # end if + if tobject.kind() is not None: + prop_dict['kind'] = tobject.kind() + # end if if 'optional' in varprops: prop_dict['optional'] = 'True' - # End if + # end if if 'allocatable' in varprops: prop_dict['allocatable'] = 'True' - # End if + # end if if intent is not None: prop_dict['intent'] = intent - # End if + # end if if dimspec is not None: prop_dict['dimensions'] = dimspec elif dimensions is not None: prop_dict['dimensions'] = dimensions else: prop_dict['dimensions'] = '()' - # End if + # end if # XXgoldyXX: I am nervous about allowing invalid Var objects here - newvars.append(Var(prop_dict, source, - invalid_ok=(logger is not None), logger=logger)) - # End for + # Also, this tends to cause an exception that ends up back here + # which is not a good idea. 
+ var = Var(prop_dict, source, + invalid_ok=(logger is not None), logger=logger) + newvars.append(var) + # end for # No else (not a variable declaration) - # End if + # end if return newvars ######################################################################## # Future classes -#class Ftype_type_def(Ftype_type_decl) # Not sure about that super class +#class Ftype_type_def(FtypeTypeDecl) # Not sure about that super class #class Fmodule_spec(object) # vars and types from a module specification part # Fmodule_spec will contain a list of documented variables and a list of # documented type definitions diff --git a/scripts/fortran_tools/parse_fortran_file.py b/scripts/fortran_tools/parse_fortran_file.py index c6019b66..b37bd8db 100755 --- a/scripts/fortran_tools/parse_fortran_file.py +++ b/scripts/fortran_tools/parse_fortran_file.py @@ -6,38 +6,49 @@ Subroutines, functions, or data are not supported outside a MODULE. """ +# Python library imports import os.path if __name__ == '__main__' and __package__ is None: import sys sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) +# end if +# pylint: disable=wrong-import-position import re +from collections import OrderedDict +# CCPP framework imports from parse_tools import CCPPError, ParseInternalError, ParseSyntaxError from parse_tools import ParseContext, ParseObject, ParseSource, PreprocStack -from parse_tools import FORTRAN_ID -from metadata_table import MetadataHeader -from parse_fortran import parse_fortran_var_decl, fortran_type_definition +from parse_tools import FORTRAN_ID, context_string +from metadata_table import MetadataTable +try: + from parse_fortran import parse_fortran_var_decl, fortran_type_definition +except ModuleNotFoundError: + from .parse_fortran import parse_fortran_var_decl, fortran_type_definition +# end try from metavar import VarDictionary - -comment_re = re.compile(r"!.*$") -fixed_comment_re = re.compile(r"(?i)([C*]|(?:[ ]{0,4}!))") -program_re = re.compile(r"(?i)\s*program\s+"+FORTRAN_ID) -endprogram_re = re.compile(r"(?i)\s*end\s*program\s+"+FORTRAN_ID+r"?") -module_re = re.compile(r"(?i)\s*module\s+"+FORTRAN_ID) -endmodule_re = re.compile(r"(?i)\s*end\s*module\s+"+FORTRAN_ID+r"?") -contains_re = re.compile(r"(?i)\s*contains") -continue_re = re.compile(r"(?i)&\s*(!.*)?$") -fixed_continue_re = re.compile(r"(?i) [^0 ]") -blank_re = re.compile(r"\s+") -arg_table_start_re = re.compile(r"(?i)![!>]\s*(?:\\section)?\s*arg_table_"+FORTRAN_ID) -arg_table_end_re = re.compile(r"(?i)![!>]\s*end_arg_table_"+FORTRAN_ID) -prefix_specs = [r"(?:recursive)", r"(?:pure)", r"(?:elemental)"] -prefix_spec_re = r"(?:{})?\s*".format('|'.join(prefix_specs)) -subname_re = r"(?i)subroutine\s*" -arglist_re = r"\s*(?:[(]\s*([^)]*)[)])?" 
-subroutine_re = re.compile(r"(?i)\s*"+prefix_spec_re+subname_re+FORTRAN_ID+arglist_re) -end_subroutine_re = re.compile(r"(?i)\s*end\s*"+subname_re+FORTRAN_ID+r"?") -use_re = re.compile(r"(?i)\s*use\s(?:,\s*intrinsic\s*::)?\s*only\s*:([^!]+)") -end_type_re = re.compile(r"(?i)\s*end\s*type\s+"+FORTRAN_ID+r"?") +# pylint: enable=wrong-import-position + +_COMMENT_RE = re.compile(r"!.*$") +_FIXED_COMMENT_RE = re.compile(r"(?i)([C*]|(?:[ ]{0,4}!))") +_PROGRAM_RE = re.compile(r"(?i)\s*program\s+"+FORTRAN_ID) +_ENDPROGRAM_RE = re.compile(r"(?i)\s*end\s*program\s+"+FORTRAN_ID+r"?") +_MODULE_RE = re.compile(r"(?i)\s*module\s+"+FORTRAN_ID) +_ENDMODULE_RE = re.compile(r"(?i)\s*end\s*module\s+"+FORTRAN_ID+r"?") +_CONTAINS_RE = re.compile(r"(?i)\s*contains") +_CONTINUE_RE = re.compile(r"(?i)&\s*(!.*)?$") +_FIXED_CONTINUE_RE = re.compile(r"(?i) [^0 ]") +_BLANK_RE = re.compile(r"\s+") +_ARG_TABLE_START_RE = re.compile(r"(?i)\s*![!>]\s*(?:\\section)?\s*arg_table_"+FORTRAN_ID) +_PREFIX_SPECS = [r"(?:recursive)", r"(?:pure)", r"(?:elemental)"] +_PREFIX_SPEC = r"(?:{})?\s*".format('|'.join(_PREFIX_SPECS)) +_SUBNAME_SPEC = r"subroutine\s*" +_ARGLIST_SPEC = r"\s*(?:[(]\s*([^)]*)[)])?" +_SUBROUTINE_SPEC = r"(?i)\s*"+_PREFIX_SPEC+_SUBNAME_SPEC+FORTRAN_ID+_ARGLIST_SPEC +_SUBROUTINE_RE = re.compile(_SUBROUTINE_SPEC) +_END_SUBROUTINE_RE = re.compile(r"(?i)\s*end\s*"+_SUBNAME_SPEC+FORTRAN_ID+r"?") +_USE_RE = re.compile(r"(?i)\s*use\s(?:,\s*intrinsic\s*::)?\s*only\s*:([^!]+)") +_END_TYPE_RE = re.compile(r"(?i)\s*end\s*type(?:\s+"+FORTRAN_ID+r")?") +_INTENT_STMT_RE = re.compile(r"(?i),\s*intent\s*[(]") ######################################################################## @@ -103,13 +114,12 @@ def line_statements(line): def read_statements(pobj, statements=None): """Retrieve the next line and break it into statements""" while (statements is None) or (sum([len(x) for x in statements]) == 0): - nline, nline_num = pobj.next_line() + nline, _ = pobj.next_line() if nline is None: statements = None break - else: - statements = line_statements(nline) # End if + statements = line_statements(nline) # End while return statements @@ -146,9 +156,9 @@ def scan_fixed_line(line, in_single_char, in_double_char, context): """ # Check if comment or continue statement - cmatch = fixed_comment_re.match(line) - is_comment = cmatch is not None - is_continue = fixed_continue_re.match(line) is not None + cmatch = _FIXED_COMMENT_RE.match(line) + is_comment = cmatch is not None + is_continue = _FIXED_CONTINUE_RE.match(line) is not None # A few sanity checks if (in_single_char or in_double_char) and (not is_continue): raise ParseSyntaxError("Cannot start line in character context if not a continued line", context=context) @@ -173,11 +183,11 @@ def scan_fixed_line(line, in_single_char, in_double_char, context): last_ind = len(line.rstrip()) - 1 # Process the line while index <= last_ind: - blank = blank_re.match(line[index:]) + blank = _BLANK_RE.match(line[index:]) if blank is not None: index = index + len(blank.group(0)) - 1 # +1 at end of loop elif in_single_char: - if line[index:min(index+1,last_ind)] == "''": + if line[index:min(index+1, last_ind)] == "''": # Embedded single quote index = index + 1 # +1 and end of loop elif line[index] == "'": @@ -185,7 +195,7 @@ def scan_fixed_line(line, in_single_char, in_double_char, context): # End if # End if (just ignore any other character) elif in_double_char: - if line[index:min(index+1,last_ind)] == '""': + if line[index:min(index+1, last_ind)] == '""': # Embedded double quote index = index + 1 # 
+1 and end of loop elif line[index] == '"': @@ -243,10 +253,10 @@ def scan_free_line(line, in_continue, in_single_char, in_double_char, context): ParseSyntaxError: Cannot end non-continued line in a character context, in >>> scan_free_line("int :: inde&", False, True, False, ParseContext()) Traceback (most recent call last): - ParseSyntaxError: Cannot start line in character context if not a continued line, in + parse_source.ParseSyntaxError: Cannot start line in character context if not a continued line, in >>> scan_free_line("int :: inde&", True, True, True, ParseContext()) Traceback (most recent call last): - ParseSyntaxError: Cannot be both in an apostrophe character context and a quote character context, in + parse_source.ParseSyntaxError: Cannot be both in an apostrophe character context and a quote character context, in """ # A few sanity checks @@ -266,28 +276,27 @@ def scan_free_line(line, in_continue, in_single_char, in_double_char, context): if line.lstrip()[0] == '&': if not in_continue: raise ParseSyntaxError("Cannot begin line with continue character (&), not on continued line", context=context) - else: - continue_in_col = line.find('&') - index = continue_in_col + 1 # End if + continue_in_col = line.find('&') + index = continue_in_col + 1 # Process rest of line while index <= last_ind: - blank = blank_re.match(line[index:]) + blank = _BLANK_RE.match(line[index:]) if blank is not None: index = index + len(blank.group(0)) - 1 # +1 at end of loop elif in_single_char: - if line[index:min(index+1,last_ind)] == "''": + if line[index:min(index+1, last_ind)] == "''": # Embedded single quote index = index + 1 # +1 and end of loop elif line[index] == "'": in_single_char = False - elif (line[index] == '&'): + elif line[index] == '&': if index == last_ind: continue_out_col = index # End if # End if (just ignore any other character) elif in_double_char: - if line[index:min(index+1,last_ind)] == '""': + if line[index:min(index+1, last_ind)] == '""': # Embedded double quote index = index + 1 # +1 and end of loop elif line[index] == '"': @@ -310,18 +319,21 @@ def scan_free_line(line, in_continue, in_single_char, in_double_char, context): elif line[index] == '&': # If we got here, we are not in a character context, note continue # First make sure this is a valid continue - match = continue_re.match(line[index:]) + match = _CONTINUE_RE.match(line[index:]) if match is not None: continue_out_col = index else: - raise ParseSyntaxError("Invalid continue, ampersand not followed by comment character", context=context) + errmsg = ("Invalid continue, ampersand not followed by " + "comment character") + raise ParseSyntaxError(errmsg, context=context) # End if # End if index = index + 1 # End while # A final check if (in_single_char or in_double_char) and (continue_out_col < 0): - raise ParseSyntaxError("Cannot end non-continued line in a character context", context=context) + errmsg = "Cannot end non-continued line in a character context" + raise ParseSyntaxError(errmsg, context=context) return continue_in_col, continue_out_col, in_single_char, in_double_char, comment_col @@ -336,132 +348,146 @@ def read_file(filename, preproc_defs=None, logger=None): preproc_status = PreprocStack() if not os.path.exists(filename): raise IOError("read_file: file, '{}', does not exist".format(filename)) - else: - # We need special rules for fixed-form source - fixed_form = filename[-2:].lower() == '.f' - # Read all lines of the file at once - with open(filename, 'r') as file: - file_lines = file.readlines() - for index 
in xrange(len(file_lines)): - file_lines[index] = file_lines[index].rstrip('\n').rstrip() - # End for - # End with - # create a parse object and context for this file - pobj = ParseObject(filename, file_lines) - continue_col = -1 # Active continue column - in_schar = False # Single quote character context - in_dchar = False # Double quote character context - prev_line = None - prev_line_num = -1 - curr_line, curr_line_num = pobj.curr_line() - while curr_line is not None: - # Skip empty lines and comment-only lines - skip_line = False - if len(curr_line.strip()) == 0: - skip_line = True - elif fixed_form and (fixed_comment_re.match(curr_line) is not None): - skip_line = True - elif curr_line.lstrip()[0] == '!': - skip_line = True - # End if - if skip_line: - curr_line, curr_line_num = pobj.next_line() - continue - # End if - # Handle preproc issues - if preproc_status.process_line(curr_line, preproc_defs, pobj, logger): + # end if + # We need special rules for fixed-form source + fixed_form = filename[-2:].lower() == '.f' + # Read all lines of the file at once + with open(filename, 'r') as file: + file_lines = file.readlines() + for index, line in enumerate(file_lines): + file_lines[index] = line.rstrip('\n').rstrip() + # End for + # End with + # create a parse object and context for this file + pobj = ParseObject(filename, file_lines) + continue_col = -1 # Active continue column + in_schar = False # Single quote character context + in_dchar = False # Double quote character context + prev_line = None + prev_line_num = -1 + curr_line, curr_line_num = pobj.curr_line() + while curr_line is not None: + # Skip empty lines and comment-only lines + skip_line = False + if len(curr_line.strip()) == 0: + skip_line = True + elif (fixed_form and + (_FIXED_COMMENT_RE.match(curr_line) is not None)): + skip_line = True + elif curr_line.lstrip()[0] == '!': + skip_line = True + # End if + if skip_line: + curr_line, curr_line_num = pobj.next_line() + continue + # End if + # Handle preproc issues + if preproc_status.process_line(curr_line, preproc_defs, pobj, logger): + pobj.write_line(curr_line_num, "") + curr_line, curr_line_num = pobj.next_line() + continue + # End if + if not preproc_status.in_true_region(): + # Special case to allow CCPP comment statements in False + # regions to find DDT and module table code + if (curr_line[0:2] != '!!') and (curr_line[0:2] != '!>'): pobj.write_line(curr_line_num, "") curr_line, curr_line_num = pobj.next_line() continue # End if - if not preproc_status.in_true_region(): - # Special case to allow CCPP comment statements in False - # regions to find DDT and module table code - if (curr_line[0:2] != '!!') and (curr_line[0:2] != '!>'): - pobj.write_line(curr_line_num, "") - curr_line, curr_line_num = pobj.next_line() - continue - # End if + # End if + # scan the line for properties + if fixed_form: + res = scan_fixed_line(curr_line, in_schar, in_dchar, pobj) + cont_in_col, in_schar, in_dchar, comment_col = res + continue_col = cont_in_col # No warning in fixed form + cont_out_col = -1 + if (comment_col < 0) and (continue_col < 0): + # Real statement, grab the line # in case is continued + prev_line_num = curr_line_num + prev_line = None + # End if + else: + res = scan_free_line(curr_line, (continue_col >= 0), + in_schar, in_dchar, pobj) + cont_in_col, cont_out_col, in_schar, in_dchar, comment_col = res + # End if + # If in a continuation context, move this line to previous + if continue_col >= 0: + if fixed_form and (prev_line is None): + prev_line = 
pobj.peek_line(prev_line_num)[0:72] + # End if + if prev_line is None: + raise ParseInternalError("No prev_line to continue", + context=pobj) # End if - # scan the line for properties + sindex = max(cont_in_col+1, 0) if fixed_form: - res = scan_fixed_line(curr_line, in_schar, in_dchar, pobj) - cont_in_col, in_schar, in_dchar, comment_col = res - continue_col = cont_in_col # No warning in fixed form - cont_out_col = -1 - if (comment_col < 0) and (continue_col < 0): - # Real statement, grab the line # in case is continued - prev_line_num = curr_line_num - prev_line = None - # End if + sindex = 6 + eindex = 72 + elif cont_out_col > 0: + eindex = cont_out_col else: - res = scan_free_line(curr_line, (continue_col >= 0), - in_schar, in_dchar, pobj) - cont_in_col, cont_out_col, in_schar, in_dchar, comment_col = res + eindex = len(curr_line) # End if - # If in a continuation context, move this line to previous - if continue_col >= 0: - if fixed_form and (prev_line is None): - prev_line = pobj.peek_line(prev_line_num)[0:72] - # End if - if prev_line is None: - raise ParseInternalError("No prev_line to continue", context=pobj) - # End if - sindex = max(cont_in_col+1, 0) - if fixed_form: - sindex = 6 - eindex = 72 - elif cont_out_col > 0: - eindex = cont_out_col - else: - eindex = len(curr_line) - # End if - prev_line = prev_line + curr_line[sindex:eindex] - if fixed_form: - prev_line = prev_line.rstrip() - # End if - # Rewrite the file's lines - pobj.write_line(prev_line_num, prev_line) - pobj.write_line(curr_line_num, "") - if (not fixed_form) and (cont_out_col < 0): - # We are done with this line, reset prev_line - prev_line = None - prev_line_num = -1 - # End if + prev_line = prev_line + curr_line[sindex:eindex] + if fixed_form: + prev_line = prev_line.rstrip() # End if - continue_col = cont_out_col - if (continue_col >= 0) and (prev_line is None): - # We need to set up prev_line as it is continued - prev_line = curr_line[0:continue_col] - if not (in_schar or in_dchar): - prev_line = prev_line.rstrip() - # End if - prev_line_num = curr_line_num + # Rewrite the file's lines + pobj.write_line(prev_line_num, prev_line) + pobj.write_line(curr_line_num, "") + if (not fixed_form) and (cont_out_col < 0): + # We are done with this line, reset prev_line + prev_line = None + prev_line_num = -1 # End if - curr_line, curr_line_num = pobj.next_line() - # End while - return pobj + # End if + continue_col = cont_out_col + if (continue_col >= 0) and (prev_line is None): + # We need to set up prev_line as it is continued + prev_line = curr_line[0:continue_col] + if not (in_schar or in_dchar): + prev_line = prev_line.rstrip() + # End if + prev_line_num = curr_line_num + # End if + curr_line, curr_line_num = pobj.next_line() + # End while + return pobj ######################################################################## -def parse_use_statement(type_dict, statement, pobj, logger): - umatch = use_re.match(statement) +def parse_use_statement(statement, logger): + """Return True iff is a use statement""" + umatch = _USE_RE.match(statement) if umatch is None: return False - else: - print("use = {}".format(umatch.group(1))) - return True # End if + if logger: + logger.debug("use = {}".format(umatch.group(1))) + # end if + return True ######################################################################## -def is_comment_statement(statement, logger): +def is_dummy_argument_statement(statement): + """Return True iff is a dummy argument declaration""" + return _INTENT_STMT_RE.search(statement) is not None + 
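# Illustrative sketch (editor addition, not part of the patch): the new
# _INTENT_STMT_RE / is_dummy_argument_statement helpers above classify a
# declaration as a scheme dummy argument purely by the presence of an
# ", intent(" attribute. The helper name and the sample Fortran lines below
# are assumptions chosen only to show the intended behavior.
import re

_INTENT_RE = re.compile(r"(?i),\s*intent\s*[(]")

def looks_like_dummy_argument(statement):
    """Return True iff <statement> carries an intent attribute."""
    return _INTENT_RE.search(statement) is not None

if __name__ == '__main__':
    # A scheme dummy argument: has ", intent(...)" so it would be collected
    print(looks_like_dummy_argument("real(kind_phys), intent(in) :: qgrs(:,:,:)"))  # True
    # A module or local variable: no intent attribute, so it would be skipped
    print(looks_like_dummy_argument("real(kind_phys) :: work(:,:)"))                # False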
+######################################################################## + +def is_comment_statement(statement): + """Return True iff is a Fortran comment""" return statement.lstrip()[0] == '!' ######################################################################## def parse_type_def(statements, type_def, mod_name, pobj, logger): - psrc = ParseSource(mod_name, 'DDT', pobj) + """Parse a type definition from and return the + remaining statements along with a MetadataTable object representing + the type's variables.""" + psrc = ParseSource(mod_name, 'ddt', pobj) seen_contains = False mheader = None var_dict = VarDictionary(type_def[0]) @@ -470,21 +496,23 @@ def parse_type_def(statements, type_def, mod_name, pobj, logger): while len(statements) > 0: statement = statements.pop(0) # End program or module - pmatch = end_type_re.match(statement) + pmatch = _END_TYPE_RE.match(statement) if pmatch is not None: # We hit the end of the type, make a header - mheader = MetadataHeader(title=type_def[0], type_in='DDT', - module=mod_name, var_dict=var_dict, - logger=logger) + mheader = MetadataTable(table_name_in=type_def[0], + table_type_in='ddt', + module=mod_name, var_dict=var_dict, + logger=logger) inspec = False elif is_contains_statement(statement, inspec): seen_contains = True elif not seen_contains: # Comment of variable - if ((not is_comment_statement(statement, logger)) and - (not parse_use_statement({}, statement, pobj, logger))): - vars = parse_fortran_var_decl(statement, psrc, logger=logger) - for var in vars: + if ((not is_comment_statement(statement)) and + (not parse_use_statement(statement, logger))): + dvars = parse_fortran_var_decl(statement, psrc, + logger=logger) + for var in dvars: var_dict.add_variable(var) # End for # End if @@ -510,41 +538,67 @@ def parse_preamble_data(statements, pobj, spec_name, endmatch, logger): var_dict = VarDictionary(spec_name) psrc = ParseSource(spec_name, 'MODULE', pobj) active_table = None + if logger is not None: + ctx = context_string(pobj, nodir=True) + msg = "Parsing preamble variables of {}{}" + logger.debug(msg.format(spec_name, ctx)) + # End if while inspec and (statements is not None): while len(statements) > 0: statement = statements.pop(0) # End program or module pmatch = endmatch.match(statement) - asmatch = arg_table_start_re.match(statement) + asmatch = _ARG_TABLE_START_RE.match(statement) type_def = fortran_type_definition(statement) if asmatch is not None: - active_table = asmatch.group(1).lower() - elif (pmatch is not None) or is_contains_statement(statement, inspec): + active_table = asmatch.group(1) + elif (pmatch is not None) or is_contains_statement(statement, + inspec): # We are done with the specification inspec = False - if len(var_dict.variable_list()) > 0: - mheader = MetadataHeader(title=spec_name, type_in='MODULE', - module=spec_name, - var_dict=var_dict, logger=logger) - mheaders.append(mheader) - # End if + # Put statement back so caller knows where we are + statements.insert(0, statement) + # Add the header (even if we found no variables) + mheader = MetadataTable(table_name_in=spec_name, + table_type_in='module', + module=spec_name, + var_dict=var_dict, logger=logger) + mheaders.append(mheader) + if logger is not None: + ctx = context_string(pobj, nodir=True) + msg = 'Adding header {}{}' + logger.debug(msg.format(mheader.table_name, ctx)) break - elif (type_def is not None) and (type_def[0].lower() == active_table): + elif ((type_def is not None) and + (type_def[0].lower() == active_table.lower())): + # Put statement back 
so caller knows where we are + statements.insert(0, statement) statements, ddt = parse_type_def(statements, type_def, spec_name, pobj, logger) + if ddt is None: + ctx = context_string(pobj, nodir=True) + msg = "No DDT found at '{}'{}" + raise CCPPError(msg.format(statement, ctx)) + # End if mheaders.append(ddt) + if logger is not None: + ctx = context_string(pobj, nodir=True) + msg = 'Adding DDT {}{}' + logger.debug(msg.format(ddt.table_name, ctx)) + # End if active_table = None - else: + elif active_table is not None: # We should have a variable definition to add - if ((not is_comment_statement(statement, logger)) and - (not parse_use_statement({}, statement, pobj, logger)) and - (active_table == spec_name)): - vars = parse_fortran_var_decl(statement, psrc, logger=logger) - for var in vars: + if ((not is_comment_statement(statement)) and + (not parse_use_statement(statement, logger)) and + (active_table.lower() == spec_name.lower())): + dvars = parse_fortran_var_decl(statement, psrc, + logger=logger) + for var in dvars: var_dict.add_variable(var) # End for # End if - # End if + # End if (else we are not in an active table so just skip) # End while if inspec and (len(statements) == 0): statements = read_statements(pobj) @@ -563,55 +617,88 @@ def parse_scheme_metadata(statements, pobj, spec_name, table_name, logger): # Find the subroutine line, should be first executable statement inpreamble = False insub = True + if logger is not None: + ctx = context_string(pobj, nodir=True) + msg = "Parsing specification of {}{}" + logger.debug(msg.format(table_name, ctx)) + # End if + ctx = context_string(pobj) # Save initial context with directory + vdict = None # Initialized when we parse the subroutine arguments while insub and (statements is not None): - while len(statements) > 0: + while statements: statement = statements.pop(0) - smatch = subroutine_re.match(statement) - esmatch = end_subroutine_re.match(statement) - pmatch = endmodule_re.match(statement) - asmatch = arg_table_start_re.match(statement) + smatch = _SUBROUTINE_RE.match(statement) + esmatch = _END_SUBROUTINE_RE.match(statement) + pmatch = _ENDMODULE_RE.match(statement) + asmatch = _ARG_TABLE_START_RE.match(statement) if asmatch is not None: # We have run off the end of something, hope that is okay # Put this statement back for the caller to deal with statements.insert(0, statement) insub = False break - elif (pmatch is not None): + # End if + if pmatch is not None: # We have run off the end of the module, hope that is okay pobj.leave_region('MODULE', region_name=spec_name) insub = False break - elif smatch is not None: + # End if + if smatch is not None: scheme_name = smatch.group(1) - inpreamble = scheme_name == table_name + inpreamble = scheme_name.lower() == table_name.lower() if inpreamble: if smatch.group(2) is not None: - scheme_args = [x.strip().lower() for x in smatch.group(2).split(',')] + smstr = smatch.group(2).strip() + if len(smstr) > 0: + smlist = smstr.strip().split(',') + else: + smlist = list() + # End if + scheme_args = [x.strip().lower() for x in smlist] else: scheme_args = list() # End if - scheme_set = set(scheme_args) - var_dict = VarDictionary(scheme_name) - psrc = ParseSource(scheme_name, 'SCHEME', pobj) + # Create a dict template with all the scheme's arguments + # in the correct order + vdict = OrderedDict() + for arg in scheme_args: + if len(arg) == 0: + errmsg = 'Empty argument{}' + raise ParseInternalError(errmsg.format(pobj)) + # End if + if arg in vdict: + errmsg = 'Duplicate dummy argument, {}' + raise 
ParseSyntaxError(errmsg.format(arg), + context=pobj) + # End if + vdict[arg] = None + # End for + psrc = ParseSource(scheme_name, 'scheme', pobj) # End if elif inpreamble: # Process a preamble statement (use or argument declaration) if esmatch is not None: inpreamble = False insub = False - elif ((not is_comment_statement(statement, logger)) and - (not parse_use_statement({}, statement, pobj, logger)) and - ('intent' in statement.lower())): - vars = parse_fortran_var_decl(statement, psrc, logger=logger) - for var in vars: + elif ((not is_comment_statement(statement)) and + (not parse_use_statement(statement, logger)) and + is_dummy_argument_statement(statement)): + dvars = parse_fortran_var_decl(statement, psrc, + logger=logger) + for var in dvars: lname = var.get_prop_value('local_name').lower() - if lname in scheme_set: - scheme_set.remove(lname) + if lname in vdict: + if vdict[lname] is not None: + emsg = "Error: duplicate dummy argument, {}" + raise ParseSyntaxError(emsg.format(lname), + context=pobj) + # End if + vdict[lname] = var else: raise ParseSyntaxError('dummy argument', token=lname, context=pobj) # End if - var_dict.add_variable(var) # End for # End if # End if @@ -620,10 +707,26 @@ def parse_scheme_metadata(statements, pobj, spec_name, table_name, logger): statements = read_statements(pobj) # End if # End while + # Check for missing declarations + missing = list() + if vdict is None: + errmsg = 'Subroutine, {}, not found{}' + raise CCPPError(errmsg.format(scheme_name, ctx)) + # End if + for lname in vdict.keys(): + if vdict[lname] is None: + missing.append(lname) + # End if + # End for + if len(missing) > 0: + errmsg = 'Missing local_variables, {} in {}' + raise CCPPError(errmsg.format(missing, scheme_name)) + # End if + var_dict = VarDictionary(scheme_name, variables=vdict) if (scheme_name is not None) and (var_dict is not None): - mheader = MetadataHeader(title=scheme_name, type_in='SCHEME', - module=spec_name, var_dict=var_dict, - logger=logger) + mheader = MetadataTable(table_name_in=scheme_name, + table_type_in='scheme', module=spec_name, + var_dict=var_dict, logger=logger) # End if return statements, mheader @@ -632,37 +735,53 @@ def parse_scheme_metadata(statements, pobj, spec_name, table_name, logger): def is_contains_statement(statement, in_module): "Return True iff is an executable Fortran statement" # Fill this in when we need to parse programs or subroutines - if in_module and (contains_re.match(statement.strip()) is not None): - return True - else: - return False + return in_module and (_CONTAINS_RE.match(statement.strip()) is not None) + +######################################################################## + +def duplicate_header(header, duplicate): + """Create and return an 'Duplicate header' error string""" + ctx = duplicate.start_context() + octx = header.start_context() + errmsg = 'Duplicate header, {}{}'.format(header.name, ctx) + if len(octx) > 0: + errmsg = errmsg + ', original{}'.format(octx) # End if + return errmsg ######################################################################## -def parse_specification(pobj, statements, mod_name=None, prog_name=None, logger=None): - "Parse specification part of a module or (sub)program" +def parse_specification(pobj, statements, mod_name=None, + prog_name=None, logger=None): + """Parse specification part of a module or (sub)program""" if (mod_name is not None) and (prog_name is not None): raise ParseInternalError(" and cannot both be used") - elif mod_name is not None: + # end if + if mod_name is not None: 
spec_name = mod_name - endmatch = endmodule_re + endmatch = _ENDMODULE_RE inmod = True elif prog_name is not None: spec_name = prog_name - endmatch = endprogram_re + endmatch = _ENDPROGRAM_RE inmod = False else: raise ParseInternalError("One of or must be used") # End if + if logger is not None: + ctx = context_string(pobj, nodir=True) + msg = "Parsing specification of {}{}" + logger.debug(msg.format(spec_name, ctx)) + # End if + inspec = True - mheaders = list() + mtables = list() while inspec and (statements is not None): while len(statements) > 0: statement = statements.pop(0) # End program or module pmatch = endmatch.match(statement) - asmatch = arg_table_start_re.match(statement) + asmatch = _ARG_TABLE_START_RE.match(statement) if pmatch is not None: # We never found a contains statement inspec = False @@ -670,11 +789,22 @@ def parse_specification(pobj, statements, mod_name=None, prog_name=None, logger= elif asmatch is not None: # Put table statement back to re-read statements.insert(0, statement) - statements, new_hdrs = parse_preamble_data(statements, + statements, new_tbls = parse_preamble_data(statements, pobj, spec_name, endmatch, logger) - for hdr in new_hdrs: - mheaders.append(hdr) + for tbl in new_tbls: + title = tbl.table_name + if title in mtables: + errmsg = duplicate_header(mtables[title], tbl) + raise CCPPError(errmsg) + # end if + if logger is not None: + ctx = tbl.start_context() + mtype = tbl.table_type + msg = "Adding metadata from {}, {}{}" + logger.debug(msg.format(mtype, title, ctx)) + # End if + mtables.append(tbl) # End if inspec = pobj.in_region('MODULE', region_name=mod_name) break @@ -687,20 +817,29 @@ def parse_specification(pobj, statements, mod_name=None, prog_name=None, logger= statements = read_statements(pobj) # End if # End while - return statements, mheaders + return statements, mtables ######################################################################## def parse_program(pobj, statements, logger=None): + """Parse a Fortran PROGRAM and return any leftover statements + and metadata tables encountered in the PROGRAM.""" # The first statement should be a program statement, grab the name - pmatch = program_re.match(statements[0]) + pmatch = _PROGRAM_RE.match(statements[0]) if pmatch is None: raise ParseSyntaxError('PROGRAM statement', statements[0]) # End if prog_name = pmatch.group(1) pobj.enter_region('PROGRAM', region_name=prog_name, nested_ok=False) + if logger is not None: + ctx = context_string(pobj, nodir=True) + msg = "Parsing Fortran program, {}{}" + logger.debug(msg.format(prog_name, ctx)) + # End if # After the program name is the specification part - statements, mheaders = parse_specification(pobj, statements[1:], prog_name=prog_name, logger=logger) + statements, mtables = parse_specification(pobj, statements[1:], + prog_name=prog_name, + logger=logger) # We really cannot have tables inside a program's executable section # Just read until end statements = read_statements(pobj, statements) @@ -709,7 +848,7 @@ def parse_program(pobj, statements, logger=None): while len(statements) > 0: statement = statements.pop(0) # End program - pmatch = endprogram_re.match(statements) + pmatch = _ENDPROGRAM_RE.match(statement) if pmatch is not None: prog_name = pmatch.group(1) pobj.leave_region('PROGRAM', region_name=prog_name) @@ -720,30 +859,37 @@ def parse_program(pobj, statements, logger=None): statements = read_statements(pobj) # End if # End while - return statements, mheaders + return statements, mtables 
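# Illustrative usage sketch (editor addition, not part of the patch): how a
# caller might drive the parser refactored in this file. parse_fortran_file,
# register_fortran_ddt_name, and the table_name/table_type properties come
# from this changeset; the import paths, logger setup, and file name are
# assumptions for illustration only.
import logging
from parse_tools import register_fortran_ddt_name
from parse_fortran_file import parse_fortran_file

logger = logging.getLogger('fortran_parser')
logging.basicConfig(level=logging.DEBUG)
# DDT names must be registered before parsing files that declare them
register_fortran_ddt_name('GFS_control_type')
tables = parse_fortran_file('GFS_typedefs.F90', preproc_defs={'CCPP': 1},
                            logger=logger)
for table in tables:
    # Each entry is a MetadataTable of type 'module', 'ddt', or 'scheme'
    print('{}: {}'.format(table.table_type, table.table_name))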
######################################################################## def parse_module(pobj, statements, logger=None): + """Parse a Fortran MODULE and return any leftover statements + and metadata tables encountered in the MODULE.""" # The first statement should be a module statement, grab the name - pmatch = module_re.match(statements[0]) + pmatch = _MODULE_RE.match(statements[0]) if pmatch is None: raise ParseSyntaxError('MODULE statement', statements[0]) # End if mod_name = pmatch.group(1) pobj.enter_region('MODULE', region_name=mod_name, nested_ok=False) + if logger is not None: + ctx = context_string(pobj, nodir=True) + msg = "Parsing Fortran module, {}{}" + logger.debug(msg.format(mod_name, ctx)) + # End if # After the module name is the specification part - statements, mheaders = parse_specification(pobj, statements[1:], mod_name=mod_name, logger=logger) + statements, mtables = parse_specification(pobj, statements[1:], mod_name=mod_name, logger=logger) # Look for metadata tables statements = read_statements(pobj, statements) inmodule = pobj.in_region('MODULE', region_name=mod_name) active_table = None while inmodule and (statements is not None): - while len(statements) > 0: + while statements: statement = statements.pop(0) # End module - pmatch = endmodule_re.match(statement) - asmatch = arg_table_start_re.match(statement) + pmatch = _ENDMODULE_RE.match(statement) + asmatch = _ARG_TABLE_START_RE.match(statement) if asmatch is not None: active_table = asmatch.group(1) elif pmatch is not None: @@ -753,10 +899,22 @@ def parse_module(pobj, statements, logger=None): break elif active_table is not None: statements, mheader = parse_scheme_metadata(statements, pobj, - mod_name, active_table, + mod_name, + active_table, logger) if mheader is not None: - mheaders.append(mheader) + title = mheader.table_name + if title in mtables: + errmsg = duplicate_header(mtables[title], mheader) + raise CCPPError(errmsg) + # end if + if logger is not None: + mtype = mheader.table_type + ctx = mheader.start_context() + msg = "Adding metadata from {}, {}{}" + logger.debug(msg.format(mtype, title, ctx)) + # End if + mtables.append(mheader) # End if active_table = None inmodule = pobj.in_region('MODULE', region_name=mod_name) @@ -767,52 +925,57 @@ def parse_module(pobj, statements, logger=None): statements = read_statements(pobj) # End if # End while - return statements, mheaders + return statements, mtables ######################################################################## def parse_fortran_file(filename, preproc_defs=None, logger=None): - mheaders = list() - type_dict = {} + """Parse a Fortran file and return all metadata tables found.""" + mtables = list() pobj = read_file(filename, preproc_defs=preproc_defs, logger=logger) pobj.reset_pos() - curr_line, clo = pobj.curr_line() + curr_line, _ = pobj.curr_line() statements = line_statements(curr_line) - while (statements is not None) and (len(statements) > 0): + while statements is not None: + if not statements: + statements = read_statements(pobj) + # End if statement = statements.pop(0) - if program_re.match(statement) is not None: + if _PROGRAM_RE.match(statement) is not None: # push statement back so parse_program can use it statements.insert(0, statement) - statements, pheaders = parse_program(pobj, statements, logger=logger) - mheaders.extend(pheaders) - elif module_re.match(statement) is not None: + statements, ptables = parse_program(pobj, statements, logger=logger) + mtables.extend(ptables) + elif _MODULE_RE.match(statement) is not None: # 
push statement back so parse_module can use it statements.insert(0, statement) - statements, pheaders = parse_module(pobj, statements, logger=logger) - mheaders.extend(pheaders) + statements, ptables = parse_module(pobj, statements, logger=logger) + mtables.extend(ptables) # End if if (statements is not None) and (len(statements) == 0): statements = read_statements(pobj) # End if # End while - return mheaders + return mtables ######################################################################## if __name__ == "__main__": +# pylint: disable=ungrouped-imports import doctest doctest.testmod() from parse_tools import register_fortran_ddt_name - fpath = '/Users/goldy/scratch/foo' - fnames = ['GFS_PBL_generic.F90', 'GFS_rad_time_vary.fv3.F90', - 'GFS_typedefs.F90'] +# pylint: enable=ungrouped-imports + _FPATH = '/Users/goldy/scratch/foo' + _FNAMES = ['GFS_PBL_generic.F90', 'GFS_rad_time_vary.fv3.F90', + 'GFS_typedefs.F90'] register_fortran_ddt_name('GFS_control_type') register_fortran_ddt_name('GFS_data_type') - for fname in fnames: - fpathname = os.path.join(fpath, fname) + for fname in _FNAMES: + fpathname = os.path.join(_FPATH, fname) if os.path.exists(fpathname): mh = parse_fortran_file(fpathname, preproc_defs={'CCPP':1}) - for h in mh: + for header in mheader: print('{}: {}'.format(fname, h)) # End for # End if diff --git a/scripts/host_cap.py b/scripts/host_cap.py new file mode 100644 index 00000000..fcde1985 --- /dev/null +++ b/scripts/host_cap.py @@ -0,0 +1,573 @@ +#!/usr/bin/env python + +""" +Parse a host-model registry XML file and return the captured variables. +""" + +# Python library imports +import os +import os.path +# CCPP framework imports +from ccpp_suite import API +from ccpp_state_machine import CCPP_STATE_MACH +from constituents import ConstituentVarDict, CONST_DDT_NAME, CONST_DDT_MOD +from ddt_library import DDTLibrary +from file_utils import KINDS_MODULE +from metadata_table import MetadataTable +from metavar import Var, VarDictionary, CCPP_CONSTANT_VARS +from metavar import CCPP_LOOP_VAR_STDNAMES +from fortran_tools import FortranWriter +from parse_tools import CCPPError +from parse_tools import ParseObject, ParseSource, ParseContext + +############################################################################### +_HEADER = "cap for {host_model} calls to CCPP API" + +_SUBHEAD = ''' + subroutine {host_model}_ccpp_physics_{stage}({api_vars}) +''' + +_SUBFOOT = ''' + end subroutine {host_model}_ccpp_physics_{stage} +''' + +_API_SRC_NAME = "CCPP_API" + +_API_SOURCE = ParseSource(_API_SRC_NAME, "MODULE", + ParseContext(filename="host_cap.F90")) + +_SUITE_NAME_VAR = Var({'local_name':'suite_name', + 'standard_name':'suite_name', + 'intent':'in', 'type':'character', + 'kind':'len=*', 'units':'', 'protected':'True', + 'dimensions':'()'}, _API_SOURCE) + +_SUITE_PART_VAR = Var({'local_name':'suite_part', + 'standard_name':'suite_part', + 'intent':'in', 'type':'character', + 'kind':'len=*', 'units':'', 'protected':'True', + 'dimensions':'()'}, _API_SOURCE) + +# Used to prevent loop substitution lookups +_BLANK_DICT = VarDictionary(_API_SRC_NAME) + +############################################################################### +def suite_part_list(suite, stage): +############################################################################### + """Return a list of all the suite parts for this stage""" + run_stage = stage == 'run' + if run_stage: + spart_list = list() + for spart in suite.groups: + if suite.is_run_group(spart): + spart_list.append(spart) + # End if + # End for + 
else: + spart_list = [suite.phase_group(stage)] + # End if + return spart_list + +############################################################################### +def constituent_register_subname(host_model): +############################################################################### + """Return the name of the subroutine used to register (initialize) the + constituents for this run. + Because this is a user interface API function, the name is fixed.""" + return "{}_ccpp_register_constituents".format(host_model.name) + +############################################################################### +def constituent_num_consts_funcname(host_model): +############################################################################### + """Return the name of the function to return the number of + constituents for this run. + Because this is a user interface API function, the name is fixed.""" + return "{}_ccpp_number_constituents".format(host_model.name) + +############################################################################### +def constituent_copyin_subname(host_model): +############################################################################### + """Return the name of the subroutine to copy constituent fields to the + host model. + Because this is a user interface API function, the name is fixed.""" + return "{}_ccpp_gather_constituents".format(host_model.name) + +############################################################################### +def constituent_copyout_subname(host_model): +############################################################################### + """Return the name of the subroutine to update constituent fields from + the host model. + Because this is a user interface API function, the name is fixed.""" + return "{}_ccpp_update_constituents".format(host_model.name) + +############################################################################### +def unique_local_name(loc_name, host_model): +############################################################################### + """Create a unique local name based on the local_name property, + for a variable with standard name, . + If is an unique local name (not in ), + simply return that. 
If not, create one and return that.""" + new_name = host_model.find_local_name(loc_name) is not None + if new_name: + new_lname = host_model.new_internal_variable_name(prefix=loc_name) + else: + new_lname = loc_name + # end if + return new_lname + +############################################################################### +def constituent_model_object_name(host_model): +############################################################################### + """Return the variable name of the object which holds the constiteunt + medata and field information.""" + hstr = "{}_constituents_obj".format(host_model.name) + return unique_local_name(hstr, host_model) + +############################################################################### +def constituent_model_const_stdnames(host_model): +############################################################################### + """Return the name of the array of constituent standard names""" + hstr = "{}_model_const_stdnames".format(host_model.name) + return unique_local_name(hstr, host_model) + +############################################################################### +def constituent_model_const_indices(host_model): +############################################################################### + """Return the name of the array of constituent field array indices""" + hstr = "{}_model_const_indices".format(host_model.name) + return unique_local_name(hstr, host_model) + +############################################################################### +def add_constituent_vars(cap, host_model, suite_list, logger): +############################################################################### + """Create a DDT library containing array reference variables + for each constituent field for all suites in . + Create and return a dictionary containing an index variable for each of the + constituents as well as the variables from the DDT object. + Also, write declarations for these variables to . + Since the constituents are in a DDT (ccpp_constituent_properties_t), + create a metadata table with the required information, then parse it + to create the dictionary. 
+ """ + # First create a MetadataTable for the constituents DDT + stdname_layer = "ccpp_constituents_num_layer_consts" + stdname_interface = "ccpp_constituents_num_interface_consts" + stdname_2d = "ccpp_constituents_num_2d_consts" + horiz_dim = "horizontal_dimension" + vert_layer_dim = "vertical_layer_dimension" + vert_interface_dim = "vertical_interface_dimension" + array_layer = "vars_layer" + array_interface = "vars_interface" + array_2d = "vars_2d" + # Table preamble (leave off ccpp-table-properties header) + ddt_mdata = [ + #"[ccpp-table-properties]", + " name = {}".format(CONST_DDT_NAME), " type = ddt", + "[ccpp-arg-table]", + " name = {}".format(CONST_DDT_NAME), " type = ddt", + "[ num_layer_vars ]", + " standard_name = {}".format(stdname_layer), + " units = count", " dimensions = ()", " type = integer", + "[ num_interface_vars ]", + " standard_name = {}".format(stdname_interface), + " units = count", " dimensions = ()", " type = integer", + "[ num_2d_vars ]", + " standard_name = {}".format(stdname_2d), + " units = count", " dimensions = ()", " type = integer", + "[ {} ]".format(array_layer), + " standard_name = ccpp_constituents_array_of_layer_consts", + " units = none", + " dimensions = ({}, {}, {})".format(horiz_dim, vert_layer_dim, + stdname_layer), + " type = real", " kind = kind_phys", + "[ {} ]".format(array_interface), + " standard_name = ccpp_constituents_array_of_interface_consts", + " units = none", + " dimensions = ({}, {}, {})".format(horiz_dim, + vert_interface_dim, + stdname_interface), + " type = real", " kind = kind_phys", + "[ {} ]".format(array_2d), + " standard_name = ccpp_constituents_array_of_2d_consts", + " units = none", + " dimensions = ({}, {})".format(horiz_dim, stdname_2d), + " type = real", " kind = kind_phys"] + # Add entries for each constituent (once per standard name) + const_stdnames = set() + for suite in suite_list: + if logger is not None: + lmsg = "Adding constituents from {} to {}" + logger.debug(lmsg.format(suite.name, host_model.name)) + # end if + scdict = suite.constituent_dictionary() + for cvar in scdict.variable_list(): + std_name = cvar.get_prop_value('standard_name') + if std_name not in const_stdnames: + # Add a metadata entry for this constituent + # Check dimensions and figure vertical dimension + # Currently, we only support variables with first dimension, + # horizontal_dimension, and second (optional) dimension, + # vertical_layer_dimension or vertical_interface_dimension + dims = cvar.get_dimensions() + if (len(dims) < 1) or (len(dims) > 2): + emsg = "Unsupported constituent dimensions, '{}'" + dimstr = "({})".format(", ".join(dims)) + raise CCPPError(emsg.format(dimstr)) + # end if + hdim = dims[0].split(':')[-1] + if hdim != 'horizontal_dimension': + emsg = "Unsupported first constituent dimension, '{}', " + emsg += "must be 'horizontal_dimension'" + raise CCPPError(emsg.format(hdim)) + # end if + if len(dims) > 1: + vdim = dims[1].split(':')[-1] + if vdim == vert_layer_dim: + cvar_array_name = array_layer + elif vdim == vert_interface_dim: + cvar_array_name = array_interface + else: + emsg = "Unsupported vertical constituent dimension, " + emsg += "'{}', must be '{}' or '{}'" + raise CCPPError(emsg.format(vdim, vert_layer_dim, + vert_interface_dim)) + # end if + else: + cvar_array_name = array_2d + # end if + # First, create an index variable for + ind_std_name = "index_of_{}".format(std_name) + loc_name = "{}(:,:,{})".format(cvar_array_name, ind_std_name) + ddt_mdata.append("[ {} ]".format(loc_name)) + ddt_mdata.append(" 
standard_name = {}".format(std_name)) + units = cvar.get_prop_value('units') + ddt_mdata.append(" units = {}".format(units)) + dimstr = "({})".format(", ".join(dims)) + ddt_mdata.append(" dimensions = {}".format(dimstr)) + vtype = cvar.get_prop_value('type') + vkind = cvar.get_prop_value('kind') + ddt_mdata.append(" type = {} | kind = {}".format(vtype, vkind)) + const_stdnames.add(std_name) + # end if + # end for + # end for + # Parse this table using a fake filename + parse_obj = ParseObject("{}_constituent_mod.meta".format(host_model.name), + ddt_mdata) + ddt_table = MetadataTable(parse_object=parse_obj, logger=logger) + ddt_name = ddt_table.sections()[0].title + ddt_lib = DDTLibrary('{}_constituent_ddtlib'.format(host_model.name), + ddts=ddt_table.sections(), logger=logger) + # A bit of cleanup + del parse_obj + del ddt_mdata + # Now, create the "host constituent module" dictionary + const_dict = VarDictionary("{}_constituents".format(host_model.name), + parent_dict=host_model) + # Add in the constituents object + prop_dict = {'standard_name' : "ccpp_model_constituents_object", + 'local_name' : constituent_model_object_name(host_model), + 'dimensions' : '()', 'units' : "None", 'ddt_type' : ddt_name} + const_var = Var(prop_dict, _API_SOURCE) + const_var.write_def(cap, 1, const_dict) + ddt_lib.collect_ddt_fields(const_dict, const_var) + # Declare variable for the constituent standard names array + max_csname = max([len(x) for x in const_stdnames]) if const_stdnames else 0 + num_const_fields = len(const_stdnames) + cs_stdname = constituent_model_const_stdnames(host_model) + const_list = sorted(const_stdnames) + if const_list: + const_strs = ['"{}{}"'.format(x, ' '*(max_csname - len(x))) + for x in const_list] + cs_stdame_initstr = " = (/ " + ", ".join(const_strs) + " /)" + else: + cs_stdame_initstr = "" + # end if + cap.write("character(len={}) :: {}({}){}".format(max_csname, cs_stdname, + num_const_fields, + cs_stdame_initstr), 1) + # Declare variable for the constituent standard names array + array_name = constituent_model_const_indices(host_model) + cap.write("integer :: {}({}) = -1".format(array_name, num_const_fields), 1) + # Add individual variables for each index var to the const_dict + for index, std_name in enumerate(const_list): + ind_std_name = "index_of_{}".format(std_name) + ind_loc_name = "{}({})".format(array_name, index + 1) + prop_dict = {'standard_name' : ind_std_name, + 'local_name' : ind_loc_name, 'dimensions' : '()', + 'units' : 'index', 'protected' : "True", + 'type' : 'integer', 'kind' : ''} + ind_var = Var(prop_dict, _API_SOURCE) + const_dict.add_variable(ind_var) + # end for + # Add vertical dimensions for DDT call strings + pver = host_model.find_variable(standard_name=vert_layer_dim, + any_scope=False) + if pver is not None: + prop_dict = {'standard_name' : vert_layer_dim, + 'local_name' : pver.get_prop_value('local_name'), + 'units' : 'count', 'type' : 'integer', + 'protected' : 'True', 'dimensions' : '()'} + if const_dict.find_variable(standard_name=vert_layer_dim, + any_scope=False) is None: + ind_var = Var(prop_dict, _API_SOURCE) + const_dict.add_variable(ind_var) + # end if + # end if + pver = host_model.find_variable(standard_name=vert_interface_dim, + any_scope=False) + if pver is not None: + prop_dict = {'standard_name' : vert_interface_dim, + 'local_name' : pver.get_prop_value('local_name'), + 'units' : 'count', 'type' : 'integer', + 'protected' : 'True', 'dimensions' : '()'} + if const_dict.find_variable(standard_name=vert_interface_dim, + 
any_scope=False) is None: + ind_var = Var(prop_dict, _API_SOURCE) + const_dict.add_variable(ind_var) + # end if + # end if + + return const_dict + +############################################################################### +def suite_part_call_list(host_model, const_dict, suite_part, subst_loop_vars): +############################################################################### + """Return the controlled call list for . + is the constituent dictionary""" + spart_args = suite_part.call_list.variable_list(loop_vars=subst_loop_vars) + hmvars = list() # Host model to spart dummy args + if subst_loop_vars: + loop_vars = host_model.loop_vars + else: + loop_vars = None + # end if + for sp_var in spart_args: + stdname = sp_var.get_prop_value('standard_name') + sp_lname = sp_var.get_prop_value('local_name') + var_dicts = [host_model, const_dict] + # Figure out which dictionary has the variable + for vdict in var_dicts: + hvar = vdict.find_variable(standard_name=stdname, any_scope=False) + if hvar is not None: + var_dict = vdict + break + # end if + # end for + if hvar is None: + errmsg = 'No host model variable for {} in {}' + raise CCPPError(errmsg.format(stdname, suite_part.name)) + # End if + if stdname not in CCPP_CONSTANT_VARS: + lname = var_dict.var_call_string(hvar, loop_vars=loop_vars) + hmvars.append("{}={}".format(sp_lname, lname)) + # End if + # End for + return ', '.join(hmvars) + +############################################################################### +def write_host_cap(host_model, api, output_dir, logger): +############################################################################### + """Write an API to allow to call any configured CCPP suite""" + module_name = "{}_ccpp_cap".format(host_model.name) + cap_filename = os.path.join(output_dir, '{}.F90'.format(module_name)) + if logger is not None: + msg = 'Writing CCPP Host Model Cap for {} to {}' + logger.info(msg.format(host_model.name, cap_filename)) + # End if + header = _HEADER.format(host_model=host_model.name) + with FortranWriter(cap_filename, 'w', header, module_name) as cap: + # Write module use statements + maxmod = len(KINDS_MODULE) + cap.write(' use {kinds}'.format(kinds=KINDS_MODULE), 1) + modules = host_model.variable_locations() + if modules: + mlen = max([len(x[0]) for x in modules]) + maxmod = max(maxmod, mlen) + # End if + mlen = max([len(x.module) for x in api.suites]) + maxmod = max(maxmod, mlen) + maxmod = max(maxmod, len(CONST_DDT_MOD)) + for mod in modules: + mspc = (maxmod - len(mod[0]))*' ' + cap.write("use {}, {}only: {}".format(mod[0], mspc, mod[1]), 1) + # End for + mspc = ' '*(maxmod - len(CONST_DDT_MOD)) + cap.write("use {}, {}only: {}".format(CONST_DDT_MOD, mspc, + CONST_DDT_NAME), 1) + cap.write_preamble() + max_suite_len = 0 + for suite in api.suites: + max_suite_len = max(max_suite_len, len(suite.module)) + # End for + cap.write("! 
Public Interfaces", 1) + # CCPP_STATE_MACH.transitions represents the host CCPP interface + for stage in CCPP_STATE_MACH.transitions(): + stmt = "public :: {host_model}_ccpp_physics_{stage}" + cap.write(stmt.format(host_model=host_model.name, stage=stage), 1) + # End for + API.declare_inspection_interfaces(cap) + # Write the host-model interfaces for constituents + reg_name = constituent_register_subname(host_model) + cap.write("public :: {}".format(reg_name), 1) + numconsts_name = constituent_num_consts_funcname(host_model) + cap.write("public :: {}".format(numconsts_name), 1) + copyin_name = constituent_copyin_subname(host_model) + cap.write("public :: {}".format(copyin_name), 1) + copyout_name = constituent_copyout_subname(host_model) + cap.write("public :: {}".format(copyout_name), 1) + cap.write("", 0) + cap.write("! Private module variables", 1) + const_dict = add_constituent_vars(cap, host_model, api.suites, logger) + cap.end_module_header() + for stage in CCPP_STATE_MACH.transitions(): + # Create a dict of local variables for stage + host_local_vars = VarDictionary("{}_{}".format(host_model.name, + stage)) + # Create part call lists + # Look for any loop-variable mismatch + for suite in api.suites: + spart_list = suite_part_list(suite, stage) + for spart in spart_list: + spart_args = spart.call_list.variable_list() + for sp_var in spart_args: + stdname = sp_var.get_prop_value('standard_name') + hvar = const_dict.find_variable(standard_name=stdname, + any_scope=True) + if hvar is None: + errmsg = 'No host model variable for {} in {}' + raise CCPPError(errmsg.format(stdname, spart.name)) + # End if + # End for (loop over part variables) + # End for (loop of suite parts) + # End for (loop over suites) + run_stage = stage == 'run' + # All interfaces need the suite name + apivars = [_SUITE_NAME_VAR] + if run_stage: + # Only the run phase needs a suite part name + apivars.append(_SUITE_PART_VAR) + # End if + # Create a list of dummy arguments with correct intent settings + callvars = host_model.call_list(stage) # Host interface dummy args + hdvars = list() + subst_dict = {} + for hvar in callvars: + protected = hvar.get_prop_value('protected') + stdname = hvar.get_prop_value('standard_name') + if stdname in CCPP_LOOP_VAR_STDNAMES: + protected = True # Cannot modify a loop variable + # End if + if protected: + subst_dict['intent'] = 'in' + else: + subst_dict['intent'] = 'inout' + # End if + hdvars.append(hvar.clone(subst_dict, + source_name=_API_SRC_NAME)) + # End for + lnames = [x.get_prop_value('local_name') for x in apivars + hdvars] + api_vlist = ", ".join(lnames) + cap.write(_SUBHEAD.format(api_vars=api_vlist, + host_model=host_model.name, + stage=stage), 1) + # Write out any suite part use statements + for suite in api.suites: + mspc = (max_suite_len - len(suite.module))*' ' + spart_list = suite_part_list(suite, stage) + for spart in spart_list: + stmt = "use {}, {}only: {}" + cap.write(stmt.format(suite.module, mspc, spart.name), 2) + # End for + # End for + # Write out any host model DDT input var use statements + host_model.ddt_lib.write_ddt_use_statements(hdvars, cap, 2, + pad=max_suite_len) + + cap.write("", 1) + # Write out dummy arguments + for var in apivars: + var.write_def(cap, 2, host_model) + # End for + for var in hdvars: + var.write_def(cap, 2, host_model) + # End for + for var in host_local_vars.variable_list(): + var.write_def(cap, 2, host_model) + # End for + cap.write('', 0) + # Write out the body clauses + errmsg_name, errflg_name = api.get_errinfo_names() + # 
Initialize err variables + cap.write('{errflg} = 0'.format(errflg=errflg_name), 2) + cap.write('{errmsg} = ""'.format(errmsg=errmsg_name), 2) + else_str = '' + for suite in api.suites: + stmt = "{}if (trim(suite_name) == '{}') then" + cap.write(stmt.format(else_str, suite.name), 2) + if stage == 'run': + el2_str = '' + spart_list = suite_part_list(suite, stage) + for spart in spart_list: + pname = spart.name[len(suite.name)+1:] + stmt = "{}if (trim(suite_part) == '{}') then" + cap.write(stmt.format(el2_str, pname), 3) + call_str = suite_part_call_list(host_model, const_dict, + spart, True) + cap.write("call {}({})".format(spart.name, call_str), 4) + el2_str = 'else ' + # End for + cap.write("else", 3) + emsg = "write({errmsg}, '(3a)')".format(errmsg=errmsg_name) + emsg += '"No suite part named ", ' + emsg += 'trim(suite_part), ' + emsg += '" found in suite {sname}"'.format(sname=suite.name) + cap.write(emsg, 4) + cap.write("{errflg} = 1".format(errflg=errflg_name), 4) + cap.write("end if", 3) + else: + spart = suite.phase_group(stage) + call_str = suite_part_call_list(host_model, const_dict, + spart, False) + stmt = "call {}_{}({})" + cap.write(stmt.format(suite.name, stage, call_str), 3) + # End if + else_str = 'else ' + # End for + cap.write("else", 2) + emsg = "write({errmsg}, '(3a)')".format(errmsg=errmsg_name) + emsg += '"No suite named ", ' + emsg += 'trim(suite_name), "found"' + cap.write(emsg, 3) + cap.write("{errflg} = 1".format(errflg=errflg_name), 3) + cap.write("end if", 2) + cap.write(_SUBFOOT.format(host_model=host_model.name, + stage=stage), 1) + # End for + # Write the API inspection routines (e.g., list of suites) + api.write_inspection_routines(cap) + # Write the constituent initialization interfaces + err_vars = host_model.find_error_variables() + const_obj_name = constituent_model_object_name(host_model) + cap.write("", 0) + const_names_name = constituent_model_const_stdnames(host_model) + const_indices_name = constituent_model_const_indices(host_model) + ConstituentVarDict.write_host_routines(cap, host_model, reg_name, + numconsts_name, copyin_name, + copyout_name, const_obj_name, + const_names_name, + const_indices_name, + api.suites, err_vars) + # End with + return cap_filename + +############################################################################### + +if __name__ == "__main__": + from parse_tools import init_log, set_log_to_null + _LOGGER = init_log('host_registry') + set_log_to_null(_LOGGER) + # Run doctest + import doctest + doctest.testmod() +# No else: diff --git a/scripts/host_model.py b/scripts/host_model.py new file mode 100644 index 00000000..725acda0 --- /dev/null +++ b/scripts/host_model.py @@ -0,0 +1,307 @@ +#!/usr/bin/env python + +""" +Parse a host-model registry XML file and return the captured variables. 
+""" + +# Python library imports +from __future__ import print_function +# CCPP framework imports +from metavar import VarDictionary +from ddt_library import VarDDT, DDTLibrary +from parse_tools import ParseContext, CCPPError, ParseInternalError +from parse_tools import context_string +from parse_tools import FORTRAN_SCALAR_REF_RE + +############################################################################### +class HostModel(VarDictionary): + """Class to hold the data from a host model""" + + def __init__(self, meta_tables, name_in, logger): + self.__name = name_in + self.__var_locations = {} # Local name to module map + self.__loop_vars = None # Loop control vars in interface calls + self.__used_variables = None # Local names which have been requested + self.__deferred_finds = None # Used variables that were missed at first + # First, process DDT headers + meta_headers = list() + for sect in [x.sections() for x in meta_tables.values()]: + meta_headers.extend(sect) + # end for + # Initialize our dictionaries + # Initialize variable dictionary + super(HostModel, self).__init__(self.name, logger=logger) + self.__ddt_lib = DDTLibrary('{}_ddts'.format(self.name), + ddts=[d for d in meta_headers + if d.header_type == 'ddt'], + logger=logger) + self.__ddt_dict = VarDictionary("{}_ddt_vars".format(self.name), + parent_dict=self, logger=logger) + # Now, process the code headers by type + self.__metadata_tables = meta_tables + for header in [h for h in meta_headers if h.header_type != 'ddt']: + title = header.title + if logger is not None: + msg = 'Adding {} {} to host model' + logger.debug(msg.format(header.header_type, title)) + # End if + if header.header_type == 'module': + # Set the variable modules + modname = header.title + for var in header.variable_list(): + self.add_variable(var) + lname = var.get_prop_value('local_name') + self.__var_locations[lname] = modname + self.ddt_lib.check_ddt_type(var, header, lname=lname) + if var.is_ddt(): + self.ddt_lib.collect_ddt_fields(self.__ddt_dict, var) + # End if + # End for + elif header.header_type == 'host': + if self.__name is None: + # Grab the first host name we see + self.__name = header.name + # End if + for var in header.variable_list(): + self.add_variable(var) + self.ddt_lib.check_ddt_type(var, header) + if var.is_ddt(): + self.ddt_lib.collect_ddt_fields(self.__ddt_dict, var) + # End if + # End for + loop_vars = header.variable_list(std_vars=False, + loop_vars=True, consts=False) + if loop_vars: + # loop_vars are part of the host-model interface call + # at run time. As such, they override the host-model + # array dimensions. 
+ self.__loop_vars = VarDictionary(self.name) + # End if + for hvar in loop_vars: + std_name = hvar.get_prop_value('standard_name') + if std_name not in self.__loop_vars: + self.__loop_vars.add_variable(hvar) + else: + ovar = self.__loop_vars[std_name] + ctx1 = context_string(ovar.context) + ctx2 = context_string(hvar.context) + lname1 = ovar.get_prop_value('local_name') + lname2 = hvar.get_prop_value('local_name') + errmsg = ("Duplicate host loop var for {n}:\n" + " Dup: {l1}{c1}\n Orig: {l2}{c2}") + raise CCPPError(errmsg.format(n=self.name, + l1=lname1, c1=ctx1, + l2=lname2, c2=ctx2)) + # End if + # End for + else: + errmsg = "Invalid host model metadata header type, {} ({}){}" + errmsg += "\nType must be 'module' or 'host'" + ctx = context_string(header.context) + raise CCPPError(errmsg.format(header.title, + header.header_type, ctx)) + # End if + # End while + if self.name is None: + errmsg = 'No name found for host model, add a host metadata entry' + raise CCPPError(errmsg) + # End if + # Finally, turn on the use meter so we know which module variables + # to 'use' in a host cap. + self.__used_variables = set() # Local names which have been requested + self.__deferred_finds = set() # Used variables that were missed at first + + @property + def name(self): + """Return the host model name""" + return self.__name + + @property + def loop_vars(self): + """Return this host model's loop variables""" + return self.__loop_vars + + @property + def ddt_lib(self): + """Return this host model's DDT library""" + return self.__ddt_lib + +# XXgoldyXX: v needed? + @property + def constituent_module(self): + """Return the name of host model constituent module""" + return "{}_ccpp_constituents".format(self.name) +# XXgoldyXX: ^ needed? + + def argument_list(self, loop_vars=True): + """Return a string representing the host model variable arg list""" + args = [v.call_string(self) + for v in self.variable_list(loop_vars=loop_vars, consts=False)] + return ', '.join(args) + + def metadata_tables(self): + """Return a copy of this host models metadata tables""" + return dict(self.__metadata_tables) + + def host_variable_module(self, local_name): + """Return the module name for a host variable""" + if local_name in self.__var_locations: + return self.__var_locations[local_name] + # End if + return None + + def variable_locations(self): + """Return a set of module-variable and module-type pairs. + These represent the locations of all host model data with a listed + source location (variables with no source are omitted).""" + varset = set() + lnames = self.prop_list('local_name') + # Attempt to realize deferred lookups + if self.__deferred_finds is not None: + for std_name in list(self.__deferred_finds): + var = self.find_variable(standard_name=std_name) + if var is not None: + self.__deferred_finds.remove(std_name) + # End if + # End for + # End if + # Now, find all the used module variables + for name in lnames: + module = self.host_variable_module(name) + used = self.__used_variables and (name in self.__used_variables) + if module and used: + varset.add((module, name)) + # No else, either no module or a zero-length module name + # End if + # End for + return varset + + def find_variable(self, standard_name=None, source_var=None, + any_scope=False, clone=None, + search_call_list=False, loop_subst=False): + """Return the host model variable matching or None + If is True, substitute a begin:end range for an extent. 
+ """ + my_var = super(HostModel, + self).find_variable(standard_name=standard_name, + source_var=source_var, + any_scope=any_scope, clone=clone, + search_call_list=search_call_list, + loop_subst=loop_subst) + if my_var is None: + # Check our DDT library + if standard_name is None: + if source_var is None: + emsg = ("One of or " + + "must be passed.") + raise ParseInternalError(emsg) + # end if + standard_name = source_var.get_prop_value('standard_name') + # end if + # Since we are the parent of the DDT library, only check that dict + my_var = self.__ddt_dict.find_variable(standard_name=standard_name, + any_scope=False) + # End if + if loop_subst: + if my_var is None: + my_var = self.find_loop_subst(standard_name) + # End if + if my_var is not None: + # If we get here, the host does not have the requested + # variable but does have a replacement set. Create a new + # variable to use to send to suites. + ##XXgoldyXX: This cannot be working since find_loop_subst + ## returns a tuple + new_name = self.new_internal_variable_name(prefix=self.name) + ctx = ParseContext(filename='host_model.py') + new_var = my_var.clone(new_name, source_name=self.name, + source_type="HOST", + context=ctx) + self.add_variable(new_var) + my_var = new_var + # End if + # End if + if my_var is None: + if self.__deferred_finds is not None: + self.__deferred_finds.add(standard_name) + # End if + elif self.__used_variables is not None: + lname = my_var.get_prop_value('local_name') + # Try to add any index references (should be method?) + imatch = FORTRAN_SCALAR_REF_RE.match(lname) + if imatch is not None: + vdims = [x.strip() for x in imatch.group(2).split(',') + if ':' not in x] + for vname in vdims: + _ = self.find_variable(standard_name=vname) + # End for + # End if + if isinstance(my_var, VarDDT): + lname = my_var.get_parent_prop('local_name') + # End if + self.__used_variables.add(lname) + # End if + return my_var + + def add_variable(self, newvar, exists_ok=False, gen_unique=False, + adjust_intent=False): + """Add if it does not conflict with existing entries. + For the host model, this includes entries in used DDT variables. + If is True, attempting to add an identical copy is okay. + If is True, a new local_name will be created if a + local_name collision is detected. + if is True, adjust conflicting intents to inout.""" + standard_name = newvar.get_prop_value('standard_name') + cvar = self.find_variable(standard_name=standard_name, any_scope=False) + if cvar is None: + # Check the DDT dictionary + cvar = self.__ddt_dict.find_variable(standard_name=standard_name, + any_scope=False) + # end if + if cvar and (not exists_ok): + emsg = "Attempt to add duplicate host model variable, {}{}." 
+ emsg += "\nVariable originally defined{}" + ntx = context_string(newvar.context) + ctx = context_string(cvar.context) + raise CCPPError(emsg.format(standard_name, ntx, ctx)) + # end if + # No collision, proceed normally + super(HostModel, self).add_variable(newvar=newvar, exists_ok=exists_ok, + gen_unique=gen_unique, + adjust_intent=False) + + def add_host_variable_module(self, local_name, module, logger=None): + """Add a module name location for a host variable""" + if local_name not in self.__var_locations: + if logger is not None: + emsg = 'Adding variable, {}, from module, {}' + logger.debug(emsg.format(local_name, module)) + # End if + self.__var_locations[local_name] = module + else: + emsg = "Host variable, {}, already located in module" + raise CCPPError(emsg.format(self.__var_locations[local_name])) + # End if + + def call_list(self, phase): + "Return the list of variables passed by the host model to the host cap" + hdvars = list() + loop_vars = phase == 'run' + for hvar in self.variable_list(loop_vars=loop_vars, consts=False): + lname = hvar.get_prop_value('local_name') + if self.host_variable_module(lname) is None: + hdvars.append(hvar) + # End if + # End for + return hdvars + +############################################################################### + +if __name__ == "__main__": + from parse_tools import init_log, set_log_to_null + _LOGGER = init_log('host_registry') + set_log_to_null(_LOGGER) + # First, run doctest + import doctest + doctest.testmod() +# No else: diff --git a/scripts/metadata_parser.py b/scripts/metadata_parser.py index 1919ed4c..e3ac7440 100755 --- a/scripts/metadata_parser.py +++ b/scripts/metadata_parser.py @@ -12,8 +12,9 @@ from mkcap import Var sys.path.append(os.path.join(os.path.split(__file__)[0], 'fortran_tools')) -from parse_fortran import Ftype_type_decl -from metadata_table import MetadataHeader +from parse_fortran import FtypeTypeDecl +from parse_checkers import registered_fortran_ddt_names +from metadata_table import MetadataTable, parse_metadata_file # Output: This routine converts the argument tables for all subroutines / typedefs / kind / module variables # into dictionaries suitable to be used with ccpp_prebuild.py (which generates the fortran code for the caps) @@ -105,135 +106,129 @@ def read_new_metadata(filename, module_name, table_name, scheme_name = None, sub if filename in NEW_METADATA_SAVE.keys(): new_metadata_headers = NEW_METADATA_SAVE[filename] else: - new_metadata_headers = MetadataHeader.parse_metadata_file(filename) + new_metadata_headers = parse_metadata_file(filename, known_ddts=registered_fortran_ddt_names(), + logger=logging.getLogger(__name__)) NEW_METADATA_SAVE[filename] = new_metadata_headers # Record dependencies for the metadata table (only applies to schemes) - has_property_table = False dependencies = [] # Convert new metadata for requested table to old metadata dictionary metadata = collections.OrderedDict() for new_metadata_header in new_metadata_headers: - # Module or DDT tables - if not scheme_name: - # Module property tables - if new_metadata_header.property_table and new_metadata_header.title == module_name: - # If this is a ccpp-table-properties table for a module, it can only contain dependencies; - # ensure that for module tables, the header type is "module" - if not new_metadata_header.header_type == 'module': - raise Exception("Unsupported header_type '{}' for table properties for modules".format( - new_metadata_header.header_type)) - dependencies += new_metadata_header.dependencies - 
has_property_table = True - continue - # DDT property tables - elif new_metadata_header.property_table: - # If this is a ccpp-table-properties table for a DDT, it can only contain dependencies; - # ensure that for DDT tables, the header type is "ddt" - if not new_metadata_header.header_type == 'ddt': - raise Exception("Unsupported header_type '{}' for table properties for DDTs".format( - new_metadata_header.header_type)) - dependencies += new_metadata_header.dependencies - has_property_table = True - continue - # Module or DDT argument tables - else: - if not new_metadata_header.title == table_name: + for metadata_section in new_metadata_header.sections(): + # Module or DDT tables + if not scheme_name: + # Module property tables + if not metadata_section.title == table_name: # Skip this table, since it is not requested right now continue + # Distinguish between module argument tables and DDT argument tables - if new_metadata_header.title == module_name: + if metadata_section.title == module_name: container = encode_container(module_name) else: - container = encode_container(module_name, new_metadata_header.title) - else: - # Scheme property tables - if new_metadata_header.property_table and new_metadata_header.title == scheme_name: - # If this is a ccpp-table-properties table for a scheme, it can only contain dependencies; - # ensure that for scheme tables, the header type is "scheme" - if not new_metadata_header.header_type == 'scheme': - raise Exception("Unsupported header_type '{}' for table properties for schemes".format( - new_metadata_header.header_type)) - dependencies += new_metadata_header.dependencies - has_property_table = True - continue - # Scheme argument tables + container = encode_container(module_name, metadata_section.title) + + # Add to dependencies + if new_metadata_header.relative_path: + dependencies += [ os.path.join(new_metadata_header.relative_path, x) for x in new_metadata_header.dependencies] + else: + dependencies += new_metadata_header.dependencies else: - if not new_metadata_header.title == table_name: + # Scheme property tables + if not metadata_section.title == table_name: # Skip this table, since it is not requested right now continue + container = encode_container(module_name, scheme_name, table_name) - for new_var in new_metadata_header.variable_list(): - standard_name = new_var.get_prop_value('standard_name') - # DH* 2020-05-26 - # Legacy extension for inconsistent metadata (use of horizontal_dimension versus horizontal_loop_extent). - # Since horizontal_dimension and horizontal_loop_extent have the same attributes (otherwise it doesn't - # make sense), we swap the standard name and add a note to the long name - 2021-05-26: this is now an error. 
- legacy_note = '' - if standard_name == 'horizontal_loop_extent' and scheme_name and \ - (table_name.endswith("_init") or table_name.endswith("_finalize")): - #logging.warn("Legacy extension - replacing variable 'horizontal_loop_extent'" + \ - # " with 'horizontal_dimension' in table {}".format(table_name)) - #standard_name = 'horizontal_dimension' - #legacy_note = ' replaced by horizontal dimension (legacy extension)' - raise Exception("Legacy extension DISABLED: replacing variable 'horizontal_loop_extent'" + \ - " with 'horizontal_dimension' in table {}".format(table_name)) - elif standard_name == 'horizontal_dimension' and scheme_name and table_name.endswith("_run"): - #logging.warn("Legacy extension - replacing variable 'horizontal_dimension'" + \ - # " with 'horizontal_loop_extent' in table {}".format(table_name)) - #standard_name = 'horizontal_loop_extent' - #legacy_note = ' replaced by horizontal loop extent (legacy extension)' - raise Exception("Legacy extension DISABLED: replacing variable 'horizontal_dimension'" + \ - " with 'horizontal_loop_extent' in table {}".format(table_name)) - # Adjust dimensions - dimensions = new_var.get_prop_value('dimensions') - if scheme_name and (table_name.endswith("_init") or table_name.endswith("_finalize")) \ - and 'horizontal_loop_extent' in dimensions: - #logging.warn("Legacy extension - replacing dimension 'horizontal_loop_extent' with 'horizontal_dimension' " + \ - # "for variable {} in table {}".format(standard_name,table_name)) - #dimensions = ['horizontal_dimension' if x=='horizontal_loop_extent' else x for x in dimensions] - raise Exception("Legacy extension DISABLED: replacing dimension 'horizontal_loop_extent' with 'horizontal_dimension' " + \ - "for variable {} in table {}".format(standard_name,table_name)) - elif scheme_name and table_name.endswith("_run") and 'horizontal_dimension' in dimensions: - #logging.warn("Legacy extension - replacing dimension 'horizontal_dimension' with 'horizontal_loop_extent' " + \ - # "for variable {} in table {}".format(standard_name,table_name)) - #dimensions = ['horizontal_loop_extent' if x=='horizontal_dimension' else x for x in dimensions] - raise Exception("Legacy extension DISABLED: replacing dimension 'horizontal_dimension' with 'horizontal_loop_extent' " + \ - "for variable {} in table {}".format(standard_name,table_name)) - # *DH 2020-05-26 - if new_var.get_prop_value('active').lower() == '.true.': - active = 'T' - elif new_var.get_prop_value('active').lower() == '.false.': - active = 'F' - else: - # Replace multiple whitespaces, preserve case - active = ' '.join(new_var.get_prop_value('active').split()) - var = Var(standard_name = standard_name, - long_name = new_var.get_prop_value('long_name') + legacy_note, - units = new_var.get_prop_value('units'), - local_name = new_var.get_prop_value('local_name'), - type = new_var.get_prop_value('type'), - dimensions = dimensions, - container = container, - kind = new_var.get_prop_value('kind'), - intent = new_var.get_prop_value('intent'), - optional = 'T' if new_var.get_prop_value('optional') else 'F', - active = active, - ) - # Check for duplicates in same table - if standard_name in metadata.keys(): - raise Exception("Error, multiple definitions of standard name {} in new metadata table {}".format(standard_name, table_name)) - metadata[standard_name] = [var] - - # CCPP property tables are mandatory - if not has_property_table: - if scheme_name: - raise Exception("Metadata file {} for scheme {} does not have a [ccpp-table-properties] 
section,".format(filename, scheme_name) + \ - " or the 'name = ...' attribute in the [ccpp-table-properties] is wrong") - else: - raise Exception("Metadata file {} for table {} does not have a [ccpp-table-properties] section,".format(filename, table_name) + \ - " or the 'name = ...' attribute in the [ccpp-table-properties] is wrong") + + # Add to dependencies + if new_metadata_header.relative_path: + dependencies += [ os.path.join(new_metadata_header.relative_path, x) for x in new_metadata_header.dependencies] + else: + dependencies += new_metadata_header.dependencies + + for new_var in metadata_section.variable_list(): + standard_name = new_var.get_prop_value('standard_name') + # DH* 2020-05-26 + # Legacy extension for inconsistent metadata (use of horizontal_dimension versus horizontal_loop_extent). + # Since horizontal_dimension and horizontal_loop_extent have the same attributes (otherwise it doesn't + # make sense), we swap the standard name and add a note to the long name - 2021-05-26: this is now an error. + legacy_note = '' + if standard_name == 'horizontal_loop_extent' and scheme_name and \ + (table_name.endswith("_init") or table_name.endswith("_finalize")): + #logging.warn("Legacy extension - replacing variable 'horizontal_loop_extent'" + \ + # " with 'horizontal_dimension' in table {}".format(table_name)) + #standard_name = 'horizontal_dimension' + #legacy_note = ' replaced by horizontal dimension (legacy extension)' + raise Exception("Legacy extension DISABLED: replacing variable 'horizontal_loop_extent'" + \ + " with 'horizontal_dimension' in table {}".format(table_name)) + elif standard_name == 'horizontal_dimension' and scheme_name and table_name.endswith("_run"): + #logging.warn("Legacy extension - replacing variable 'horizontal_dimension'" + \ + # " with 'horizontal_loop_extent' in table {}".format(table_name)) + #standard_name = 'horizontal_loop_extent' + #legacy_note = ' replaced by horizontal loop extent (legacy extension)' + raise Exception("Legacy extension DISABLED: replacing variable 'horizontal_dimension'" + \ + " with 'horizontal_loop_extent' in table {}".format(table_name)) + + # Adjust dimensions + dimensions = new_var.get_prop_value('dimensions') + if scheme_name and (table_name.endswith("_init") or table_name.endswith("_finalize")) \ + and 'horizontal_loop_extent' in dimensions: + #logging.warn("Legacy extension - replacing dimension 'horizontal_loop_extent' with 'horizontal_dimension' " + \ + # "for variable {} in table {}".format(standard_name,table_name)) + #dimensions = ['horizontal_dimension' if x=='horizontal_loop_extent' else x for x in dimensions] + raise Exception("Legacy extension DISABLED: replacing dimension 'horizontal_loop_extent' with 'horizontal_dimension' " + \ + "for variable {} in table {}".format(standard_name,table_name)) + elif scheme_name and table_name.endswith("_run") and 'horizontal_dimension' in dimensions: + #logging.warn("Legacy extension - replacing dimension 'horizontal_dimension' with 'horizontal_loop_extent' " + \ + # "for variable {} in table {}".format(standard_name,table_name)) + #dimensions = ['horizontal_loop_extent' if x=='horizontal_dimension' else x for x in dimensions] + raise Exception("Legacy extension DISABLED: replacing dimension 'horizontal_dimension' with 'horizontal_loop_extent' " + \ + "for variable {} in table {}".format(standard_name,table_name)) + elif not scheme_name and 'horizontal_dimension' in dimensions: + raise Exception("Legacy extension DISABLED: replacing dimension 'horizontal_dimension' with 
'horizontal_loop_extent' " + \ + "for variable {} in table {}".format(standard_name,table_name)) + # *DH 2020-05-26 + + if not new_var.get_prop_value('active'): + # If it doesn't have an active attribute, then the variable is always active (default) + active = 'T' + elif new_var.get_prop_value('active').lower() == '.true.': + active = 'T' + elif new_var.get_prop_value('active') and new_var.get_prop_value('active').lower() == '.false.': + active = 'F' + else: + # Replace multiple whitespaces, preserve case + active = ' '.join(new_var.get_prop_value('active').split()) + + # DH* 20210812 + # Workaround for Fortran DDTs incorrectly having the type of + # the DDT copied into the kind attribute in parse_metadata_file + if new_var.is_ddt() and new_var.get_prop_value('kind'): + kind = '' + else: + kind = new_var.get_prop_value('kind') + #kind = new_var.get_prop_value('kind') + # *DH 20210812 + + var = Var(standard_name = standard_name, + long_name = new_var.get_prop_value('long_name') + legacy_note, + units = new_var.get_prop_value('units'), + local_name = new_var.get_prop_value('local_name'), + type = new_var.get_prop_value('type').lower(), + dimensions = dimensions, + container = container, + kind = kind, + intent = new_var.get_prop_value('intent'), + optional = 'T' if new_var.get_prop_value('optional') else 'F', + active = active, + ) + # Check for duplicates in same table + if standard_name in metadata.keys(): + raise Exception("Error, multiple definitions of standard name {} in new metadata table {}".format(standard_name, table_name)) + metadata[standard_name] = [var] return (metadata, dependencies) @@ -350,10 +345,10 @@ def parse_variable_tables(filepath, filename): # If type is not the first word, ignore the word elif j>0: continue - # Detect type definition using Ftype_type_decl class, routine + # Detect type definition using FtypeTypeDecl class, routine # type_def_line and extract type_name else: - type_declaration = Ftype_type_decl.type_def_line(line.strip()) + type_declaration = FtypeTypeDecl.type_def_line(line.strip()) if in_type: raise Exception('Nested definitions of derived types not supported') in_type = True diff --git a/scripts/metadata_table.py b/scripts/metadata_table.py index bf07df51..777af4c5 100755 --- a/scripts/metadata_table.py +++ b/scripts/metadata_table.py @@ -1,21 +1,51 @@ #!/usr/bin/env python """ +There are four types of CCPP metadata tables, scheme, module, ddt, and host. +A metadata file contains one or more metadata tables. +A metadata file SHOULD NOT mix metadata table types. The exception is a + metadata file which contains one or more ddt tables followed by a module + or host table. + +Each metadata table begins with a 'ccpp-table-properties' section followed by + one or more 'ccpp-arg-table' sections. These sections are described below. +A 'ccpp-arg-table' section is followed by one or more variable declaration + sections, also described below. + Metadata headers are in config file format. -The argument tables for schemes and variable definitions should -have a special section followed by variable declaration sections. -The special section name is ccpp-arg-table. The entries in this section are: -name = : the name of the file object which immediately follows the - argument table. It is one of the following possibilities: - - SubroutineName: the name of a subroutine (i.e., the name of - a scheme interface function such as SchemeName_run) + +A 'ccpp-table-properties' section entries are: +name = : the name of the following ccpp-arg-table entries (required). 
+ It is one of the following possibilities: + - SchemeName: the name of a scheme (i.e., the name of + a scheme interface (related to SubroutineName below). - DerivedTypeName: a derived type name for a type which will be used somewhere in the CCPP interface. - ModuleName: the name of the module whose module variables will be used somewhere in the CCPP interface -type = : The type of header, one of: + - HostName: the name of the host model. Variables in this section become + part of the CCPP UI, the CCPP routines called by the + host model (e.g., _ccpp_physics_run). +type = : The type of header (required), one of: - scheme: A CCPP subroutine - ddt: A header for a derived data type - module: A header on some module data + - host: A header on data which will be part of the CCPP UI + +The ccpp-arg-table section entries in this section are: +name = : the name of the file object which immediately follows the + argument table (required). + It is one of the following possibilities: + - SubroutineName: the name of a subroutine (i.e., the name of + a scheme interface function such as SchemeName_run) + - DerivedTypeName: a derived type name for a type which will be used + somewhere in the CCPP interface. + - ModuleName: the name of the module whose module variables will be + used somewhere in the CCPP interface + - HostName: the name of the host model. Variables in this section become + part of the CCPP UI, the CCPP routines called by the + host model (e.g., _ccpp_physics_run). +type = : The type of header (required). It must match the type of the + associated ccpp-table-properties section (see above). A variable declaration section begins with a variable name line (a local variable name enclosed in square brackets) followed by one or more @@ -26,8 +56,7 @@ Variable attribute statements may be combined on a line if separated by a vertical bar. -An example argument table is shown below (aside from the python comment -character at the start of each line). +An example argument table is shown below. [ccpp-table-properties] name = @@ -96,475 +125,1135 @@ """ # Python library imports -from __future__ import print_function -import os +import difflib +import os.path import re # CCPP framework imports -from common import CCPP_STAGES -from metavar import Var, VarDictionary -from parse_tools import ParseObject, ParseSource, register_fortran_ddt_name +from ccpp_state_machine import CCPP_STATE_MACH +from metavar import Var, VarDictionary, CCPP_CONSTANT_VARS +from parse_tools import ParseObject, ParseSource, ParseContext, context_string from parse_tools import ParseInternalError, ParseSyntaxError, CCPPError -from parse_tools import LITERAL_INT, FORTRAN_ID, FORTRAN_SCALAR_REF -from parse_tools import check_fortran_ref +from parse_tools import FORTRAN_ID, FORTRAN_SCALAR_REF, FORTRAN_SCALAR_REF_RE +from parse_tools import check_fortran_ref, check_fortran_id +from parse_tools import check_fortran_intrinsic +from parse_tools import register_fortran_ddt_name, unique_standard_name + +######################################################################## + +SCHEME_HEADER_TYPE = 'scheme' +_SINGLETON_TABLE_TYPES = ['ddt', 'host', 'module'] # Only one section per table +TABLE_TYPES = _SINGLETON_TABLE_TYPES + [SCHEME_HEADER_TYPE] +HEADER_TYPES = TABLE_TYPES + ['local'] +UNKNOWN_PROCESS_TYPE = 'UNKNOWN' + +_BLANK_LINE = re.compile(r"\s*[#;]") + +def blank_metadata_line(line): + """Return True if is a valid config format blank or comment + line. 
Also return True if we have reached the end of the file + (no line)""" + return (not line) or (_BLANK_LINE.match(line) is not None) ######################################################################## +def _parse_config_line(line, context): + """Parse a config line and return a list of keyword value pairs.""" + parse_items = list() + if line is None: + pass # No properties on this line + elif blank_metadata_line(line): + pass # No properties on this line + else: + properties = line.strip().split('|') + for prop in properties: + pitems = prop.split('=', 1) + if len(pitems) >= 2: + parse_items.append(pitems) + else: + raise ParseSyntaxError("variable property syntax", + token=prop, context=context) + # end if + # end for + # end if + return parse_items + +######################################################################## + +def parse_metadata_file(filename, known_ddts, logger): + """Parse and return list of parsed metadata tables""" + # Read all lines of the file at once + meta_tables = list() + table_titles = list() # Keep track of names in file + with open(filename, 'r') as infile: + fin_lines = infile.readlines() + for index, fin_line in enumerate(fin_lines): + fin_lines[index] = fin_line.rstrip('\n') + # end for + # end with + # Look for a header start + parse_obj = ParseObject(filename, fin_lines) + curr_line, curr_line_num = parse_obj.curr_line() + while curr_line is not None: + if MetadataTable.table_start(curr_line): + new_table = MetadataTable(parse_object=parse_obj, + known_ddts=known_ddts, logger=logger) + ntitle = new_table.table_name + if ntitle not in table_titles: + meta_tables.append(new_table) + table_titles.append(ntitle) + if new_table.table_type == 'ddt': + known_ddts.append(ntitle) + # end if + else: + errmsg = 'Duplicate metadata table, {}, at {}:{}' + ctx = curr_line_num + 1 + raise CCPPError(errmsg.format(ntitle, filename, ctx)) + # end if + curr_line, curr_line_num = parse_obj.curr_line() + elif blank_metadata_line(curr_line): + curr_line, curr_line_num = parse_obj.next_line() + else: + raise ParseSyntaxError('CCPP metadata line', token=curr_line, + context=parse_obj) + # end if + # end while + return meta_tables + +######################################################################## + +def find_scheme_names(filename): + """Find and return a list of all the physics scheme names in + . A scheme is identified by its ccpp-table-properties name. 
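The property parsing introduced above splits each metadata line first on '|' and then on the first '='. A simplified, standalone restatement is shown below; unlike `_parse_config_line`, it strips whitespace immediately and raises a plain `ValueError` instead of a `ParseSyntaxError` with file context.

```python
# Simplified restatement of the '|' / '=' splitting done by _parse_config_line.
def split_properties(line):
    """Return a list of [key, value] pairs from one metadata line."""
    pairs = []
    for prop in line.strip().split('|'):
        items = prop.split('=', 1)
        if len(items) < 2:
            raise ValueError("bad property syntax: {!r}".format(prop))
        # The framework strips keys/values at the call site; do it here.
        pairs.append([items[0].strip(), items[1].strip()])
    return pairs

print(split_properties("units = index | type = integer"))
# [['units', 'index'], ['type', 'integer']]
```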
+ """ + scheme_names = list() + with open(filename, 'r') as infile: + fin_lines = infile.readlines() + # end with + num_lines = len(fin_lines) + context = ParseContext(linenum=1, filename=filename) + while context.line_num <= num_lines: + if MetadataTable.table_start(fin_lines[context.line_num - 1]): + found_start = False + while not found_start: + line = fin_lines[context.line_num].strip() + context.line_num += 1 + if line and (line[0] == '['): + found_start = True + elif line: + props = _parse_config_line(line, context) + for prop in props: + # Look for name property + key = prop[0].strip().lower() + value = prop[1].strip() + if key == 'name': + scheme_names.append(value) + # end if + # end for + # end if + if context.line_num > num_lines: + break + # end if + # end while + else: + context.line_num += 1 + # end if + # end while + return scheme_names + +######################################################################## + +class MetadataTable(): + """Class to hold a CCPP Metadata table including the table header + (ccpp-table-properties section) and all of the associated table + sections (ccpp-arg-table sections).""" + + __table_start = re.compile(r"(?i)\s*\[\s*ccpp-table-properties\s*\]") + + def __init__(self, table_name_in=None, table_type_in=None, + dependencies=None, relative_path=None, known_ddts=None, + var_dict=None, module=None, parse_object=None, logger=None): + """Initialize a MetadataTable, either with a name, , and + type, , or with information from a file (). + if is None, and are + also stored. + If and / or module are passed (not allowed with + ".format(self.__class__.__name__, + self.table_name, id(self)) + + def __str__(self): + '''Print string for MetadataTable objects''' + return "<{} {}>".format(self.__class__.__name__, self.table_name) + + @classmethod + def table_start(cls, line): + """Return True iff is a ccpp-table-properties header statement. 
+ """ + if (line is None) or blank_metadata_line(line): + match = None + else: + match = cls.__table_start.match(line) + # end if + return match is not None + ######################################################################## -class MetadataHeader(ParseSource): +class MetadataSection(ParseSource): """Class to hold all information from a metadata header - >>> MetadataHeader(ParseObject("foobar.txt", \ - ["name = foobar", "type = scheme", "module = foo", \ + >>> MetadataSection("footable", "scheme", \ + parse_object=ParseObject("foobar.txt", \ + ["name = footable", "type = scheme", "module = foo", \ "[ im ]", "standard_name = horizontal_loop_extent", \ "long_name = horizontal loop extent, start at 1", \ "units = index | type = integer", \ "dimensions = () | intent = in"])) #doctest: +ELLIPSIS - <__main__.MetadataHeader foo / foobar at 0x...> - >>> MetadataHeader(ParseObject("foobar.txt", \ - ["name = foobar", "type = scheme", "module = foobar", \ + <__main__.MetadataSection foo / footable at 0x...> + >>> MetadataSection("footable", "scheme", \ + parse_object=ParseObject("foobar.txt", \ + ["name = footable", "type = scheme", "module = foobar", \ "[ im ]", "standard_name = horizontal_loop_extent", \ "long_name = horizontal loop extent, start at 1", \ "units = index | type = integer", \ - "dimensions = () | intent = in"])).get_var(standard_name='horizontal_loop_extent') #doctest: +ELLIPSIS + "dimensions = () | intent = in"])).find_variable('horizontal_loop_extent') #doctest: +ELLIPSIS - >>> MetadataHeader(ParseObject("foobar.txt", \ - ["name = foobar", "module = foo", \ + >>> MetadataSection("footable", "scheme", \ + parse_object=ParseObject("foobar.txt", \ + ["name = footable", "type = scheme", "module = foobar", \ + "process = microphysics", "[ im ]", \ + "standard_name = horizontal_loop_extent", \ + "long_name = horizontal loop extent, start at 1", \ + "units = index | type = integer", \ + "dimensions = () | intent = in"])).find_variable('horizontal_loop_extent') #doctest: +ELLIPSIS + + >>> MetadataSection("footable", "scheme", \ + parse_object=ParseObject("foobar.txt", \ + ["name = footable", "type=scheme", "module = foo", \ "[ im ]", "standard_name = horizontal_loop_extent", \ "long_name = horizontal loop extent, start at 1", \ "units = index | type = integer", \ "dimensions = () | intent = in", \ - " subroutine foo()"])).get_var(standard_name='horizontal_loop_extent') #doctest: +IGNORE_EXCEPTION_DETAIL + " subroutine foo()"])).find_variable('horizontal_loop_extent') #doctest: +IGNORE_EXCEPTION_DETAIL Traceback (most recent call last): - ParseSyntaxError: Missing metadata header type, at foobar.txt:7 - >>> MetadataHeader(ParseObject("foobar.txt", \ - ["name = foobar", "type = scheme", "module=foobar", \ + parse_source.ParseSyntaxError: Invalid variable property syntax, 'subroutine foo()', at foobar.txt:9 + >>> MetadataSection("footable", "scheme", \ + parse_object=ParseObject("foobar.txt", \ + ["name = footable", "type = scheme", "module=foobar", \ "[ im ]", "standard_name = horizontal_loop_extent", \ "long_name = horizontal loop extent, start at 1", \ "units = index | type = integer", \ "dimensions = () | intent = in", \ - ""], line_start=0)).get_var(standard_name='horizontal_loop_extent').get_prop_value('local_name') + ""], line_start=0)).find_variable('horizontal_loop_extent').get_prop_value('local_name') 'im' - >>> MetadataHeader(ParseObject("foobar.txt", \ - ["name = foobar", "type = scheme" \ - "[ im ]", "standard_name = horizontal loop extent", \ + >>> 
MetadataSection("footable", "scheme", \ + parse_object=ParseObject("foobar.txt", \ + ["name = footable", "type = scheme" \ + "[ im ]", "standard_name = horizontalloop extent", \ "long_name = horizontal loop extent, start at 1", \ "units = index | type = integer", \ "dimensions = () | intent = in", \ - ""], line_start=0)).get_var(standard_name='horizontal_loop_extent') #doctest: +IGNORE_EXCEPTION_DETAIL - Traceback (most recent call last): - ParseSyntaxError: Invalid variable property value, 'horizontal loop extent', at foobar.txt:2 - >>> MetadataHeader(ParseObject("foobar.txt", \ + ""], line_start=0)).find_variable('horizontal_loop_extent') + + >>> MetadataSection("footable", "scheme", \ + parse_object=ParseObject("foobar.txt", \ ["[ccpp-arg-table]", "name = foobar", "type = scheme" \ "[ im ]", "standard_name = horizontal loop extent", \ "long_name = horizontal loop extent, start at 1", \ "units = index | type = integer", \ "dimensions = () | intent = in", \ - ""], line_start=0)).get_var(standard_name='horizontal_loop_extent') #doctest: +IGNORE_EXCEPTION_DETAIL - Traceback (most recent call last): - ParseSyntaxError: Invalid property syntax, '[ccpp-arg-table]', at foobar.txt:1 - >>> MetadataHeader(ParseObject("foobar.txt", \ + ""], line_start=0)).find_variable('horizontal_loop_extent') + + >>> MetadataSection("foobar", "scheme", \ + parse_object=ParseObject("foobar.txt", \ ["name = foobar", "module = foo" \ "[ im ]", "standard_name = horizontal loop extent", \ "long_name = horizontal loop extent, start at 1", \ "units = index | type = integer", \ "dimensions = () | intent = in", \ - ""], line_start=0)).get_var(standard_name='horizontal_loop_extent') #doctest: +IGNORE_EXCEPTION_DETAIL - Traceback (most recent call last): - ParseSyntaxError: Invalid metadata header start, no table type, at foobar.txt:2 - >>> MetadataHeader.__var_start__.match('[ qval ]') #doctest: +ELLIPSIS - <_sre.SRE_Match object at 0x...> - >>> MetadataHeader.__var_start__.match('[ qval(hi_mom) ]') #doctest: +ELLIPSIS - <_sre.SRE_Match object at 0x...> -""" + ""], line_start=0)).find_variable('horizontal_loop_extent') - __header_start__ = re.compile(r"(?i)\s*\[\s*(ccpp-table-properties|ccpp-arg-table)\s*\]") - - __var_start__ = re.compile(r"^\[\s*("+FORTRAN_ID+r"|"+LITERAL_INT+r"|"+FORTRAN_SCALAR_REF+r")\s*\]$") + >>> MetadataSection("foobar", "scheme", \ + parse_object=ParseObject("foobar.txt", \ + ["name = foobar", "foo = bar" \ + "[ im ]", "standard_name = horizontal loop extent", \ + "long_name = horizontal loop extent, start at 1", \ + "units = index | type = integer", \ + "dimensions = () | intent = in", \ + ""], line_start=0)).find_variable('horizontal_loop_extent') - __blank_line__ = re.compile(r"\s*[#;]") + >>> MetadataSection.header_start('[ ccpp-arg-table ]') + True + >>> MetadataSection.header_start('[ qval ]') + False + >>> MetadataSection.header_start(' local_name = foo') + False + >>> MetadataSection.variable_start('[ qval ]', ParseObject('foo.meta', [])) + 'qval' + >>> MetadataSection.variable_start('[ qval(hi_mom) ]', ParseObject('foo.meta', [])) + 'qval(hi_mom)' + >>> MetadataSection.variable_start(' local_name = foo', ParseContext(filename='foo.meta', linenum=1)) - __html_template__ = """ - - -{title} - - - -
-{header}{contents}
- - """ - def __init__(self, parse_object=None, - title=None, type_in=None, module=None, var_dict=None, - property_table=False, logger=None): - self._pobj = parse_object + __header_start = re.compile(r"(?i)\s*\[\s*ccpp-arg-table\s*\]") + + __var_start = re.compile(r"^\[\s*"+FORTRAN_ID+r"\s*\]$") + + __vref_start = re.compile(r"^\[\s*"+FORTRAN_SCALAR_REF+r"\s*\]$") + + def __init__(self, table_name, table_type, parse_object=None, + title=None, type_in=None, module=None, process_type=None, + var_dict=None, known_ddts=None, logger=None): """If is not None, initialize from the current file and location in . If is None, initialize from , <type>, <module>, and <var_dict>. Note that if <parse_object> is not None, <title>, <type>, <module>, and <var_dict> are ignored. + <table_name> and <table_type> are the name and type of the + metadata header of which this section is a part. They must match + the type and name of this section (once the name action has been + removed, e.g., name = foo_init matches type foo). """ + self.__pobj = parse_object + self.__variables = None # In case __init__ crashes + self.__section_title = None + self.__header_type = None + self.__module_name = None + self.__process_type = UNKNOWN_PROCESS_TYPE + self.__section_valid = True if parse_object is None: - if title is None: - raise ParseInternalError('MetadataHeader requires a title') + if title is not None: + self.__section_title = title else: - self._table_title = title - # End if + raise ParseInternalError('MetadataSection requires a title') + # end if if type_in is None: - raise ParseInternalError('MetadataHeader requires a header type') + perr = 'MetadataSection requires a header type' + raise ParseInternalError(perr) + # end if + if type_in in HEADER_TYPES: + self.__header_type = type_in else: - self._header_type = type - # End if - if module is None: - raise ParseInternalError('MetadataHeader requires a module name') + self.__pobj.add_syntax_err("metadata arg table type", + token=type_in) + self.__section_valid = False + # end if + mismatch = self.section_table_mismatch(table_name, table_type) + if mismatch: + self.__pobj.add_syntax_err(mismatch) + self.__section_valid = False + # end if + mismatch = self.section_table_mismatch(table_name, table_type) + if mismatch: + raise CCPPError(mismatch) + # end if + if module is not None: + self.__module_name = module else: - self._module_name = module - # End if + perr = "MetadataSection requires a module name" + self.__pobj.add_syntax_err(perr) + self.__section_valid = False + # end if + if process_type is None: + self.__process_type = UNKNOWN_PROCESS_TYPE + else: + self.__process_type = process_type + # end if # Initialize our ParseSource parent - super(MetadataHeader, self).__init__(self.title, - self.header_type, self._pobj) - self._variables = VarDictionary(self.title, logger=logger) + super(MetadataSection, self).__init__(self.title, + self.header_type, self.__pobj) + self.__variables = VarDictionary(self.title, logger=logger) for var in var_dict.variable_list(): # Let this crash if no dict - self._variables.add_variable(var) - # End for + self.__variables.add_variable(var) + # end for + self.__start_context = None else: - self.__init_from_file__(parse_object, property_table, logger) - # End if + if known_ddts is None: + known_ddts = list() + # end if + self.__start_context = ParseContext(context=self.__pobj) + self.__init_from_file(table_name, table_type, known_ddts, logger) + # end if + # Register this header if it is a DDT + if self.header_type == 'ddt': + 
register_fortran_ddt_name(self.title) + # end if # Categorize the variables self._var_intents = {'in' : list(), 'out' : list(), 'inout' : list()} for var in self.variable_list(): intent = var.get_prop_value('intent') if intent is not None: self._var_intents[intent].append(var) - # End if - # End for - - def __init_from_file__(self, parse_object, property_table, logger): - # Read the table preamble, assume the caller already figured out - # the first line of the header using the table_start method. - curr_line, curr_line_num = self._pobj.next_line() - self._table_title = None - self._header_type = None - self._module_name = None - self._dependencies = [] - relative_path_local = '' - self._property_table = property_table - while (curr_line is not None) and (not self.variable_start(curr_line)) and (not MetadataHeader.table_start(curr_line)): - for property in self.parse_config_line(curr_line): + # end if + # end for + + def _default_module(self): + """Set a default module for this header""" + mfile = self.__pobj.file_name + if mfile[-5:] == '.meta': + # Default value is a Fortran module that matches the filename + def_mod = os.path.basename(mfile)[:-5] + else: + def_mod = os.path.basename(mfile) + last_dot = def_mod.rfind('.') + if last_dot >= 0: + ldef = len(def_mod) + def_mod = def_mod[:last_dot-ldef] + # end if + # end if + return def_mod + + def __init_from_file(self, table_name, table_type, known_ddts, logger): + """ Read the section preamble, assume the caller already figured out + the first line of the header using the header_start method.""" + start_ctx = context_string(self.__pobj) + curr_line, _ = self.__pobj.next_line() # Skip past [ccpp-arg-table] + while ((curr_line is not None) and + (not MetadataSection.variable_start(curr_line, self.__pobj)) and + (not MetadataSection.header_start(curr_line)) and + (not MetadataTable.table_start(curr_line))): + for prop in _parse_config_line(curr_line, self.__pobj): # Manually parse name, type, and module properties - key = property[0].strip().lower() - value = property[1].strip() + key = prop[0].strip().lower() + value = prop[1].strip() if key == 'name': - self._table_title = value + self.__section_title = value elif key == 'type': - if value not in ['module', 'scheme', 'ddt']: - raise ParseSyntaxError("metadata table type", - token=value, - context=self._pobj) - # End if - self._header_type = value + if value not in HEADER_TYPES: + self.__pobj.add_syntax_err("metadata table type", + token=value) + self.__section_valid = False + close = difflib.get_close_matches(value, HEADER_TYPES) + if close: + self.__header_type = close[0] # Allow error continue + # end if + # end if + # Set value even if error so future error msgs make sense + self.__header_type = value elif key == 'module': - if value == "None": - raise ParseSyntaxError("metadata table, no module", - context=self._pobj) + if value != "None": + self.__module_name = value else: - self._module_name = value - # End if - elif key == 'dependencies': - if not(value == "None" or value == ""): - # Remove trailing comma, remove white spaces from each list element - self._dependencies += [ v.strip() for v in value.rstrip(",").split(",") ] - elif key == 'relative_path': - relative_path_local = value.strip() + self.__pobj.add_syntax_err("metadata table, no module") + self.__module_name = 'INVALID' # Allow error continue + self.__section_valid = False + # end if + elif key == 'process': + self.__process_type = value else: - raise ParseSyntaxError("metadata table start property", - token=value, 
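`_default_module` above derives a fallback module name from the metadata file name. A standalone restatement of that rule, with a hypothetical helper name:

```python
import os.path

# A '<name>.meta' file defaults to module '<name>'; any other file name
# simply drops its final extension (illustrative helper, not framework API).
def default_module_for(meta_filename):
    base = os.path.basename(meta_filename)
    if base.endswith('.meta'):
        return base[:-len('.meta')]
    root, _ = os.path.splitext(base)
    return root

print(default_module_for('physics/example_scheme.meta'))  # example_scheme
print(default_module_for('example_host.F90'))             # example_host
```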
context=self._pobj) - # End if - # End for - curr_line, curr_line_num = self._pobj.next_line() - # End while + self.__pobj.add_syntax_err("metadata table start property", + token=value) + self.__process_type = 'INVALID' # Allow error continue + self.__section_valid = False + # end if + # end for + curr_line, _ = self.__pobj.next_line() + # end while if self.title is None: - raise ParseSyntaxError("metadata header start, no table name", - token=curr_line, context=self._pobj) - elif self.header_type is None: - raise ParseSyntaxError("metadata header start, no table type", - token=curr_line, context=self._pobj) - elif self.header_type == "ddt": - register_fortran_ddt_name(self.title) - # End if - # Add relative path to dependencies - if self.dependencies and relative_path_local: - self._dependencies = [ os.path.join(relative_path_local, v) for v in self.dependencies] + self.__pobj.add_syntax_err("metadata header start, no table name", + token=curr_line) + self.__section_valid = False + # end if + if self.header_type is None: + self.__pobj.add_syntax_err("metadata header start, no table type", + token=curr_line) + self.__section_valid = False + # end if + if ((self.header_type != SCHEME_HEADER_TYPE) and + (self.process_type != UNKNOWN_PROCESS_TYPE)): + emsg = "process keyword only allowed for a scheme" + self.__pobj.add_syntax_err(emsg, token=curr_line) + self.__process_type = UNKNOWN_PROCESS_TYPE # Allow error continue + self.__section_valid = False + # end if + mismatch = self.section_table_mismatch(table_name, table_type) + if mismatch: + self.__pobj.add_syntax_err(mismatch) + self.__section_valid = False + # end if + if logger: + logger.info("Parsing {} {}{}".format(self.header_type, + self.title, start_ctx)) + # end if + if self.header_type == "ddt": + known_ddts.append(self.title) + # end if + # We need a default module if none was listed + if self.module is None: + self.__module_name = self._default_module() + # end if # Initialize our ParseSource parent - super(MetadataHeader, self).__init__(self.title, - self.header_type, self._pobj) + super(MetadataSection, self).__init__(self.title, + self.header_type, self.__pobj) # Read the variables - valid_lines = True - self._variables = VarDictionary(self.title, logger=logger) + valid_lines = True + self.__variables = VarDictionary(self.title, logger=logger) while valid_lines: - newvar, curr_line = self.parse_variable(curr_line) + newvar, curr_line = self.parse_variable(curr_line, known_ddts) valid_lines = newvar is not None if valid_lines: - self._variables.add_variable(newvar) + if logger: + dmsg = 'Adding {} to {}' + lname = newvar.get_prop_value('local_name') + logger.debug(dmsg.format(lname, self.title)) + # end if + self.__variables.add_variable(newvar) # Check to see if we hit the end of the table - valid_lines = not MetadataHeader.table_start(curr_line) - # No else, we just run off the end of the table - # End if - # End while - - def parse_config_line(self, line): - "Parse a config line and return a list of keyword value pairs." 
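When the section parser above hits an invalid `type =` entry it records a syntax error but keeps going, using `difflib.get_close_matches` to pick the most plausible known header type. The list below mirrors the `HEADER_TYPES` constant defined earlier in this diff; the loop is illustrative.

```python
import difflib

HEADER_TYPES = ['ddt', 'host', 'module', 'scheme', 'local']

for typo in ('sceme', 'moduel', 'hostt', 'banana'):
    close = difflib.get_close_matches(typo, HEADER_TYPES)
    guess = close[0] if close else None
    print('{!r:10} -> {!r}'.format(typo, guess))
# 'sceme' -> 'scheme', 'moduel' -> 'module', 'hostt' -> 'host',
# 'banana' -> None (no close match, the error stands)
```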
- parse_items = list() - if line is None: - pass # No properties on this line - elif MetadataHeader.is_blank(line): - pass # No properties on this line - else: - properties = line.strip().split('|') - for property in properties: - pitems = property.split('=', 1) - if len(pitems) < 2: - raise ParseSyntaxError("variable property syntax", - token=property, - context=self._pobj) - else: - parse_items.append(pitems) - # End if - # End for - # End if - return parse_items - - def parse_variable(self, curr_line): - # The header line has the format [ <valid_fortran_symbol> ] - # Parse header - valid_line = (curr_line is not None) and (not MetadataHeader.table_start(curr_line)) + valid_lines = not MetadataSection.header_start(curr_line) + else: + # We have a bad variable, see if we have more variables + lname = MetadataSection.variable_start(curr_line, self.__pobj) + valid_lines = lname is not None + # end while + # end if + # end while + + def parse_variable(self, curr_line, known_ddts): + """Parse a new metadata variable beginning on <curr_line>. + The header line has the format [ <valid_fortran_symbol> ]. + """ + newvar = None + var_ok = True # Set to False if an error is detected + valid_line = ((curr_line is not None) and + (not MetadataSection.header_start(curr_line)) and + (not MetadataTable.table_start(curr_line))) if valid_line: - local_name = self.variable_start(curr_line) # caller handles exception + # variable_start handles exception + local_name = MetadataSection.variable_start(curr_line, self.__pobj) else: local_name = None - # End if + # end if if local_name is None: # This is not a valid variable line, punt (should be end of table) valid_line = False - # End if + # end if # Parse lines until invalid line is found # NB: Header variables cannot have embedded blank lines if valid_line: var_props = {} var_props['local_name'] = local_name + # Grab context that points at beginning of definition + context = ParseContext(context=self.__pobj) else: var_props = None - # End if + # end if while valid_line: - curr_line, curr_line_num = self._pobj.next_line() + curr_line, _ = self.__pobj.next_line() valid_line = ((curr_line is not None) and - (not MetadataHeader.is_blank(curr_line)) and - (not MetadataHeader.table_start(curr_line)) and - (self.variable_start(curr_line) is None)) + (not MetadataSection.header_start(curr_line)) and + (not MetadataTable.table_start(curr_line)) and + (MetadataSection.variable_start(curr_line, + self.__pobj) is None)) # A valid line may have multiple properties (separated by '|') if valid_line: - properties = self.parse_config_line(curr_line) - for property in properties: - try: - pname = property[0].strip() - pval_str = property[1].strip() + properties = _parse_config_line(curr_line, self.__pobj) + for prop in properties: + pname = prop[0].strip().lower() + pval_str = prop[1].strip() + if ((pname == 'type') and + (not check_fortran_intrinsic(pval_str, error=False))): + if pval_str in known_ddts: + pval = pval_str + pname = 'ddt_type' + else: + errmsg = "Unknown DDT type, {}".format(pval_str) + self.__pobj.add_syntax_err(errmsg) + self.__section_valid = False + var_ok = False + # end if + else: # Make sure this is a match - hp = Var.get_prop(pname) - if hp is not None: - pval = hp.valid_value(pval_str) + check_prop = Var.get_prop(pname) + if check_prop is not None: + pval = check_prop.valid_value(pval_str) else: - raise ParseSyntaxError("variable property name", - token=pname, - context=self._pobj) - # End if + emsg = "variable property name" + 
self.__pobj.add_syntax_err(emsg, token=pname) + self.__section_valid = False + var_ok = False + # end if if pval is None: - raise ParseSyntaxError("'{}' property value".format(pname), - token=pval_str, - context=self._pobj) - # End if - except ParseSyntaxError as p: - raise p - # If we get this far, we have a valid property. - var_props[pname] = pval - # End for - # End if - # End while - if var_props is None: - return None, curr_line - else: + errmsg = "'{}' property value" + self.__pobj.add_syntax_err(errmsg.format(pname), + token=pval_str) + self.__section_valid = False + var_ok = False + # end if + # end if + if var_ok: + # If we get this far, we have a valid property. + # Special case for dimensions, turn them into ranges + if pname == 'dimensions': + porig = pval + pval = list() + for dim in porig: + if ':' in dim: + pval.append(dim) + else: + cone_str = 'ccpp_constant_one:{}' + pval.append(cone_str.format(dim)) + # end if + # end for + # end if + # Add the property to our Var dictionary + var_props[pname] = pval + # end if + # end for + # end if + # end while + if var_ok and (var_props is not None): + # Check for array reference + sub_name = MetadataSection.check_array_reference(local_name, + var_props, context) + if sub_name: + var_props['local_name'] = sub_name + # end if (else just leave the local name alone) try: - newvar = Var(var_props, source=self) - except CCPPError as ve: - raise ParseSyntaxError(ve, context=self._pobj) - return newvar, curr_line - # End if - - def variable_list(self): - "Return an ordered list of the header's variables" - return self._variables.variable_list() - - def to_html(self, outdir, props): - """Write html file for metadata table and return filename. - Skip metadata headers without variables""" - if not self._variables.variable_list(): - return None - # Write table header - header = "<tr>" - for prop in props: - header += "<th>{}</th>".format(prop) - header += "</tr>\n" - # Write table contents, one row per variable - contents = "" - for var in self._variables.variable_list(): - row = "<tr>" - for prop in props: - value = var.get_prop_value(prop) - # Pretty-print for dimensions - if prop == 'dimensions': - value = '(' + ', '.join(value) + ')' - elif value is None: - value = "n/a" - row += "<td>{}</td>".format(value) - row += "</tr>\n" - contents += row - filename = os.path.join(outdir, self.title + '.html') - with open(filename,"w") as f: - f.writelines(self.__html_template__.format(title=self.title + ' argument table', - header=header, contents=contents)) - return filename - - def get_var(self, standard_name=None, intent=None): - if standard_name is not None: - var = self._variables.find_variable(standard_name) - return var - elif intent is not None: - if intent not in self._var_intents: - raise ParseInternalError("Illegal intent type, '{}', in {}".format(intent, self.title), context=self._pobj) - # End if - return self._var_intents[intent] + newvar = Var(var_props, source=self, context=context) + except CCPPError as verr: + self.__pobj.add_syntax_err(verr, skip_context=True) + var_ok = False + self.__section_valid = False + # end try + # No else, will return None for newvar + # end if + return newvar, curr_line + + @staticmethod + def check_array_reference(local_name, var_dict, context): + """If <local_name> is an array reference, check it against + the 'dimensions' property in <var_dict>. If <local_name> is an + array reference, return it with the colons filled in with the + dictionary dimensions, otherwise, return None. 
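The special-case handling of the 'dimensions' property above rewrites every bare extent as an explicit range starting at `ccpp_constant_one`. A minimal restatement:

```python
# Any dimension given without ':' becomes a 'ccpp_constant_one:<dim>' range;
# dimensions that already contain a range are left untouched.
def normalize_dimensions(dims):
    out = []
    for dim in dims:
        out.append(dim if ':' in dim else 'ccpp_constant_one:{}'.format(dim))
    return out

print(normalize_dimensions(['horizontal_loop_extent',
                            'ccpp_constant_one:vertical_layer_dimension']))
# ['ccpp_constant_one:horizontal_loop_extent',
#  'ccpp_constant_one:vertical_layer_dimension']
```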
+ >>> MetadataSection.check_array_reference('foo', {'dimensions':['ccpp_constant_one:bar', 'ccpp_constant_one:baz']}, ParseContext(filename='foo.meta')) + + >>> MetadataSection.check_array_reference('foo', {}, ParseContext(filename='foo.meta')) + + >>> MetadataSection.check_array_reference('foo(qux', {'dimensions':['ccpp_constant_one:bar', 'ccpp_constant_one:baz']}, ParseContext(filename='foo.meta')) #doctest: +IGNORE_EXCEPTION_DETAIL + Traceback (most recent call last): + ParseInternalError: Invalid scalar reference, foo(qux, in foo.meta + >>> MetadataSection.check_array_reference('foo(qux)', {'dimensions':['ccpp_constant_one:bar', 'ccpp_constant_one:baz']}, ParseContext(filename='foo.meta')) #doctest: +IGNORE_EXCEPTION_DETAIL + Traceback (most recent call last): + ParseInternalError: foo has rank 2 but foo(qux) has 0, in foo.meta + >>> MetadataSection.check_array_reference('foo(:,qux)', {'dimensions':['ccpp_constant_one:bar', 'ccpp_constant_one:baz']}, ParseContext(filename='foo.meta')) #doctest: +IGNORE_EXCEPTION_DETAIL + Traceback (most recent call last): + ParseInternalError: foo has rank 2 but foo(:,qux) has 1, in foo.meta + >>> MetadataSection.check_array_reference('foo(:,qux)', {'foo':['ccpp_constant_one:bar', 'ccpp_constant_one:baz']}, ParseContext(filename='foo.meta')) #doctest: +IGNORE_EXCEPTION_DETAIL + Traceback (most recent call last): + ParseInternalError: Missing variable dimensions, foo(:,qux), in foo.meta + >>> MetadataSection.check_array_reference('foo(:,:,qux)', {'dimensions':['ccpp_constant_one:bar']}, ParseContext(filename='foo.meta')) #doctest: +IGNORE_EXCEPTION_DETAIL + Traceback (most recent call last): + ParseInternalError: foo has rank 1 but foo(:,:,qux) has 2, in foo.meta + >>> MetadataSection.check_array_reference('foo(:,:,qux)', {'dimensions':['ccpp_constant_one:bar','ccpp_constant_one:baz']}, ParseContext(filename='foo.meta')) + 'foo(:, :, qux)' + """ + retval = None + if check_fortran_id(local_name, var_dict, False) is None: + rmatch = FORTRAN_SCALAR_REF_RE.match(local_name) + if rmatch is None: + errmsg = 'Invalid scalar reference, {}{}' + ctx = context_string(context) + raise ParseInternalError(errmsg.format(local_name, ctx)) + # end if + rname = rmatch.group(1) + rdims = [x.strip() for x in rmatch.group(2).split(',')] + if 'dimensions' in var_dict: + vdims = [x.strip() for x in var_dict['dimensions']] + else: + errmsg = 'Missing variable dimensions, {}{}' + ctx = context_string(context) + raise ParseInternalError(errmsg.format(local_name, ctx)) + # end if + colon_rank = len([x for x in rdims if x == ':']) + if colon_rank != len(vdims): + errmsg = '{} has rank {} but {} has {}{}' + ctx = context_string(context) + raise ParseInternalError(errmsg.format(rname, len(vdims), + local_name, colon_rank, + ctx)) + # end if + sub_dims = list() + sindex = 0 + for rind in rdims: + if rind == ':': + sub_dims.append(':') + sindex += 1 + else: + sub_dims.append(rind) + # end if + # end for + retval = '{}({})'.format(rname, ', '.join(sub_dims)) + # end if + return retval + + def variable_list(self, std_vars=True, loop_vars=True, consts=True): + """Return an ordered list of the header's variables""" + return self.__variables.variable_list(recursive=False, + std_vars=std_vars, + loop_vars=loop_vars, + consts=consts) + + def find_variable(self, std_name, use_local_name=False): + """Find a variable in this header's dictionary""" + var = None + if use_local_name: + var = self.__variables.find_local_name(std_name) else: - return None + var = 
self.__variables.find_variable(std_name, any_scope=False) + # end if + return var + + def convert_dims_to_standard_names(self, var, logger=None, context=None): + """Convert the dimension elements in <var> to standard names by + by using other variables in this header. + """ + std_dims = list() + vdims = var.get_dimensions() + # Check for bad dimensions + if vdims is None: + vdim_prop = var.get_prop_value('dimensions').strip() + if vdim_prop[0] == '(': + vdim_prop = vdim_prop[1:] + # end if + if vdim_prop[-1] == ')': + vdim_prop = vdim_prop[0:-1] + # end if + vdim_strs = [x.strip() for x in vdim_prop.split(',')] + lname = var.get_prop_value('local_name') + ctx = context_string(var.context) + sep = '' + errstr = "{}{}: Invalid dimension, '{}'{}" + errmsg = '' + for vdim in vdim_strs: + if not check_fortran_id(vdim, None, False): + errmsg += errstr.format(sep, lname, vdim, ctx) + sep = '\n' + # end if + # end for + raise CCPPError("{}".format(errmsg)) + # end if + for dim in vdims: + std_dim = list() + if ':' not in dim: + # Metadata dimensions always have an explicit start + var_one = CCPP_CONSTANT_VARS.find_local_name('1') + if var_one is not None: + std = var_one.get_prop_value('standard_name') + std_dim.append(std) + # end if + # end if + for item in dim.split(':'): + try: + _ = int(item) + dvar = CCPP_CONSTANT_VARS.find_local_name(item) + if dvar is not None: + # If this integer value is a CCPP standard int, use that + dname = dvar.get_prop_value('standard_name') + else: + # Some non-standard integer value + dname = item + # end if + except ValueError: + # Not an integer, try to find the standard_name + if not item: + # Naked colons are okay + dname = '' + else: + dvar = self.find_variable(item, use_local_name=True) + if dvar is not None: + dname = dvar.get_prop_value('standard_name') + else: + dname = None + # end if + # end if + if dname is None: + errmsg = "Unknown dimension element, {}, in {}{}" + std = var.get_prop_value('local_name') + ctx = context_string(context) + if logger is not None: + errmsg = "WARNING: " + errmsg + logger.error(errmsg.format(item, std, ctx)) + dname = unique_standard_name() + else: + raise CCPPError(errmsg.format(item, std, ctx)) + # end if + # end if + # end try + if dname is not None: + std_dim.append(dname) + else: + std_dim = None + break + # end if + # end for + if std_dim is not None: + std_dims.append(':'.join(std_dim)) + else: + break + # end if + # end for + + return std_dims def prop_list(self, prop_name): - "Return list of <prop_name> values for this scheme's arguments" - return self._variables.prop_list(prop_name) + """Return list of <prop_name> values for this scheme's arguments""" + return self.__variables.prop_list(prop_name) + + def section_table_mismatch(self, table_title, table_type): + """Return an error string if this arg table does not match its + metadata table parent. 
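`convert_dims_to_standard_names` above maps each element of a dimension spec (a literal integer, a local name, or a naked colon) to a standard name. The toy sketch below illustrates the idea with a hypothetical lookup table; the real code consults the section's `VarDictionary` and `CCPP_CONSTANT_VARS`, and falls back to generated unique names or errors when a lookup fails.

```python
# Hypothetical local-name -> standard-name table for illustration only.
LOCAL_TO_STANDARD = {'1': 'ccpp_constant_one',
                     'ncol': 'horizontal_dimension',
                     'pver': 'vertical_layer_dimension'}

def dims_to_standard_names(dims):
    std_dims = []
    for dim in dims:
        parts = dim.split(':')
        if len(parts) == 1:
            # Metadata dimensions always get an explicit start index.
            parts = ['1'] + parts
        std = [LOCAL_TO_STANDARD.get(p, p) if p else '' for p in parts]
        std_dims.append(':'.join(std))
    return std_dims

print(dims_to_standard_names(['ncol', '1:pver', ':']))
# ['ccpp_constant_one:horizontal_dimension',
#  'ccpp_constant_one:vertical_layer_dimension', ':']
```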
If they match , return an empty string.""" + mismatch = "" + # The header type must match its table's type + if self.header_type is None: + mstr = "Invalid section type, 'None'" + mismatch += mstr.format(self.header_type, table_type) + elif table_type != self.header_type: + mstr = "Section type, '{}', does not match table type, '{}'" + mismatch += mstr.format(self.header_type, table_type) + # end if + if self.header_type == SCHEME_HEADER_TYPE: + # For schemes, strip off the scheme function phase (e.g., _init) + sect_func, _, _ = CCPP_STATE_MACH.function_match(self.title) + else: + sect_func = self.title + # end if + # The Fortran parser cannot tell a scheme from a host subroutine + # Detect this and adjust + if sect_func is None: + sect_func = self.title + # end if + # The header name (minus phase) must match its table's name + if table_title != sect_func: + if mismatch: + mismatch += '\n' + # end if + mstr = "Section name, '{}', does not match table title, '{}'" + mismatch += mstr.format(self.title, table_title) + # end if + if mismatch: + mismatch += context_string(self.__pobj) + # end if + return mismatch - def variable_start(self, line): + @staticmethod + def variable_start(line, pobj): """Return variable name if <line> is an interface metadata table header """ if line is None: match = None else: - match = MetadataHeader.__var_start__.match(line) - # End if + match = MetadataSection.__var_start.match(line) + if match is None: + match = MetadataSection.__vref_start.match(line) + if match is not None: + name = match.group(1)+'('+match.group(2)+')' + # end if + else: + name = match.group(1) + # end if + # end if if match is not None: - name = match.group(1) - if not MetadataHeader.is_scalar_reference(name): - raise ParseSyntaxError("local variable name", - token=name, context=self._pobj) - # End if + if not MetadataSection.is_scalar_reference(name): + pobj.add_syntax_err("local variable name", token=name) + name = None + # end if else: name = None - # End if + # end if return name + def write_to_file(self, filename, append=False): + """Write this metadata table to <filename>. 
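For scheme sections, `section_table_mismatch` above strips the phase suffix from the section title before comparing it with the table name, delegating to `CCPP_STATE_MACH.function_match`. The sketch below only illustrates the title/phase split; the phase list is an assumption for the example, since the authoritative set lives in the state machine.

```python
import re

# Hedged sketch: split '<scheme>_<phase>' into its scheme and phase parts.
_PHASES = ('timestep_init', 'timestep_finalize', 'init', 'run', 'finalize')
_PHASE_RE = re.compile(r"^(.*)_({})$".format('|'.join(_PHASES)))

def split_phase(section_title):
    match = _PHASE_RE.match(section_title)
    if match:
        return match.group(1), match.group(2)
    return section_title, None

print(split_phase('example_scheme_run'))   # ('example_scheme', 'run')
print(split_phase('example_scheme_init'))  # ('example_scheme', 'init')
print(split_phase('example_ddt'))          # ('example_ddt', None)
```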
If <append> is True, + append this table to the end of <filename>, otherwise, create + or truncate the file.""" + if append: + oflag = 'a' + else: + oflag = 'w' + # end if + with open(filename, oflag) as mfile: + mfile.write("[ccpp-arg-table]") + mfile.write(" name = {}".format(self.title)) + mfile.write(" type = {}".format(self.header_type)) + for var in self.variable_list(): + var.write_metadata(mfile) + # end for + # end with + def __repr__(self): - base = super(MetadataHeader, self).__repr__() + base = super(MetadataSection, self).__repr__() pind = base.find(' object ') if pind >= 0: pre = base[0:pind] else: - pre = '<MetadataHeader' - # End if + pre = '<MetadataSection' + # end if bind = base.find('at 0x') if bind >= 0: post = base[bind:] else: post = '>' - # End if + # end if return '{} {} / {} {}'.format(pre, self.module, self.title, post) def __del__(self): try: - del self._variables - super(MetadataHeader, self).__del__() - except Exception as e: - pass # Python does not guarantee much about __del__ conditions - # End try + del self.__variables + except AttributeError: + pass + + def start_context(self, with_comma=True, nodir=True): + """Return a context string for the beginning of the table""" + return context_string(self.__start_context, + with_comma=with_comma, nodir=nodir) @property def title(self): - 'Return the name of the metadata arg_table' - return self._table_title + """Return the name of the metadata arg_table""" + return self.__section_title @property def module(self): - 'Return the module name for this header (if it exists)' - return self._module_name + """Return the module name for this header (if it exists)""" + return self.__module_name @property def header_type(self): - 'Return the type of structure this header documents' - return self._header_type + """Return the type of structure this header documents""" + return self.__header_type @property - def dependencies(self): - 'Return the dependencies of the metadata scheme properties table' - return self._dependencies + def process_type(self): + """Return the type of physical process this header documents""" + return self.__process_type @property - def property_table(self): - 'Return True iff table is a ccpp-table-properties table' - return self._property_table + def has_variables(self): + """Convenience function for finding empty headers""" + return self.__variables - @classmethod - def is_blank(cls, line): - "Return True iff <line> is a valid config format blank or comment line" - return (len(line) == 0) or (cls.__blank_line__.match(line) is not None) + @property + def valid(self): + """Return True iff we did not encounter an error creating + this section""" + return self.__section_valid + + def __str__(self): + '''Print string for MetadataSection objects''' + return "<{} {}>".format(self.__class__.__name__, self.title) @classmethod - def table_start(cls, line): - """Return variable name if <line> is an interface metadata table header + def header_start(cls, line): + """Return True iff <line> is a Metadata section header (ccpp-arg-table). 
""" - if (line is None) or cls.is_blank(line): + if (line is None) or blank_metadata_line(line): match = None else: - match = MetadataHeader.__header_start__.match(line) - # End if + match = cls.__header_start.match(line) + # end if return match is not None - @classmethod - def is_scalar_reference(cls, test_val): - return check_fortran_ref(test_val) is not None - - @classmethod - def parse_metadata_file(cls, filename): - "Parse <filename> and return list of parsed metadata headers" - # Read all lines of the file at once - mheaders = list() - with open(filename, 'r') as file: - fin_lines = file.readlines() - for index in range(len(fin_lines)): - fin_lines[index] = fin_lines[index].rstrip('\n') - # End for - # End with - # Look for a header start - parse_obj = ParseObject(filename, fin_lines) - curr_line, curr_line_num = parse_obj.curr_line() - while curr_line is not None: - if MetadataHeader.table_start(curr_line): - if '[ccpp-table-properties]' in curr_line: - mheaders.append(MetadataHeader(parse_obj, property_table=True)) - else: - mheaders.append(MetadataHeader(parse_obj)) - curr_line, curr_line_num = parse_obj.curr_line() - else: - curr_line, curr_line_num = parse_obj.next_line() - # End if - # End while - return mheaders + @staticmethod + def is_scalar_reference(test_val): + """Return True iff <test_val> refers to a Fortran scalar.""" + return check_fortran_ref(test_val, None, False) is not None ######################################################################## diff --git a/scripts/metavar.py b/scripts/metavar.py index 64768e83..d7fb1173 100755 --- a/scripts/metavar.py +++ b/scripts/metavar.py @@ -1,24 +1,110 @@ #!/usr/bin/env python -# -# Class to hold all information on a CCPP metadata variable -# + +""" +Classes and supporting code to hold all information on CCPP metadata variables +VariableProperty: Class which describes a single variable property +Var: Class which holds all information on a single CCPP metadata variable +VarSpec: Class to hold a standard_name description which can include dimensions +VarAction: Base class for describing actions on variables +VarLoopSubst: Class for describing a loop substitution +VarDictionary: Class to hold all CCPP variables of a CCPP unit (e.g., suite, + scheme, host) +""" # Python library imports from __future__ import print_function import re -import xml.etree.ElementTree as ET from collections import OrderedDict # CCPP framework imports -from parse_tools import check_fortran_ref, check_fortran_type, context_string -from parse_tools import FORTRAN_DP_RE, FORTRAN_ID -from parse_tools import registered_fortran_ddt_name -from parse_tools import check_dimensions, check_cf_standard_name +from parse_tools import check_local_name, check_fortran_type, context_string +from parse_tools import FORTRAN_DP_RE, FORTRAN_SCALAR_REF_RE, fortran_list_match +from parse_tools import check_units, check_dimensions, check_cf_standard_name +from parse_tools import check_diagnostic_id, check_diagnostic_fixed +from parse_tools import check_default_value, check_valid_values from parse_tools import ParseContext, ParseSource from parse_tools import ParseInternalError, ParseSyntaxError, CCPPError ############################################################################### -real_subst_re = re.compile(r"(.*\d)p(\d.*)") -list_re = re.compile(r"[(]([^)]*)[)]\s*$") +_REAL_SUBST_RE = re.compile(r"(.*\d)p(\d.*)") + +# Dictionary of standard CCPP variables +CCPP_STANDARD_VARS = { + # Variable representing the constant integer, 1 + 'ccpp_constant_one' : + {'local_name' : 
'1', 'protected' : 'True', + 'standard_name' : 'ccpp_constant_one', + 'long_name' : "CCPP constant one", + 'units' : '1', 'dimensions' : '()', 'type' : 'integer'}, + 'ccpp_error_flag' : + {'local_name' : 'errflg', 'standard_name' : 'ccpp_error_flag', + 'long_name' : "CCPP error flag", + 'units' : 'flag', 'dimensions' : '()', 'type' : 'integer'}, + 'ccpp_error_message' : + {'local_name' : 'errmsg', 'standard_name' : 'ccpp_error_message', + 'long_name' : "CCPP error message", + 'units' : '1', 'dimensions' : '()', 'type' : 'character', + 'kind' : 'len=512'}, + 'horizontal_dimension' : + {'local_name' : 'total_columns', + 'standard_name' : 'horizontal_dimension', 'units' : 'count', + 'long_name' : "total number of columns", + 'dimensions' : '()', 'type' : 'integer'}, + 'horizontal_loop_extent' : + {'local_name' : 'horz_loop_ext', + 'standard_name' : 'horizontal_loop_extent', 'units' : 'count', + 'dimensions' : '()', 'type' : 'integer'}, + 'horizontal_loop_begin' : + {'local_name' : 'horz_col_beg', + 'standard_name' : 'horizontal_loop_begin', 'units' : 'count', + 'dimensions' : '()', 'type' : 'integer'}, + 'horizontal_loop_end' : + {'local_name' : 'horz_col_end', + 'standard_name' : 'horizontal_loop_end', 'units' : 'count', + 'dimensions' : '()', 'type' : 'integer'}, + 'vertical_layer_dimension' : + {'local_name' : 'num_model_layers', + 'standard_name' : 'vertical_layer_dimension', 'units' : 'count', + 'dimensions' : '()', 'type' : 'integer'}, + 'vertical_interface_dimension' : + {'local_name' : 'num_model_interfaces', + 'standard_name' : 'vertical_interface_dimension', 'units' : 'count', + 'dimensions' : '()', 'type' : 'integer'}, + 'vertical_interface_index' : + {'local_name' : 'layer_index', + 'standard_name' : 'vertical_interface_index', 'units' : 'count', + 'dimensions' : '()', 'type' : 'integer'} +} + +# Pythonic version of a forward reference (CCPP_CONSTANT_VARS defined below) +CCPP_CONSTANT_VARS = {} +# Pythonic version of a forward reference (CCPP_VAR_LOOP_SUBST defined below) +CCPP_VAR_LOOP_SUBSTS = {} +# Loop variables only allowed during run phases +CCPP_LOOP_VAR_STDNAMES = ['horizontal_loop_extent', + 'horizontal_loop_begin', 'horizontal_loop_end', + 'vertical_layer_index', 'vertical_interface_index'] + +############################################################################### +# Supported horizontal dimensions (should be defined in CCPP_STANDARD_VARS) +CCPP_HORIZONTAL_DIMENSIONS = ['ccpp_constant_one:horizontal_dimension', + 'ccpp_constant_one:horizontal_loop_extent', + 'horizontal_loop_begin:horizontal_loop_end', + 'horizontal_loop_extent'] + +############################################################################### +# Supported vertical dimensions (should be defined in CCPP_STANDARD_VARS) +CCPP_VERTICAL_DIMENSIONS = ['ccpp_constant_one:vertical_layer_dimension', + 'ccpp_constant_one:vertical_interface_dimension', + 'vertical_layer_index', 'vertical_interface_index'] + +############################################################################### +# Substituions for run time dimension control +CCPP_LOOP_DIM_SUBSTS = {'ccpp_constant_one:horizontal_dimension' : + 'horizontal_loop_begin:horizontal_loop_end', + 'ccpp_constant_one:vertical_layer_dimension' : + 'vertical_layer_index', + 'ccpp_constant_one:vertical_interface_dimension' : + 'vertical_interface_index'} ######################################################################## def standard_name_to_long_name(prop_dict, context=None): @@ -43,27 +129,29 @@ def standard_name_to_long_name(prop_dict, context=None): # 
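The `CCPP_LOOP_DIM_SUBSTS` table above defines how full-extent dimensions are rewritten for the run phase. The small example below applies those substitutions to a dimension list; the `substitute_run_dims` helper is hypothetical, since the framework performs this through its loop-substitution machinery.

```python
# Run-time dimension substitutions copied from the table defined above.
CCPP_LOOP_DIM_SUBSTS = {
    'ccpp_constant_one:horizontal_dimension':
        'horizontal_loop_begin:horizontal_loop_end',
    'ccpp_constant_one:vertical_layer_dimension': 'vertical_layer_index',
    'ccpp_constant_one:vertical_interface_dimension':
        'vertical_interface_index'}

def substitute_run_dims(dimensions):
    return [CCPP_LOOP_DIM_SUBSTS.get(dim, dim) for dim in dimensions]

print(substitute_run_dims(['ccpp_constant_one:horizontal_dimension',
                           'ccpp_constant_one:vertical_layer_dimension']))
# ['horizontal_loop_begin:horizontal_loop_end', 'vertical_layer_index']
```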
Make the first char uppercase and replace each underscore with a space if 'standard_name' in prop_dict: standard_name = prop_dict['standard_name'] - if len(standard_name) > 0: - long_name = standard_name[0].upper() + re.sub("_", " ", standard_name[1:]) + if standard_name: + long_name = standard_name[0].upper() + re.sub("_", " ", + standard_name[1:]) else: long_name = '' - # End if + # end if # Next, substitute a decimal point for the p in [:digit]p[:digit] - match = real_subst_re.match(long_name) + match = _REAL_SUBST_RE.match(long_name) while match is not None: long_name = match.group(1) + '.' + match.group(2) - match = real_subst_re.match(long_name) - # End while + match = _REAL_SUBST_RE.match(long_name) + # end while else: long_name = '' if 'local_name' in prop_dict: lname = ' {}'.format(prop_dict['local_name']) else: lname = '' - # End if + # end if ctxt = context_string(context) - raise CCPPError('No standard name to convert{} to long name{}'.format(lname, ctxt)) - # End if + emsg = 'No standard name to convert{} to long name{}' + raise CCPPError(emsg.format(lname, ctxt)) + # end if return long_name ######################################################################## @@ -105,33 +193,21 @@ def default_kind_val(prop_dict, context=None): kind = 'kind_phys' else: kind = '' - # End if + # end if else: kind = '' if 'local_name' in prop_dict: lname = ' {}'.format(prop_dict['local_name']) + errmsg = 'No type to find default kind for {ln}{ct}' else: lname = '' - # End if + errmsg = 'No type to find default kind{ct}' + # end if ctxt = context_string(context) - raise CCPPError('No type to find default kind for {}{}'.format(lname, ctxt)) - # End if + raise CCPPError(errmsg.format(ln=lname, ct=ctxt)) + # end if return kind -######################################################################## -def ddt_modules(variable_list): -######################################################################## - ddt_mods = set() - for var in variable_list: - if var.is_ddt(): - module = var.get_prop_value('module') - if len(module) > 0: - ddt_mods.add((module, var.get_prop_value('type'))) - # End if - # End if - # End for - return ddt_mods - ######################################################################## class VariableProperty(object): @@ -158,8 +234,8 @@ class VariableProperty(object): <__main__.VariableProperty object at ...> >>> VariableProperty('local_name', str).name 'local_name' - >>> VariableProperty('standard_name', str).type - <type 'str'> + >>> VariableProperty('standard_name', str).type == str + True >>> VariableProperty('units', str).is_match('units') True >>> VariableProperty('units', str).is_match('UNITS') @@ -173,6 +249,13 @@ class VariableProperty(object): >>> VariableProperty('value', int, valid_values_in=[1, 2 ]).valid_value('3', error=True) #doctest: +IGNORE_EXCEPTION_DETAIL Traceback (most recent call last): CCPPError: Invalid value variable property, '3' + >>> VariableProperty('units', str, check_fn_in=check_units).valid_value('m s-1') + 'm s-1' + >>> VariableProperty('units', str, check_fn_in=check_units).valid_value(' ') + + >>> VariableProperty('units', str, check_fn_in=check_units).valid_value(' ', error=True) #doctest: +IGNORE_EXCEPTION_DETAIL + Traceback (most recent call last): + CCPPError: ' ' is not a valid unit >>> VariableProperty('dimensions', list, check_fn_in=check_dimensions).valid_value('()') [] >>> VariableProperty('dimensions', list, check_fn_in=check_dimensions).valid_value('(x)') @@ -183,111 +266,147 @@ class VariableProperty(object): ['x:y'] >>> 
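The long-name generation above capitalizes the first character, replaces underscores with spaces, and then turns each digit-p-digit sequence into a decimal point via `_REAL_SUBST_RE`. A standalone restatement follows; the wrapper function name is illustrative and the second standard name is made up purely to exercise the digit-p-digit rule.

```python
import re

_REAL_SUBST_RE = re.compile(r"(.*\d)p(\d.*)")

def long_name_from_standard_name(standard_name):
    # Assumes a non-empty standard name; the framework guards the empty case.
    long_name = standard_name[0].upper() + standard_name[1:].replace('_', ' ')
    match = _REAL_SUBST_RE.match(long_name)
    while match is not None:
        long_name = match.group(1) + '.' + match.group(2)
        match = _REAL_SUBST_RE.match(long_name)
    return long_name

print(long_name_from_standard_name('horizontal_loop_extent'))
# Horizontal loop extent
print(long_name_from_standard_name('tendency_of_air_temperature_at_1p5m'))
# Tendency of air temperature at 1.5m
```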
VariableProperty('dimensions', list, check_fn_in=check_dimensions).valid_value('(w:x,y:z)') ['w:x', 'y:z'] + >>> VariableProperty('dimensions', list, check_fn_in=check_dimensions).valid_value(['size(foo)']) + ['size(foo)'] >>> VariableProperty('dimensions', list, check_fn_in=check_dimensions).valid_value('(w:x,x:y:z:q)', error=True) #doctest: +IGNORE_EXCEPTION_DETAIL Traceback (most recent call last): CCPPError: 'x:y:z:q' is an invalid dimension range >>> VariableProperty('dimensions', list, check_fn_in=check_dimensions).valid_value('(x:3y)', error=True) #doctest: +IGNORE_EXCEPTION_DETAIL Traceback (most recent call last): CCPPError: '3y' is not a valid Fortran identifier + >>> VariableProperty('local_name', str, check_fn_in=check_local_name).valid_value('foo') + 'foo' + >>> VariableProperty('local_name', str, check_fn_in=check_local_name).valid_value('foo(bar)') + 'foo(bar)' + >>> VariableProperty('local_name', str, check_fn_in=check_local_name).valid_value('q(:,:,index_of_water_vapor_specific_humidity)') + 'q(:,:,index_of_water_vapor_specific_humidity)' """ __true_vals = ['t', 'true', '.true.'] __false_vals = ['f', 'false', '.false.'] - def __init__(self, name_in, type_in, valid_values_in=None, optional_in=False, default_in=None, default_fn_in=None, check_fn_in=None): + def __init__(self, name_in, type_in, valid_values_in=None, + optional_in=False, default_in=None, default_fn_in=None, + check_fn_in=None, mult_entry_ok=False): + """Conduct sanity checks and initialize this variable property.""" self._name = name_in self._type = type_in - if self._type not in [ bool, int, list, str ]: - raise CCPPError("{} has invalid VariableProperty type, '{}'".format(name_in, type_in)) - # End if + if self._type not in [bool, int, list, str]: + emsg = "{} has invalid VariableProperty type, '{}'" + raise CCPPError(emsg.format(name_in, type_in)) + # end if self._valid_values = valid_values_in self._optional = optional_in + self._default = None + self._default_fn = None if self.optional: if (default_in is None) and (default_fn_in is None): - raise CCPPError('default_in or default_fn_in is a required property for {} because it is optional'.format(name_in)) + emsg = 'default_in or default_fn_in is a required property for {} because it is optional' + raise CCPPError(emsg.format(name_in)) if (default_in is not None) and (default_fn_in is not None): - raise CCPPError('default_in and default_fn_in cannot both be provided') + emsg = 'default_in and default_fn_in cannot both be provided' + raise CCPPError(emsg) self._default = default_in self._default_fn = default_fn_in elif default_in is not None: - raise CCPPError('default_in is not a valid property for {} because it is not optional'.format(name_in)) + emsg = 'default_in is not a valid property for {} because it is not optional' + raise CCPPError(emsg.format(name_in)) elif default_in is not None: - raise CCPPError('default_fn_in is not a valid property for {} because it is not optional'.format(name_in)) + emsg = 'default_fn_in is not a valid property for {} because it is not optional' + raise CCPPError(emsg.format(name_in)) self._check_fn = check_fn_in + self._add_multiple_ok = mult_entry_ok @property def name(self): - 'Return the name of the property' + """Return the name of the property""" return self._name @property def type(self): - 'Return the type of the property' + """Return the type of the property""" return self._type + @property + def has_default_func(self): + """Return True iff this variable property has a default function""" + return self._default_fn 
is not None + def get_default_val(self, prop_dict, context=None): - if self._default_fn is not None: + """Return this variable property's default value or raise an + exception if there is no default value or default value function.""" + if self.has_default_func: return self._default_fn(prop_dict, context) - elif self._default is not None: + # end if + if self._default is not None: return self._default - else: - ctxt = context_string(context) - raise CCPPError('No default for variable property {}{}'.format(self.name, ctxt)) - # End if + # end if + ctxt = context_string(context) + emsg = 'No default for variable property {}{}' + raise CCPPError(emsg.format(self.name, ctxt)) + @property def optional(self): + """Return True iff this variable property is optional""" return self._optional + @property + def add_multiple(self): + """Return True iff multiple entries of this property should be + accumulated. If False, it should either be an error or new + instances should replace the old, however, this functionality + must be implemented by the calling routine (e.g., Var)""" + return self._add_multiple_ok + def is_match(self, test_name): - "Return True iff <test_name> is the name of this property" + """Return True iff <test_name> is the name of this property""" return self.name.lower() == test_name.lower() - def valid_value(self, test_value, error=False): - 'Return a sanitized version of test_value if valid, otherwise return None or abort' + def valid_value(self, test_value, prop_dict=None, error=False): + """Return a valid version of <test_value> if it is valid. + If <test_value> is not valid, return None or raise an exception, + depending on the value of <error>. + If <prop_dict> is not None, it may be used in value validation. + """ valid_val = None if self.type is int: try: - tv = int(test_value) + tval = int(test_value) if self._valid_values is not None: - if tv in self._valid_values: - valid_val = tv + if tval in self._valid_values: + valid_val = tval else: valid_val = None # i.e. 
pass else: - valid_val = tv + valid_val = tval except CCPPError: valid_val = None # Redundant but more expressive than pass elif self.type is list: if isinstance(test_value, str): - match = list_re.match(test_value) - if match is None: - tv = None - else: - tv = [x.strip() for x in match.group(1).split(',')] - if (len(tv) == 1) and (len(tv[0]) == 0): - # Scalar - tv = list() - # End if - # End if + tval = fortran_list_match(test_value) + if tval and (len(tval) == 1) and (not tval[0]): + # Scalar + tval = list() + # end if else: - tv = test_value - # End if - if isinstance(tv, list): - valid_val = tv - elif isinstance(tv, tuple): - valid_val = list(tv) + tval = test_value + # end if + if isinstance(tval, list): + valid_val = tval + elif isinstance(tval, tuple): + valid_val = list(tval) else: valid_val = None - # End if + # end if if (valid_val is not None) and (self._valid_values is not None): # Special case for lists, _valid_values applies to elements for item in valid_val: if item not in self._valid_values: valid_val = None break - # End if - # End for + # end if + # end for else: pass elif self.type is bool: @@ -296,8 +415,9 @@ def valid_value(self, test_value, error=False): valid_val = test_value.lower() in VariableProperty.__true_vals else: valid_val = None # i.e., pass + # end if else: - valid_val = not not test_value + valid_val = not not test_value # pylint: disable=unneeded-not elif self.type is str: if isinstance(test_value, str): if self._valid_values is not None: @@ -307,21 +427,23 @@ def valid_value(self, test_value, error=False): valid_val = None # i.e., pass else: valid_val = test_value - # End if - # End if - # End if + # end if + # end if + # end if # Call a check function? if valid_val and (self._check_fn is not None): - valid_val = self._check_fn(valid_val, error=error) + valid_val = self._check_fn(valid_val, prop_dict, error) elif (valid_val is None) and error: - raise CCPPError("Invalid {} variable property, '{}'".format(self.name, test_value)) - # End if + emsg = "Invalid {} variable property, '{}'" + raise CCPPError(emsg.format(self.name, test_value)) + # end if return valid_val ############################################################################### class Var(object): - """ A class to hold a metadata variable + """ A class to hold a metadata or code variable. + Var objects should be treated as immutable. 
>>> Var.get_prop('standard_name') #doctest: +ELLIPSIS <__main__.VariableProperty object at 0x...> >>> Var.get_prop('standard') @@ -348,28 +470,39 @@ class Var(object): 'Hi mom' >>> Var({'local_name' : 'foo', 'standard_name' : 'hi_mom', 'units' : 'm/s', 'dimensions' : '()', 'type' : 'real', 'intent' : 'in'}, ParseSource('vname', 'SCHEME', ParseContext())).get_prop_value('intent') 'in' - >>> Var({'local_name' : 'foo', 'standard_name' : 'hi_mom', 'units' : 'm/s', 'dimensions' : '()', 'ttype' : 'real', 'intent' : 'in'}, ParseSource('vname', 'SCHEME', ParseContext())) + >>> Var({'local_name' : 'foo', 'standard_name' : 'hi_mom', 'units' : 'm/s', 'dimensions' : '()', 'type' : 'real', 'intent' : 'in'}, ParseSource('vname', 'SCHEME', ParseContext())).get_prop_value('units') + 'm/s' + >>> Var({'local_name' : 'foo', 'standard_name' : 'hi_mom', 'dimensions' : '()', 'type' : 'real', 'intent' : 'in'}, ParseSource('vname', 'SCHEME', ParseContext())).get_prop_value('units') #doctest: +IGNORE_EXCEPTION_DETAIL + Traceback (most recent call last): + ParseSyntaxError: Required property, 'units', missing, in <standard input> + >>> Var({'local_name' : 'foo', 'standard_name' : 'hi_mom', 'units' : ' ', 'dimensions' : '()', 'type' : 'real', 'intent' : 'in'}, ParseSource('vname', 'SCHEME', ParseContext())).get_prop_value('units') #doctest: +IGNORE_EXCEPTION_DETAIL + Traceback (most recent call last): + ParseSyntaxError: foo: ' ' is not a valid unit, in <standard input> + >>> Var({'local_name' : 'foo', 'standard_name' : 'hi_mom', 'units' : 'm/s', 'dimensions' : '()', 'ttype' : 'real', 'intent' : 'in'}, ParseSource('vname', 'SCHEME', ParseContext())) #doctest: +IGNORE_EXCEPTION_DETAIL Traceback (most recent call last): ParseSyntaxError: Invalid metadata variable property, 'ttype', in <standard input> - >>> Var({'local_name' : 'foo', 'standard_name' : 'hi_mom', 'dimensions' : '()', 'type' : 'real', 'intent' : 'in'}, ParseSource('vname', 'SCHEME', ParseContext())) + >>> Var({'local_name' : 'foo', 'standard_name' : 'hi_mom', 'dimensions' : '()', 'type' : 'real', 'intent' : 'in'}, ParseSource('vname', 'SCHEME', ParseContext())) #doctest: +IGNORE_EXCEPTION_DETAIL Traceback (most recent call last): ParseSyntaxError: Required property, 'units', missing, in <standard input> - >>> Var({'local_name' : 'foo', 'standard_name' : 'hi_mom', 'units' : 'm/s', 'dimensions' : '()', 'type' : 'real', 'intent' : 'inout', 'constant' : '.true.'}, ParseSource('vname', 'SCHEME', ParseContext())) #doctest: +IGNORE_EXCEPTION_DETAIL + >>> Var({'local_name' : 'foo', 'standard_name' : 'hi_mom', 'units' : 'm/s', 'dimensions' : '()', 'type' : 'real', 'intent' : 'inout', 'protected' : '.true.'}, ParseSource('vname', 'SCHEME', ParseContext())) #doctest: +IGNORE_EXCEPTION_DETAIL Traceback (most recent call last): - ParseSyntaxError: foo is marked constant but is intent inout, at <standard input>:1 + ParseSyntaxError: foo is marked protected but is intent inout, at <standard input>:1 >>> Var({'local_name' : 'foo', 'standard_name' : 'hi_mom', 'units' : 'm/s', 'dimensions' : '()', 'type' : 'real', 'intent' : 'ino'}, ParseSource('vname', 'SCHEME', ParseContext())) #doctest: +IGNORE_EXCEPTION_DETAIL Traceback (most recent call last): ParseSyntaxError: Invalid intent variable property, 'ino', at <standard input>:1 """ + ## Prop lists below define all the allowed CCPP Metadata attributes + # __spec_props are for variables defined in a specification __spec_props = [VariableProperty('local_name', str, - check_fn_in=check_fortran_ref), + 
check_fn_in=check_local_name), VariableProperty('standard_name', str, check_fn_in=check_cf_standard_name), VariableProperty('long_name', str, optional_in=True, default_fn_in=standard_name_to_long_name), - VariableProperty('units', str), + VariableProperty('units', str, + check_fn_in=check_units), VariableProperty('dimensions', list, check_fn_in=check_dimensions), VariableProperty('type', str, @@ -379,131 +512,215 @@ class Var(object): default_fn_in=default_kind_val), VariableProperty('state_variable', bool, optional_in=True, default_in=False), - VariableProperty('constant', bool, + VariableProperty('optional', bool, + optional_in=True, default_in=False), + VariableProperty('protected', bool, optional_in=True, default_in=False), VariableProperty('allocatable', bool, optional_in=True, default_in=False), + VariableProperty('diagnostic_name', str, + optional_in=True, default_in='', + check_fn_in=check_diagnostic_id), + VariableProperty('diagnostic_name_fixed', str, + optional_in=True, default_in='', + check_fn_in=check_diagnostic_fixed), + VariableProperty('default_value', str, + optional_in=True, default_in='', + check_fn_in=check_default_value), VariableProperty('persistence', str, optional_in=True, valid_values_in=['timestep', 'run'], default_in='timestep'), VariableProperty('active', str, optional_in=True, default_in='.true.')] +# XXgoldyXX: v debug only + __to_add = VariableProperty('valid_values', str, + optional_in=True, default_in='', + check_fn_in=check_valid_values) +# XXgoldyXX: ^ debug only + # __var_props contains properties which are not in __spec_props - __var_props = [VariableProperty('optional', bool, - optional_in=True, default_in=False), - VariableProperty('intent', str, + __var_props = [VariableProperty('intent', str, valid_values_in=['in', 'out', 'inout'])] + # __constituent_props contains properties associated only with constituents + # Note that all constituent properties must be optional and contain either + # a default value or default function. + __constituent_props = [VariableProperty('advected', bool, + optional_in=True, default_in=False)] - __spec_propdict = {} - __var_propdict = {} + __constituent_prop_dict = {x.name : x for x in __constituent_props} + + # __no_metadata_props__ contains properties to omit from metadata + __no_metadata_props__ = ['local_name'] + + __spec_propdict = {p.name : p for p in __spec_props} + __var_propdict = {p.name : p for p in __spec_props + __var_props} __required_spec_props = list() __required_var_props = list() for p in __spec_props: - __spec_propdict[p.name] = p __var_propdict[p.name] = p if not p.optional: __required_spec_props.append(p.name) __required_var_props.append(p.name) - # End if - # End for + # end if + # end for for p in __var_props: - __var_propdict[p.name] = p +# XXgoldyXX: v why? +# __spec_propdict[p.name] = p +# XXgoldyXX: ^ why? +# __var_propdict[p.name] = p if not p.optional: __required_var_props.append(p.name) - # End if - # End for - - def __init__(self, prop_dict, source, invalid_ok=False, logger=None): - """NB: invalid_ok=True is dangerous because it allows creation - of a Var object with invalid properties. 
- In order to prevent silent failures, invalid_ok requires a logger - in order to take effect.""" - if source.type == 'SCHEME': - required_props = Var.__required_var_props - master_propdict = Var.__var_propdict + # end if + # end for + __var_propdict.update({p.name : p for p in __constituent_props}) + # All constituent props are optional so no check + + def __init__(self, prop_dict, source, context=None, + invalid_ok=False, logger=None, clone_source=None): + """Initialize a new Var object. + NB: <invalid_ok>=True is dangerous because it allows creation + of a Var object with invalid properties. + In order to prevent silent failures, <invalid_ok> requires a logger + (passed through the <logger> input) in order to take effect. + If <prop_dict> is really a Var object, use that object's prop_dict. + If this Var object is a clone, record the original Var object + for reference + """ + self.__parent_var = None # for array references + self.__children = list() # This Var's array references + self.__clone_source = clone_source + if isinstance(prop_dict, Var): + prop_dict = prop_dict.copy_prop_dict() + # end if + if source.type == 'scheme': + self.__required_props = Var.__required_var_props +# XXgoldyXX: v don't fill in default properties? +# mstr_propdict = Var.__var_propdict +# XXgoldyXX: ^ don't fill in default properties? else: - required_props = Var.__required_spec_props - master_propdict = Var.__spec_propdict - # End if + self.__required_props = Var.__required_spec_props +# XXgoldyXX: v don't fill in default properties? + mstr_propdict = Var.__spec_propdict +# XXgoldyXX: ^ don't fill in default properties? + # end if self._source = source # Grab a frozen copy of the context - self._context = ParseContext(context=source.context) + if context is None: + self._context = ParseContext(context=source.context) + else: + self._context = context + # end if # First, check the input if 'ddt_type' in prop_dict: # Special case to bypass normal type rules if 'type' not in prop_dict: prop_dict['type'] = prop_dict['ddt_type'] - # End if + # end if if 'units' not in prop_dict: prop_dict['units'] = "" - # End if + # end if prop_dict['kind'] = prop_dict['ddt_type'] del prop_dict['ddt_type'] - # End if + self.__intrinsic = False + else: + self.__intrinsic = True + # end if for key in prop_dict: if Var.get_prop(key) is None: raise ParseSyntaxError("Invalid metadata variable property, '{}'".format(key), context=self.context) - # End if - # End for + # end if + # end for # Make sure required properties are present - for propname in required_props: + for propname in self.__required_props: if propname not in prop_dict: if invalid_ok and (logger is not None): ctx = context_string(self.context) logger.warning("Required property, '{}', missing{}".format(propname, ctx)) else: - raise ParseSyntaxError("Required property, '{}', missing".format(propname), context=self.context) - # End if - # End if - # End for + emsg = "Required property, '{}', missing" + raise ParseSyntaxError(emsg.format(propname), + context=self.context) + # end if + # end if + # end for # Check for any mismatch - if ('constant' in prop_dict) and ('intent' in prop_dict): - if prop_dict['intent'].lower() != 'in': + if ('protected' in prop_dict) and ('intent' in prop_dict): + if (prop_dict['intent'].lower() != 'in') and prop_dict['protected']: if invalid_ok and (logger is not None): ctx = context_string(self.context) - logger.warning("{} is marked constant but is intent {}{}".format(prop_dict['local_name'], prop_dict['intent'], ctx)) + wmsg = "{} is marked 
protected but is intent {}{}" + logger.warning(wmsg.format(prop_dict['local_name'], + prop_dict['intent'], ctx)) else: - raise ParseSyntaxError("{} is marked constant but is intent {}".format(prop_dict['local_name'], prop_dict['intent']), context=self.context) - # End if - # End if - # End if + emsg = "{} is marked protected but is intent {}" + raise ParseSyntaxError(emsg.format(prop_dict['local_name'], + prop_dict['intent']), + context=self.context) + # end if + # end if + # end if + # Look for any constituent properties + self.__is_constituent = False + for name, prop in Var.__constituent_prop_dict.items(): + if (name in prop_dict) and \ + (prop_dict[name] != prop.get_default_val(prop_dict, + context=self.context)): + self.__is_constituent = True + break + # end if + # end for # Steal dict from caller self._prop_dict = prop_dict - # Fill in default values for missing properties - for propname in master_propdict: - if (propname not in prop_dict) and master_propdict[propname].optional: - self._prop_dict[propname] = master_propdict[propname].get_default_val(self._prop_dict, context=self.context) - # End if - # End for +# XXgoldyXX: v don't fill in default properties? +# # Fill in default values for missing properties +# for propname in mstr_propdict: +# if (propname not in prop_dict) and mstr_propdict[propname].optional: +# mval = mstr_propdict[propname] +# def_val = mval.get_default_val(self._prop_dict, +# context=self.context) +# self._prop_dict[propname] = def_val +# # end if +# # end for +# XXgoldyXX: ^ don't fill in default properties? # Make sure all the variable values are valid try: - for prop in self._prop_dict.keys(): - check = Var.get_prop(prop).valid_value(self._prop_dict[prop], - error=True) - # End for - except CCPPError as cp: + for prop_name, prop_val in self.var_properties(): + prop = Var.get_prop(prop_name) + _ = prop.valid_value(prop_val, + prop_dict=self._prop_dict, error=True) + # end for + except CCPPError as cperr: if invalid_ok and (logger is not None): ctx = context_string(self.context) - logger.warning("{}: {}{}".format(self._prop_dict['local_name'], cp, ctx)) + wmsg = "{}: {}{}" + logger.warning(wmsg.format(self._prop_dict['local_name'], + cperr, ctx)) else: - raise ParseSyntaxError("{}: {}".format(self._prop_dict['local_name'], cp), + emsg = "{}: {}" + lname = self._prop_dict['local_name'] + raise ParseSyntaxError(emsg.format(lname, cperr), context=self.context) - # End if - # End try + # end if + # end try def compatible(self, other, logger=None): - # We accept character(len=*) as compatible with character(len=INTEGER_VALUE) - stype = self.get_prop_value('type') - skind = self.get_prop_value('kind') - sunits = self.get_prop_value('units') - srank= self.get_prop_value('tank') + """Return True, None iff <other> is compatible with self. + If not compatible, return False,<reason> where <reason> is + a string describing the incompatibility. 
+ """ + # We accept character(len=*) as compatible with + # character(len=INTEGER_VALUE) + compat = False + reason = None + stype = self.get_prop_value('type') + skind = self.get_prop_value('kind') + sunits = self.get_prop_value('units') sstd_name = self.get_prop_value('standard_name') - otype = other.get_prop_value('type') - okind = other.get_prop_value('kind') - ounits = other.get_prop_value('units') - orank= other.get_prop_value('tank') + otype = other.get_prop_value('type') + okind = other.get_prop_value('kind') + ounits = other.get_prop_value('units') ostd_name = other.get_prop_value('standard_name') if stype == 'character': kind_eq = ((skind == okind) or @@ -511,233 +728,775 @@ def compatible(self, other, logger=None): (skind.startswith('len=') and okind == 'len=*')) else: kind_eq = skind == okind - # End if + # end if if ((sstd_name == ostd_name) and kind_eq and - (sunits == ounits) and (stype == otype) and (srank == orank)): - return True - elif logger is not None: + (sunits == ounits) and (stype == otype)): + compat = True + else: + logger_str = None + error_str = None if sstd_name != ostd_name: - logger.info("standard_name: '{}' != '{}'".format(sstd_name, ostd_name)) + logger_str = "standard_name: '{}' != '{}'".format(sstd_name, + ostd_name) + reason = 'standard_name' elif not kind_eq: - logger.info("kind: '{}' != '{}'".format(skind, okind)) + logger_str = "kind: '{}' != '{}'".format(skind, okind) + reason = 'kind' elif sunits != ounits: - logger.info("units: '{}' != '{}'".format(sunits, ounits)) + logger_str = "units: '{}' != '{}'".format(sunits, ounits) + reason = 'units' elif stype != otype: - logger.info("type: '{}' != '{}'".format(stype, otype)) - elif srank != orank: - logger.info("rank: '{}' != '{}'".format(srank, orank)) + logger_str = "type: '{}' != '{}'".format(stype, otype) + reason = 'type' else: - logger.error('Why are these variables not compatible?') - # End if - return False + error_str = 'Why are these variables not compatible?' + reason = 'UNKNOWN' + # end if + if logger is not None: + if error_str is not None: + logger.error('{}'.format(error_str)) + elif logger_str is not None: + logger.info('{}'.format(logger_str)) + # end if (no else) + # end if + # end if + return compat, reason + + def adjust_intent(self, src_var): + """Add an intent to this Var or adjust its existing intent. + Note: An existing intent can only be adjusted to 'inout' + """ + if 'intent' in self._prop_dict: + my_intent = self.get_prop_value('intent') + else: + my_intent = None + # end if + sv_intent = src_var.get_prop_value('intent') + if not sv_intent: + sv_intent = 'in' + # end if + if sv_intent in ['inout', 'out'] and self.get_prop_value('protected'): + lname = self.get_prop_value('local_name') + lctx = context_string(self.context) + emsg = "Attempt to set intent of {}{} to {}, only 'in' allowed " + emsg += "for 'protected' variable." 
+ if src_var: + slname = src_var.get_prop_value('local_name') + sctx = context_string(src_var.context) + emsg += "\nintent source: {}{}".format(slname, sctx) + # end if + raise CCPPError(emsg.format(lname, lctx, sv_intent)) + # end if (else, no error) + if my_intent: + if my_intent != sv_intent: + self._prop_dict['intent'] = 'inout' + # end if (no else, intent is okay) else: - return False - # End if + self._prop_dict['intent'] = sv_intent + # end if @classmethod def get_prop(cls, name, spec_type=None): + """Return VariableProperty object for <name> or None""" + prop = None if (spec_type is None) and (name in Var.__var_propdict): - return Var.__var_propdict[name] + prop = Var.__var_propdict[name] elif (spec_type is not None) and (name in Var.__spec_propdict): - return Var.__spec_propdict[name] - else: - return None + prop = Var.__spec_propdict[name] + # end if (else prop = None) + return prop + + @classmethod + def is_horizontal_dimension(cls, dim_name): + """Return True if it is a recognized horizontal + dimension or index, otherwise, return False + >>> Var.is_horizontal_dimension('horizontal_loop_extent') + True + >>> Var.is_horizontal_dimension('ccpp_constant_one:horizontal_loop_extent') + True + >>> Var.is_horizontal_dimension('ccpp_constant_one:horizontal_dimension') + True + >>> Var.is_horizontal_dimension('horizontal_loop_begin:horizontal_loop_end') + True + >>> Var.is_horizontal_dimension('horizontal_loop_begin:horizontal_loop_extent') + False + >>> Var.is_horizontal_dimension('ccpp_constant_one') + False + """ + return dim_name in CCPP_HORIZONTAL_DIMENSIONS + + @classmethod + def is_vertical_dimension(cls, dim_name): + """Return True if it is a recognized vertical + dimension or index, otherwise, return False + >>> Var.is_vertical_dimension('ccpp_constant_one:vertical_layer_dimension') + True + >>> Var.is_vertical_dimension('ccpp_constant_one:vertical_interface_dimension') + True + >>> Var.is_vertical_dimension('vertical_layer_index') + True + >>> Var.is_vertical_dimension('vertical_interface_index') + True + >>> Var.is_vertical_dimension('ccpp_constant_one:vertical_layer_index') + False + >>> Var.is_vertical_dimension('ccpp_constant_one:vertical_interface_index') + False + >>> Var.is_vertical_dimension('horizontal_loop_extent') + False + """ + return dim_name in CCPP_VERTICAL_DIMENSIONS + + @classmethod + def find_horizontal_dimension(cls, dims): + """Return the horizontal dimension string and location in <dims> + or (None, -1). + Return form is (horizontal_dimension, index) where index is the + location of horizontal_dimension in <dims>""" + var_hdim = None + hindex = -1 + for index, dimname in enumerate(dims): + if Var.is_horizontal_dimension(dimname): + var_hdim = dimname + hindex = index + break + # end if + # end for + return (var_hdim, hindex) + + @classmethod + def find_vertical_dimension(cls, dims): + """Return the vertical dimension string and location in <dims> + or (None, -1). 
+ Return form is (vertical_dimension, index) where index is the + location of vertical_dimension in <dims>""" + var_vdim = None + vindex = -1 + for index, dimname in enumerate(dims): + if Var.is_vertical_dimension(dimname): + var_vdim = dimname + vindex = index + break + # end if + # end for + return (var_vdim, vindex) + + def var_properties(self): + """Return an iterator for this Var's property dictionary""" + return self._prop_dict.items() + + def copy_prop_dict(self, subst_dict=None): + """Create a copy of our prop_dict, possibly substituting properties + from <subst_dict>.""" + cprop_dict = {} + # Start with a straight copy of this variable's prop_dict + for prop, val in self.var_properties(): + cprop_dict[prop] = val + # end for + # Now add or substitute properties from <subst_dict> + if subst_dict: + for prop in subst_dict.keys(): + cprop_dict[prop] = subst_dict[prop] + # end for + # end if + # Special key for creating a copy of a DDT (see Var.__init__) + if self.is_ddt(): + cprop_dict['ddt_type'] = cprop_dict['type'] + # end if + return cprop_dict + + def clone(self, subst_dict=None, remove_intent=False, + source_name=None, source_type=None, context=None): + """Create a clone of this Var object with properties from <subst_dict> + overriding this variable's properties. <subst_dict> may also be + a string in which case only the local_name property is changed + (to the value of the <subst_dict> string). + If <remove_intent> is True, remove the 'intent' property, if present. + This can be used to promote a variable to module level. + The optional <source_name>, <source_type>, and <context> inputs + allow the clone to appear to be coming from a designated source, + by default, the source and type are the same as this Var (self). + """ + if isinstance(subst_dict, str): + subst_dict = {'local_name':subst_dict} + elif subst_dict is None: + subst_dict = {} + # end if + cprop_dict = self.copy_prop_dict(subst_dict=subst_dict) + if remove_intent and ('intent' in cprop_dict): + del cprop_dict['intent'] + # end if + if source_name is None: + source_name = self.source.name + # end if + if source_type is None: + source_type = self.source.type + # end if + if context is None: + context = self._context + # end if + psource = ParseSource(source_name, source_type, context) + + return Var(cprop_dict, psource, clone_source=self) def get_prop_value(self, name): + """Return the value of key, <name> if <name> is in this variable's + property dictionary. + If <name> is not in the prop dict but does have a <default_fn_in> + property, return the value specified by calling that function. + Otherwise, return None + """ if name in self._prop_dict: - return self._prop_dict[name] + pvalue = self._prop_dict[name] + elif name in Var.__var_propdict: + vprop = Var.__var_propdict[name] + if vprop.has_default_func: + pvalue = vprop.get_default_val(self._prop_dict, + context=self.context) + else: + pvalue = None + # end if + else: + pvalue = None + # end if + return pvalue + + def handle_array_ref(self): + """If this Var's local_name is an array ref, add in the array + reference indices to the Var's dimensions. + Return the (stripped) local_name and the full dimensions. 
+ >>> Var({'local_name' : 'foo', 'standard_name' : 'hi_mom', 'units' : 'm/s', 'dimensions' : '()', 'type' : 'real',}, ParseSource('vname', 'HOST', ParseContext())).handle_array_ref() + ('foo', []) + >>> Var({'local_name' : 'foo', 'standard_name' : 'hi_mom', 'units' : 'm/s', 'dimensions' : '(ccpp_constant_one:dim1)', 'type' : 'real',}, ParseSource('vname', 'HOST', ParseContext())).handle_array_ref() + ('foo', ['ccpp_constant_one:dim1']) + >>> Var({'local_name' : 'foo(:,:,bar)', 'standard_name' : 'hi_mom', 'units' : 'm/s', 'dimensions' : '(ccpp_constant_one:dim1,ccpp_constant_one:dim2)', 'type' : 'real',}, ParseSource('vname', 'HOST', ParseContext())).handle_array_ref() + ('foo', ['ccpp_constant_one:dim1', 'ccpp_constant_one:dim2', 'bar']) + >>> Var({'local_name' : 'foo(bar,:)', 'standard_name' : 'hi_mom', 'units' : 'm/s', 'dimensions' : '(ccpp_constant_one:dim1)', 'type' : 'real',}, ParseSource('vname', 'HOST', ParseContext())).handle_array_ref() + ('foo', ['bar', 'ccpp_constant_one:dim1']) + >>> Var({'local_name' : 'foo(bar)', 'standard_name' : 'hi_mom', 'units' : 'm/s', 'dimensions' : '(ccpp_constant_one:dim1)', 'type' : 'real',}, ParseSource('vname', 'HOST', ParseContext())).handle_array_ref() #doctest: +IGNORE_EXCEPTION_DETAIL + Traceback (most recent call last): + CCPPError: Call dims mismatch for foo(bar), not enough colons + >>> Var({'local_name' : 'foo(:,bar,:)', 'standard_name' : 'hi_mom', 'units' : 'm/s', 'dimensions' : '(ccpp_constant_one:dim1)', 'type' : 'real',}, ParseSource('vname', 'HOST', ParseContext())).handle_array_ref() #doctest: +IGNORE_EXCEPTION_DETAIL + Traceback (most recent call last): + CCPPError: Call dims mismatch for foo(:,bar,:), not enough dims + >>> Var({'local_name' : 'foo(:,:,bar)', 'standard_name' : 'hi_mom', 'units' : 'm/s', 'dimensions' : '(ccpp_constant_one:dim1)', 'type' : 'real',}, ParseSource('vname', 'HOST', ParseContext())).handle_array_ref() #doctest: +IGNORE_EXCEPTION_DETAIL + Traceback (most recent call last): + CCPPError: Call dims mismatch for foo(:,:,bar), not enough dims + >>> Var({'local_name' : 'foo(:,bar)', 'standard_name' : 'hi_mom', 'units' : 'm/s', 'dimensions' : '(ccpp_constant_one:dim1,ccpp_constant_one:dim2)', 'type' : 'real',}, ParseSource('vname', 'HOST', ParseContext())).handle_array_ref() #doctest: +IGNORE_EXCEPTION_DETAIL + Traceback (most recent call last): + CCPPError: Call dims mismatch for foo(:,bar), too many dims + """ + dimlist = self.get_dimensions() + aref = self.array_ref() + if aref is not None: + lname = aref.group(1) + # Substitute dimensions for colons in array reference + sdimlist = dimlist + num_dims = len(sdimlist) + dimlist = [x.strip() for x in aref.group(2).split(',')] + num_colons = sum(dim == ':' for dim in dimlist) + cind = 0 + if num_dims > num_colons: + emsg = 'Call dims mismatch for {}, not enough colons' + lname = self.get_prop_value('local_name') + raise CCPPError(emsg.format(lname)) + # end if + for dind, dim in enumerate(dimlist): + if dim == ':': + if cind >= num_dims: + emsg = 'Call dims mismatch for {}, not enough dims' + lname = self.get_prop_value('local_name') + raise CCPPError(emsg.format(lname)) + # end if + dimlist[dind] = sdimlist[cind] + cind += 1 + # end if + # end for + if cind < num_colons: + emsg = 'Call dims mismatch for {}, too many dims' + lname = self.get_prop_value('local_name') + raise CCPPError(emsg.format(lname)) + # end if + else: + lname = self.get_prop_value('local_name') + # end if + return lname, dimlist + + def call_dimstring(self, var_dicts=None, + explicit_dims=False, 
loop_subst=False): + """Return the dimensions string for a variable call. + If <var_dict> is present, find and substitute a local_name for + each standard_name in this variable's dimensions. + If <var_dict> is not present, return a colon for each dimension. + If <explicit_dims> is True, include the variable's dimensions. + If <loop_subst> is True, apply a loop substitution, if found for any + missing dimension. + """ + emsg = '' + _, dims = self.handle_array_ref() + if var_dicts is not None: + dimlist = [] + sepstr = '' + for dim in dims: + # Decide whether to list all dimensions or to replace + # a range with a colon. + dstdnames = dim.split(':') + add_dims = explicit_dims or (len(dstdnames) == 1) + dvar = None + if add_dims and loop_subst: + for vdict in var_dicts: + dvar = vdict.find_loop_dim_match(dim) + if dvar is not None: + break + # end if + # end for + if dvar: + dimlist.append(dvar) + # end if + if (not dvar) and add_dims: + dnames = [] + for stdname in dstdnames: + for vdict in var_dicts: + dvar = vdict.find_variable(standard_name=stdname, + any_scope=False) + if dvar is not None: + break + # end if + # end for + if dvar: + dnames.append(dvar.get_prop_value('local_name')) + # end if + if not dvar: + emsg += sepstr + "No variable found in " + vnames = [x.name for x in var_dicts] + if len(vnames) > 2: + vstr = ', '.join(vnames[:-1]) + vstr += ', or {}'.format(vnames[-1]) + elif len(vnames) > 1: + vstr = ' or '.join(vnames) + else: + vstr = vnames[0] + # end if + emsg += "{} for dimension '".format(vstr) + emsg += stdname + "' in {vlnam}" + sepstr = '\n' + # end if + # end for + dimlist.append(':'.join(dnames)) + elif not add_dims: + dimlist.append(':') + # end if (no else needed, we must have found loop substitution) + # end for else: - return None + dimlist = [':']*len(dims) + # end if + if dimlist: + dimstr = '(' + ','.join(dimlist) + ')' + else: + dimstr = '' # It ends up being a scalar reference + # end if + if emsg: + ctx = context_string(self.context) + emsg += "{ctx}" + lname = self.get_prop_value('local_name') + raise CCPPError(emsg.format(vlnam=lname, ctx=ctx)) + # end if + return dimstr + + def call_string(self, var_dict, loop_vars=None): + """Construct the actual argument string for this Var by translating + standard names to local names. + String includes array bounds unless loop_vars is None. + if <loop_vars> is not None, look there first for array bounds, + even if usage requires a loop substitution. 
+ """ + if loop_vars is None: + call_str = self.get_prop_value('local_name') + # Look for dims in case this is an array selection variable + dind = call_str.find('(') + if dind > 0: + dimstr = call_str[dind+1:].rstrip()[:-1] + dims = [x.strip() for x in dimstr.split(',')] + call_str = call_str[:dind].strip() + else: + dims = None + # end if + else: + call_str, dims = self.handle_array_ref() + # end if + if dims: + call_str = call_str + '(' + dsep = '' + for dim in dims: + if loop_vars: + lname = loop_vars.find_loop_dim_match(dim) + else: + lname = None + # end if + if lname is None: + isep = '' + lname = "" + for item in dim.split(':'): + if item: + dvar = var_dict.find_variable(standard_name=item, + any_scope=False) + if dvar is None: + iname = None + else: + iname = dvar.get_prop_value('local_name') + # end if + else: + iname = '' + # end if + if iname is not None: + lname = lname + isep + iname + isep = ':' + else: + errmsg = 'No local variable {} in {}{}' + ctx = context_string(self.context) + dname = var_dict.name + raise CCPPError(errmsg.format(item, dname, ctx)) + # end if + # end for + # end if + if lname is not None: + call_str = call_str + dsep + lname + dsep = ', ' + else: + errmsg = 'Unable to convert {} to local variables in {}{}' + ctx = context_string(self.context) + raise CCPPError(errmsg.format(dim, var_dict.name, ctx)) + # end if + # end for + call_str = call_str + ')' + # end if + return call_str + + def valid_value(self, prop_name, test_value=None, error=False): + """Return a valid version of <test_value> if it is a valid value + for the property, <prop_name>. + If <test_value> is not valid, return None or raise an exception, + depending on the value of <error>. + If <test_value> is None, use the current value of <prop_name>. + """ + vprop = Var.get_prop(prop_name) + if vprop is not None: + if test_value is None: + test_val = self.get_prop_value(prop_name) + # end if + valid = vprop.valid_value(test_val, + prop_dict=self._prop_dict, error=error) + else: + valid = None + errmsg = 'Invalid variable property, {}' + raise ParseInternalError(errmsg.format(prop_name)) + # end if + return valid + + def array_ref(self, local_name=None): + """If this Var's local_name is an array reference, return a + Fortran array reference regexp match. + Otherwise, return None""" + if local_name is None: + local_name = self.get_prop_value('local_name') + # end if + match = FORTRAN_SCALAR_REF_RE.match(local_name) + return match + + def intrinsic_elements(self, check_dict=None): + """Return a list of the standard names of this Var object's 'leaf' + intrinsic elements or this Var object's standard name if it is an + intrinsic 'leaf' variable. + If this Var object cannot be reduced to one or more intrinsic 'leaf' + variables (e.g., a DDT Var with no named elements), return None. + A 'leaf' intrinsic Var is a Var of intrinsic Fortran type which has + no children. If a Var has children, those children will be searched + to find leaves. If a Var is a DDT, its named elements are searched. + If <check_dict> is not None, it is checked for children if none are + found in this variable (via finding a variable in <check_dict> with + the same standard name). + Currently, an array of DDTs is not processed (return None) since + Fortran does not support a way to reference those elements. 
+ """ + if self.is_ddt(): + element_names = None + raise ValueError("shouldn't happen?") + # To Do, find and process named elements of DDT + else: + children = self.children() + if (not children) and check_dict: + stdname = self.get_prop_value("standard_name") + pvar = check_dict.find_variable(standard_name=stdname, + any_scope=True) + if pvar: + children = pvar.children() + # end if + # end if + if children: + element_names = list() + for child in children: + child_elements = child.intrinsic_elements() + if isinstance(child_elements, str): + child_elements = [child_elements] + # end if + if child_elements: + for elem in child_elements: + if elem: + element_names.append(elem) + # end if + # end for + # end if + # end for + else: + element_names = self.get_prop_value('standard_name') + # end if + # end if + return element_names + + @classmethod + def constituent_property_names(cls): + """Return a list of the names of constituent properties""" + return Var.__constituent_prop_dict.keys() + + @property + def parent(self): + """Return this variable's parent variable (or None)""" + return self.__parent_var + + @parent.setter + def parent(self, parent_var): + """Set this variable's parent if not already set""" + if self.__parent_var is not None: + emsg = 'Attempting to set parent for {} but parent already set' + lname = self.get_prop_value('local_name') + raise ParseInternalError(emsg.format(lname)) + # end if + if isinstance(parent_var, Var): + self.__parent_var = parent_var + parent_var._add_child(self) + else: + emsg = 'Attempting to set parent for {}, bad parent type, {}' + lname = self.get_prop_value('local_name') + raise ParseInternalError(emsg.format(lname, type(parent_var))) + # end if + + def _add_child(self, cvar): + """Add <cvar> as a child of this Var object""" + if cvar not in self.__children: + self.__children.append(cvar) + # end if + + def children(self): + """Return an iterator over this object's children or None if the + object has no children.""" + children = self.__children + if not children: + pvar = self + while (not children) and pvar.__clone_source: + pvar = pvar.__clone_source + children = pvar.children() + # end while + # end if + return iter(children) if children else None @property def context(self): + """Return this variable's parsed context""" return self._context @property def source(self): + """Return the source object for this variable""" return self._source - @classmethod - def loop_subst_dims(cls, dims): - newdims = list() - for dim in dims: - # loop_subst_match swallows an entire dim string, even ranges - ldim = VarDictionary.loop_subst_match(dim) - if ldim is None: - newdims.append(dim) - else: - newdims.append(ldim) - # End if - # End for - return newdims - - def get_dimensions(self, loop_subst=False): - "Return the variable's dimension string" - dimval = self.get_prop_value('dimensions') - dims = Var.get_prop('dimensions').valid_value(dimval) - if loop_subst: - newdims = loop_subst_dims(dims) + @source.setter + def source(self, new_source): + """Reset this Var's source if <new_source> seems legit""" + if isinstance(new_source, ParseSource): + self._source = new_source else: - newdims = dims - # End if - return newdims + errmsg = 'Attemping to set source of {} ({}) to "{}"' + stdname = self.get_prop_value('standard_name') + lname = self.get_prop_value('local_name') + raise ParseInternalError(errmsg.format(stdname, lname, new_source)) + # end if - def write_def(self, outfile, indent, dict, allocatable=False, loop_subst=False): - '''Write the definition line 
for the variable.''' - vtype = self.get_prop_value('type') + @property + def host_interface_var(self): + """True iff self is included in the host model interface calls""" + return self.source.type == 'host' + + def get_dimensions(self): + """Return a list with the variable's dimension strings""" + dims = self.valid_value('dimensions') + return dims + + def get_dim_stdnames(self, include_constants=True): + """Return a set of all the dimension standard names for this Var""" + dimset = set() + for dim in self.get_dimensions(): + for name in dim.split(':'): + # Weed out the integers + try: + _ = int(name) + except ValueError: + # Not an integer, maybe add it + if include_constants or (not name in CCPP_CONSTANT_VARS): + dimset.add(name) + # end if + # end try + # end for + # end for + return dimset + + def get_rank(self): + """Return the variable's rank (zero for scalar)""" + dims = self.get_dimensions() + return len(dims) + + def has_horizontal_dimension(self, dims=None): + """Return horizontal dimension standard name string for + <self> or <dims> (if present) if a horizontal dimension is + present in the list""" + if dims is None: + vdims = self.get_dimensions() + else: + vdims = dims + # end if + return Var.find_horizontal_dimension(vdims)[0] + + def has_vertical_dimension(self, dims=None): + """Return vertical dimension standard name string for + <self> or <dims> (if present) if a vertical dimension is + present in the list""" + if dims is None: + vdims = self.get_dimensions() + else: + vdims = dims + # end if + return Var.find_vertical_dimension(vdims)[0] + + def write_def(self, outfile, indent, wdict, allocatable=False, + dummy=False, add_intent=None, extra_space=0): + """Write the definition line for the variable to <outfile>. + If <dummy> is True, include the variable's intent. + If <dummy> is True but the variable has no intent, add the + intent indicated by <add_intent>. 
This is intended for host model + variables and it is an error to not pass <add_intent> if <dummy> + is True and the variable has no intent property.""" + stdname = self.get_prop_value('standard_name') + if stdname in CCPP_CONSTANT_VARS: + # There is no declaration line for a constant + return + # end if + if self.is_ddt(): + vtype = 'type' + else: + vtype = self.get_prop_value('type') + # end if kind = self.get_prop_value('kind') name = self.get_prop_value('local_name') - dims = self.get_dimensions(loop_subst=loop_subst) - if (dims is not None) and (len(dims) > 0): - if allocatable: + aref = self.array_ref(local_name=name) + if aref is not None: + name = aref.group(1) + # end if + dims = self.get_dimensions() + if dims: + if allocatable or dummy: dimstr = '(:' + ',:'*(len(dims) - 1) + ')' else: - dimstr = '(' - comma = '' - for dim in dims: - # Only ranges or sizes go into declaration - if VarDictionary.loop_var_match(dim): - continue - else: - dstdnames = dim.split(':') - dvars = [dict.find_variable(x) for x in dstdnames] - if None in dvars: - for dim in dstdnames: - if dict.find_variable(dim) is None: - raise CCPPError("No variable found for '{}'".format(dim)) - # End if - # End for - # End if - dnames = [x.get_prop_value('local_name') for x in dvars] - dimstr = dimstr + comma + ':'.join(dnames) - comma = ', ' - # End if - # End for - dimstr = dimstr + ')' - if dimstr == '()': - dimstr = '' # It ends up being a scalar reference - # End if - # End if + dimstr = self.call_dimstring(var_dicts=[wdict]) else: dimstr = '' - # End if - constant = self.get_prop_value('constant') - intent = self.get_prop_value('intent') - if constant and allocatable: - raise CCPPError('Cannot create allocatable variable from constant, {}'.format(name)) - # End if - if constant: + # end if + protected = self.get_prop_value('protected') + if dummy: + intent = self.get_prop_value('intent') + else: + intent = None + # end if + if protected and allocatable: + errmsg = 'Cannot create allocatable variable from protected, {}' + raise CCPPError(errmsg.format(name)) + # end if + if dummy and (intent is None): + if add_intent is not None: + intent = add_intent + else: + errmsg = "<add_intent> is missing for dummy argument, {}" + raise CCPPError(errmsg.format(name)) + # end if + # end if + if protected and dummy: intent_str = 'intent(in) ' elif allocatable: - if len(dimstr) > 0: + if dimstr: intent_str = 'allocatable ' else: intent_str = ' '*13 - # End if + # end if elif intent is not None: - intent_str = 'intent({}){}'.format(intent, ' '*(5 - len(intent))) + alloval = self.get_prop_value('allocatable') + if (intent.lower()[-3:] == 'out') and alloval: + intent_str = 'allocatable, intent({})'.format(intent) + else: + intent_str = 'intent({}){}'.format(intent, + ' '*(5 - len(intent))) + # end if + elif not dummy: + intent_str = '' else: intent_str = ' '*13 - # End if + # end if + if intent_str.strip(): + comma = ',' + else: + comma = ' ' + # end if if self.is_ddt(): - str = "type({kind}){cspc}{intent} :: {name}{dims}" - cspc = ',' + ' '*(13 - len(kind)) + dstr = "type({kind}){cspc}{intent} :: {name}{dims} ! {sname}" + cspc = comma + ' '*(extra_space + 13 - len(kind)) else: - if (kind is not None) and (len(kind) > 0): - str = "{type}({kind}){cspc}{intent} :: {name}{dims}" - cspc = ',' + ' '*(17 - len(vtype) - len(kind)) + if kind: + dstr = "{type}({kind}){cspc}{intent} :: {name}{dims} ! 
{sname}" + cspc = comma + ' '*(extra_space + 17 - len(vtype) - len(kind)) else: - str = "{type}{cspc}{intent} :: {name}{dims}" - cspc = ',' + ' '*(19 - len(vtype)) - # End if - # End if - outfile.write(str.format(type=vtype, kind=kind, intent=intent_str, - name=name, dims=dimstr, cspc=cspc), indent) + dstr = "{type}{cspc}{intent} :: {name}{dims} ! {sname}" + cspc = comma + ' '*(extra_space + 19 - len(vtype)) + # end if + # end if + outfile.write(dstr.format(type=vtype, kind=kind, intent=intent_str, + name=name, dims=dimstr, cspc=cspc, + sname=stdname), indent) def is_ddt(self): - '''Return True iff <self> is a DDT type.''' - vtype = self.get_prop_value('type') - return registered_fortran_ddt_name(vtype) is not None - - def host_arg_str(self, hvar, host_model, ddt): - '''Create the proper statement of a piece of a host-model variable. - If ddt is True, we can only have a single element selected - ''' - hstr = hvar.get_prop_value('local_name') - # Turn the dimensions string into a proper list and take the correct one - hdims = hvar.get_dimensions() - dimsep = '' - # Does the local name have any extra indices? - match = array_ref_re.match(hstr.strip()) - if match is not None: - hstr = match.group(1) - # Find real names for all the indices - tokens = [x.strip() for x in match.group(2).strip().split(',')] - for token in tokens: - hsdim = self.find_host_model_var(token, host_model) - dimstr = dimstr + dimsep + hsdim - # End for - # End if - if len(hdims) > 0: - dimstr = '(' - else: - dimstr = '' - # End if - for hdim in hdims: - if ddt and (':' in hdim): - raise CCPPError("Invalid DDT dimension spec {}({})".format(hstr, hdimval)) - else: - # Find the host model variable for each dim - hsdims = self.find_host_model_var(hdim, host_model) - dimstr = dimstr + dimsep + hsdims - dimsep = ', ' - # End if - # End for - if len(hdims) > 0: - dimstr = dimstr + ')' - # End if - return hstr + dimstr - - def print_debug(self): - '''Print the data retrieval line for the variable.''' - str='''Contents of {local_name} (* = mandatory for compatibility): - standard_name = {standard_name} * - long_name = {long_name} - units = {units} * - local_name = {local_name} - type = {type} * - dimensions = {dimensions} * - kind = {kind} * -''' - if 'intent' in self.__spec_propdict.keys(): - str += ' intent = {intent}\n' - if 'optional' in self.__spec_propdict.keys(): - str += ' optional = {optional}\n' - if self._context is not None: - str += ' context = {}'.format(self._context) - # End if - return str.format(**self._prop_dict) + """Return True iff <self> is a DDT type.""" + return not self.__intrinsic + + def is_constituent(self): + """Return True iff <self> is a constituent variable.""" + return self.__is_constituent def __str__(self): - '''Print representation or string for Var objects''' + """Print representation or string for Var objects""" return "<Var {standard_name}: {local_name}>".format(**self._prop_dict) def __repr__(self): - '''Object representation for Var objects''' + """Object representation for Var objects""" base = super(Var, self).__repr__() pind = base.find(' object ') if pind >= 0: pre = base[0:pind] else: pre = '<Var' - # End if + # end if bind = base.find('at 0x') if bind >= 0: post = base[bind:] else: post = '>' - # End if - return '{} {}: {} {}'.format(pre, self._prop_dict['standard_name'], self._prop_dict['local_name'], post) + # end if + return '{} {}: {} {}'.format(pre, self._prop_dict['standard_name'], + self._prop_dict['local_name'], post) 
############################################################################### @@ -747,101 +1506,268 @@ class VarSpec(object): contains a comma-separated list of dimension standard names in parentheses. """ - def __init__(self, var, loop_subst=False): + def __init__(self, var): + """Initialize the common properties of this VarSpec-based object""" self._name = var.get_prop_value('standard_name') - self._dims = var.get_dimensions(loop_subst=loop_subst) - if len(self._dims) == 0: + self._dims = var.get_dimensions() + if not self._dims: self._dims = None - # End if + # end if @property def name(self): + """Return the name of this VarSpec-based object""" return self._name - def get_dimensions(self, loop_subst=False): - if loop_subst: - rdims = Var.loop_subst_dims(dims) - else: - rdims = dims - # End if + def get_dimensions(self): + """Return the dimensions of this VarSpec-based object.""" + rdims = self._dims return rdims def __repr__(self): + """Return a representation of this object""" if self._dims is not None: - return "{}({})".format(self._name, ', '.join(self._dims)) + repr_str = "{}({})".format(self._name, ', '.join(self._dims)) else: - return self._name - # End if + repr_str = self._name + # end if + return repr_str ############################################################################### -class VarDDT(Var): - """A class to store a variable that is a component of a DDT (at any - DDT nesting level). - """ - - def __init__(self, standard_name, var_ref_list, logger=None): - self._standard_name = standard_name - self._var_ref_list = list() - for var in var_ref_list: - self._var_ref_list.append(var) - # End for - self._vlen = len(self._var_ref_list) - if logger is not None: - lnames = [x.get_prop_value('local_name') for x in self._var_ref_list] - logger.debug('Adding DDT field, {}, {}'.format(standard_name, lnames)) - # End if +__CCPP_PARSE_CONTEXT = ParseContext(filename='metavar.py') - def compatible(self, other, logger=None): - "Compare <other> to the intrinsic variable the end of the DDT chain" - self._var_ref_list[-1].compare(other) - - def get_prop_value(self, name, index=0): - "Return the indicated property value, defauling to the top-level DDT" - if abs(index) >= self._vlen: - raise ParseInternalError("VarDDT.get_prop_value index ({}) out of range".format(index)) - # End if - return self._var_ref_list[index].get_prop_value(name) +############################################################################### - @property - def context(self): - "Return the context of the variable source (DDT root)" - return self._var_ref_list[0].context +def ccpp_standard_var(std_name, source_type, context=None, intent='out'): + """If <std_name> is a CCPP standard variable name, return a variable + with that name. + Otherwise return None. 
+ """ + if std_name in CCPP_STANDARD_VARS: + # Copy the dictionary because Var can change it + vdict = dict(CCPP_STANDARD_VARS[std_name]) + if context is None: + psource = ParseSource('ccpp_standard_vars', source_type, + __CCPP_PARSE_CONTEXT) + else: + psource = ParseSource('ccpp_standard_vars', source_type, context) + # end if + if source_type.lower() == 'scheme': + vdict['intent'] = intent + # end if + newvar = Var(vdict, psource) + else: + newvar = None + # end if + return newvar - @property - def source(self): - "Return the source of the variable source (DDT root)" - return self._var_ref_list[0].source +############################################################################### - def get_dimensions(self, loop_subst=False, index=0): - "Return the dimensions of the indicated var, defauling to the top-level DDT" - if abs(index) >= self._vlen: - raise ParseInternalError("VarDDT.get_prop_value index ({}) out of range".format(index)) - # End if - return self._var_ref_list[index].get_dimensions(loop_subst) +class VarAction(object): + """A base class for variable actions such as loop substitutions or + temporary variable handling.""" + + def __init__(self): + """Initialize this action (nothing to do)""" + # pass # Nothing general here yet + + def add_local(self, vadict, source): + """Add any variables needed by this action to <dict>. + Variable(s) will appear to originate from <source>.""" + raise ParseInternalError('VarAction add_local method must be overriden') + + def write_action(self, vadict, dict2=None, any_scope=False): + """Return a string setting implementing the action of <self>. + Variables must be in <dict> or <dict2>""" + errmsg = 'VarAction write_action method must be overriden' + raise ParseInternalError(errmsg) + + def equiv(self, vmatch): + """Return True iff <vmatch> is equivalent to <self>. + Equivalence at this level is tested by comparing the type + of the objects. + equiv should be overridden with a method that first calls this + method and then tests class-specific object data.""" + return vmatch.__class__ == self.__class__ + + def add_to_list(self, vlist): + """Add <self> to <vlist> unless <self> or its equivalent is + already in <vlist>. This method should not need to be overriden. 
+ Return the (possibly modified) list""" + ok_to_add = True + for vlist_action in vlist: + if vlist_action.equiv(self): + ok_to_add = False + break + # end if + # end for + if ok_to_add: + vlist.append(self) + # end if + return vlist - def write_def(self, outfile, indent, dict, allocatable=False, loop_subst=False): - '''Write the definition line for the variable.''' - pass +############################################################################### - def is_ddt(self): - '''Return True iff <self> is a DDT type.''' - return True +class VarLoopSubst(VarAction): + """A class to handle required loop substitutions where the host model + (or a suite part) does not provide a loop-like variable used by a + suite part or scheme or where a host model passes a subset of a + dimension at run time.""" + + def __init__(self, missing_stdname, required_stdnames, + local_name, set_action): + """Initialize this variable loop substitution""" + self._missing_stdname = missing_stdname + self._local_name = local_name + if isinstance(required_stdnames, Var): + self._required_stdnames = (required_stdnames,) + else: + # Make sure required_stdnames is iterable + try: + _ = (v for v in required_stdnames) + self._required_stdnames = required_stdnames + except TypeError: + emsg = "required_stdnames must be a tuple or a list" + raise ParseInternalError(emsg) + # end try + # end if + self._set_action = set_action + super(VarLoopSubst, self).__init__() + + def has_subst(self, vadict, any_scope=False): + """Determine if variables for the required standard names of this + VarLoopSubst object are present in <vadict> (or in the parents of + <vadict>) if <any_scope> is True. + Return a list of the required variables on success, None on failure. + """ + # A template for 'missing' should be in the standard variable list + subst_list = list() + for name in self.required_stdnames: + svar = vadict.find_variable(standard_name=name, any_scope=any_scope) + if svar is None: + subst_list = None + break + # end i + subst_list.append(svar) + # end for + return subst_list + + def add_local(self, vadict, source): + """Add a Var created from the missing name to <vadict>""" + if self.missing_stdname not in vadict: + lname = self._local_name + local_name = vadict.new_internal_variable_name(prefix=lname) + prop_dict = {'standard_name':self.missing_stdname, + 'local_name':local_name, + 'type':'integer', 'units':'count', 'dimensions':'()'} + var = Var(prop_dict, source) + vadict.add_variable(var, exists_ok=True, gen_unique=True) + # end if + + def equiv(self, vmatch): + """Return True iff <vmatch> is equivalent to <self>. + Equivalence is determined by matching the missing standard name + and the required standard names""" + is_equiv = super(VarLoopSubst, self).equiv(vmatch) + if is_equiv: + is_equiv = vmatch.missing_stdname == self.missing_stdname + # end if + if is_equiv: + for dim1, dim2 in zip(vmatch.required_stdnames, + self.required_stdnames): + if dim1 != dim2: + is_equiv = False + break + # end if + # end for + # end if + return is_equiv + + def write_action(self, vadict, dict2=None, any_scope=False): + """Return a string setting the correct values for our + replacement variable. 
Variables must be in <vadict> or <dict2>""" + action_dict = {} + if self._set_action: + for stdname in self.required_stdnames: + var = vadict.find_variable(standard_name=stdname, + any_scope=any_scope) + if (var is None) and (dict2 is not None): + var = dict2.find_variable(standard_name=stdname, + any_scope=any_scope) + # end if + if var is None: + errmsg = "Required variable, {}, not found" + raise CCPPError(errmsg.format(stdname)) + # end if + action_dict[stdname] = var.get_prop_value('local_name') + # end for + var = vadict.find_variable(standard_name=self.missing_stdname) + if var is None: + errmsg = "Required variable, {}, not found" + raise CCPPError(errmsg.format(self.missing_stdname)) + # end if + action_dict[self.missing_stdname] = var.get_prop_value('local_name') + # end if + return self._set_action.format(**action_dict) + + def write_metadata(self, mfile): + """Write our properties as metadata to <mfile>""" + pass # Currently no properties to write - def host_arg_str(self, hvar, host_model, ddt): - '''Create the proper statement of a piece of a host-model variable. - If ddt is True, we can only have a single element selected - ''' - pass + @property + def required_stdnames(self): + """Return the _required_stdnames for this object""" + return self._required_stdnames - def print_debug(self): - for var in self._var_ref_list: - var.print(debug) - # End for + @property + def missing_stdname(self): + """Return the _missing_stdname for this object""" + return self._missing_stdname def __repr__(self): - '''Print representation or string for VarDDT objects''' - return "<{}>".format('%'.join([x.__repr__() for x in self._var_ref_list])) + """Return string representing this VarLoopSubst object""" + action_dict = {} + repr_str = '' + if self._set_action: + for stdname in self.required_stdnames: + action_dict[stdname] = stdname + # end for + action_dict[self.missing_stdname] = self.missing_stdname + repr_str = self._set_action.format(**action_dict) + else: + repr_str = "{} => {}".format(self.missing_stdname, + ':'.join(self.required_stdnames)) + # end if + return repr_str + + def __str__(self): + """Return print string for this VarLoopSubst object""" + return "<{}>".format(self.__repr__()) + +# Substitutions where a new variable must be created +CCPP_VAR_LOOP_SUBSTS = { + 'horizontal_loop_extent' : + VarLoopSubst('horizontal_loop_extent', + ('horizontal_loop_begin', 'horizontal_loop_end'), 'ncol', + '{} = {} - {} + 1'.format('{horizontal_loop_extent}', + '{horizontal_loop_end}', + '{horizontal_loop_begin}')), + 'horizontal_loop_begin' : + VarLoopSubst('horizontal_loop_begin', + ('ccpp_constant_one',), 'one', '{horizontal_loop_begin} = 1'), + 'horizontal_loop_end' : + VarLoopSubst('horizontal_loop_end', + ('horizontal_loop_extent',), 'ncol', + '{} = {}'.format('{horizontal_loop_end}', + '{horizontal_loop_extent}')), + 'vertical_layer_dimension' : + VarLoopSubst('vertical_layer_dimension', + ('vertical_layer_index',), 'layer_index', ''), + 'vertical_interface_dimension' : + VarLoopSubst('vertical_interface_dimension', + ('vertical_interface_index',), 'level_index', '') +} ############################################################################### @@ -857,98 +1783,87 @@ class VarDictionary(OrderedDict): VarDictionary(foo) >>> VarDictionary('bar', variables={}) VarDictionary(bar) - >>> VarDictionary('baz', Var({'local_name' : 'foo', 'standard_name' : 'hi_mom', 'units' : 'm/s', 'dimensions' : '()', 'type' : 'real', 'intent' : 'in'}, ParseSource('vname', 'SCHEME', ParseContext()))) #doctest: 
+ELLIPSIS + >>> VarDictionary('baz', Var({'local_name' : 'foo', 'standard_name' : 'hi_mom', 'units' : 'm/s', 'dimensions' : '()', 'type' : 'real', 'intent' : 'in'}, ParseSource('vname', 'scheme', ParseContext()))) #doctest: +ELLIPSIS VarDictionary(baz, [('hi_mom', <__main__.Var hi_mom: foo at 0x...>)]) - >>> print("{}".format(VarDictionary('baz', Var({'local_name' : 'foo', 'standard_name' : 'hi_mom', 'units' : 'm/s', 'dimensions' : '()', 'type' : 'real', 'intent' : 'in'}, ParseSource('vname', 'SCHEME', ParseContext()))))) + >>> print("{}".format(VarDictionary('baz', Var({'local_name' : 'foo', 'standard_name' : 'hi_mom', 'units' : 'm/s', 'dimensions' : '()', 'type' : 'real', 'intent' : 'in'}, ParseSource('vname', 'scheme', ParseContext()))))) VarDictionary(baz, ['hi_mom']) - >>> VarDictionary('qux', [Var({'local_name' : 'foo', 'standard_name' : 'hi_mom', 'units' : 'm/s', 'dimensions' : '()', 'type' : 'real', 'intent' : 'in'}, ParseSource('vname', 'SCHEME', ParseContext()))]) #doctest: +ELLIPSIS + >>> VarDictionary('qux', [Var({'local_name' : 'foo', 'standard_name' : 'hi_mom', 'units' : 'm/s', 'dimensions' : '()', 'type' : 'real', 'intent' : 'in'}, ParseSource('vname', 'scheme', ParseContext()))]) #doctest: +ELLIPSIS VarDictionary(qux, [('hi_mom', <__main__.Var hi_mom: foo at 0x...>)]) - >>> VarDictionary('boo').add_variable(Var({'local_name' : 'foo', 'standard_name' : 'hi_mom', 'units' : 'm/s', 'dimensions' : '()', 'type' : 'real', 'intent' : 'in'}, ParseSource('vname', 'SCHEME', ParseContext()))) + >>> VarDictionary('boo').add_variable(Var({'local_name' : 'foo', 'standard_name' : 'hi_mom', 'units' : 'm/s', 'dimensions' : '()', 'type' : 'real', 'intent' : 'in'}, ParseSource('vname', 'scheme', ParseContext()))) - >>> VarDictionary('who', variables=[Var({'local_name' : 'foo', 'standard_name' : 'hi_mom', 'units' : 'm/s', 'dimensions' : '()', 'type' : 'real', 'intent' : 'in'}, ParseSource('vname', 'SCHEME', ParseContext()))]).prop_list('local_name') + >>> VarDictionary('who', variables=[Var({'local_name' : 'foo', 'standard_name' : 'hi_mom', 'units' : 'm/s', 'dimensions' : '()', 'type' : 'real', 'intent' : 'in'}, ParseSource('vname', 'scheme', ParseContext()))]).prop_list('local_name') ['foo'] - >>> VarDictionary('glitch', Var({'local_name' : 'foo', 'standard_name' : 'hi_mom', 'units' : 'm/s', 'dimensions' : '()', 'type' : 'real', 'intent' : 'in'}, ParseSource('vname', 'SCHEME', ParseContext()))).add_variable(Var({'local_name' : 'bar', 'standard_name' : 'hi_mom', 'units' : 'm/s', 'dimensions' : '()', 'type' : 'real', 'intent' : 'in'}, ParseSource('vname2', 'DDT', ParseContext()))) #doctest: +IGNORE_EXCEPTION_DETAIL + >>> VarDictionary('who', variables=[Var({'local_name' : 'who_var1', 'standard_name' : 'hi_mom', 'units' : 'm/s', 'dimensions' : '()', 'type' : 'real', 'intent' : 'in'}, ParseSource('vname', 'scheme', ParseContext())),Var({'local_name' : 'who_var', 'standard_name' : 'bye_mom', 'units' : 'm/s', 'dimensions' : '()', 'type' : 'real', 'intent' : 'in'}, ParseSource('vname', 'scheme', ParseContext()))]).new_internal_variable_name() + 'who_var2' + >>> VarDictionary('who', variables=[Var({'local_name' : 'who_var1', 'standard_name' : 'hi_mom', 'units' : 'm/s', 'dimensions' : '()', 'type' : 'real', 'intent' : 'in'}, ParseSource('vname', 'scheme', ParseContext()))]).new_internal_variable_name(prefix='bar') + 'bar' + >>> VarDictionary('glitch', Var({'local_name' : 'foo', 'standard_name' : 'hi_mom', 'units' : 'm/s', 'dimensions' : '()', 'type' : 'real', 'intent' : 'in'}, ParseSource('vname', 
'scheme', ParseContext()))).add_variable(Var({'local_name' : 'bar', 'standard_name' : 'hi_mom', 'units' : 'm/s', 'dimensions' : '()', 'type' : 'real', 'intent' : 'in'}, ParseSource('vname2', 'DDT', ParseContext()))) #doctest: +IGNORE_EXCEPTION_DETAIL Traceback (most recent call last): ParseSyntaxError: Invalid Duplicate standard name, 'hi_mom', at <standard input>: """ - # Loop variables - __ccpp_loop_vars__ = ['horizontal_loop_begin', 'horizontal_loop_end', - 'thread_block_number', 'horizontal_loop_extent'] - # Loop substitutions - __ccpp_loop_subst__ = {'horizontal_loop_extent' : - ('horizontal_loop_begin', 'horizontal_loop_end'), - 'thread_block_begin:thread_block_end' : - 'thread_block_number'} - # Dimension substitutions - __ccpp_dim_subst__ = {'horizontal_loop_extent' : 'horizontal_dimension'} - - # Variable representing the constant integer, 1 - __var_one = Var({'local_name' : 'ccpp_one', 'constant' : 'True', - 'standard_name' : 'ccpp_constant_one', - 'units' : '1', 'dimensions' : '()', 'type' : 'integer'}, - ParseSource('VarDictionary', 'REGISTRY', ParseContext())) - def __init__(self, name, variables=None, parent_dict=None, logger=None): - "Unlike dict, VarDictionary only takes a Var or Var list" + """Unlike dict, VarDictionary only takes a Var or Var list""" super(VarDictionary, self).__init__() self._name = name self._logger = logger self._parent_dict = parent_dict if parent_dict is not None: parent_dict.add_sub_scope(self) - # End if + # end if self._sub_dicts = list() + self._local_names = {} # local names in use if isinstance(variables, Var): self.add_variable(variables) elif isinstance(variables, list): for var in variables: self.add_variable(var) - # End for + # end for elif isinstance(variables, VarDictionary): for stdname in variables.keys(): self[stdname] = variables[stdname] - # End for + # end for elif isinstance(variables, dict): - # variables will not be in 'order', but we accept them anyway - for stdname in variables.keys(): - self[stdname] = variables[stdname] - # End for + # variables may not be in 'order', but we accept them anyway + for key in variables.keys(): + var = variables[key] + stdname = var.get_prop_value('standard_name') + self[stdname] = variables[key] + # end for elif variables is not None: raise ParseInternalError('Illegal type for variables, {} in {}'.format(type(variables), self.name)) - # End if + # end if @property def name(self): + """Return this dictionary's name""" return self._name @property def parent(self): + """Return the parent dictionary of this dictionary""" return self._parent_dict - def include_var_in_list(self, var, std_vars, loop_vars, consts): - '''Return True iff <var> is of a type allowed by the logicals, + @staticmethod + def include_var_in_list(var, std_vars, loop_vars, consts): + """Return True iff <var> is of a type allowed by the logicals, <std_vars> (not constants or loop_vars), - <loop_vars> a variable ending in "_extent", "_begin", "_end", or - <consts> a variable with the "constant" property. - ''' - const_val = var.get_prop_value('constant') - const_var = Var.get_prop('constant').valid_value(const_val) - include_var = consts and const_var + <loop_vars> a variable ending in '_extent', '_begin', '_end', or + <consts> a variable with the 'protected' property. 
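# Illustrative sketch (not part of this patch): how a VarLoopSubst set_action
# template, e.g. the horizontal_loop_extent entry of CCPP_VAR_LOOP_SUBSTS above,
# expands once write_action has mapped the required standard names to local
# names.  The local names col_start, col_end, and ncol are hypothetical.
set_action = '{horizontal_loop_extent} = {horizontal_loop_end} - {horizontal_loop_begin} + 1'
action_dict = {'horizontal_loop_begin': 'col_start',   # local_name of a required variable
               'horizontal_loop_end': 'col_end',       # local_name of a required variable
               'horizontal_loop_extent': 'ncol'}       # local_name of the missing variable
assert set_action.format(**action_dict) == 'ncol = col_end - col_start + 1'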
+ """ + standard_name = var.get_prop_value('standard_name') + const_var = standard_name in CCPP_CONSTANT_VARS + loop_var = standard_name in CCPP_LOOP_VAR_STDNAMES + include_var = (consts and const_var) or (loop_var and loop_vars) if not include_var: - standard_name = var.get_prop_value('standard_name') - loop_var = VarDictionary.loop_var_match(standard_name) - include_var = loop_var and loop_vars - if not include_var: - std_var = not (loop_var or const_var) - include_var = std_vars and std_var - # End if - # End if + std_var = not (loop_var or const_var) + include_var = std_vars and std_var + # end if return include_var def variable_list(self, recursive=False, std_vars=True, loop_vars=True, consts=True): - "Return a list of all variables" + """Return a list of all variables""" if recursive and (self._parent_dict is not None): vlist = self._parent_dict.variable_list(recursive=recursive, std_vars=std_vars, @@ -956,39 +1871,113 @@ def variable_list(self, recursive=False, consts=consts) else: vlist = list() - # End if - for sn in self.keys(): - var = self[sn] + # end if + for stdnam in self: + var = self[stdnam] if self.include_var_in_list(var, std_vars=std_vars, loop_vars=loop_vars, consts=consts): vlist.append(var) - # End if - # End for + # end if + # end for return vlist - def add_variable(self, newvar, exists_ok=False): - """Add a variable if it does not conflict with existing entries""" + def add_variable(self, newvar, exists_ok=False, gen_unique=False, + adjust_intent=False): + """Add <newvar> if it does not conflict with existing entries + If <exists_ok> is True, attempting to add an identical copy is okay. + If <gen_unique> is True, a new local_name will be created if a + local_name collision is detected. + if <adjust_intent> is True, adjust conflicting intents to inout.""" standard_name = newvar.get_prop_value('standard_name') + cvar = self.find_variable(standard_name=standard_name, any_scope=False) if (standard_name in self) and (not exists_ok): # We already have a matching variable, error! if self._logger is not None: - self._logger.error("Attempt to add duplicate variable, {} from {}".format(standard_name, newvar.source.name)) - # End if - raise ParseSyntaxError("Duplicate standard name in {}".format(self.name), - token=standard_name, context=newvar._context) - # End if - cvar = self.find_variable(standard_name) - if (cvar is not None) and (not cvar.compatible(newvar, self._logger)): - if self._logger is not None: - self._logger.error("Attempt to add incompatible variable, {} from {}".format(standard_name, newvar.source.name)) - # End if - errstr = "standard name, incompatible with {}" - raise ParseSyntaxError(errstr.format(cvar.context), - token=standard_name, - context=newvar.source.context) - # End if + emsg = "Attempt to add duplicate variable, {} from {}" + self._logger.error(emsg.format(standard_name, + newvar.source.name)) + # end if + emsg = "(duplicate) standard name in {}" + if cvar is not None: + emsg += ", defined at {}".format(cvar.context) + # end if + raise ParseSyntaxError(emsg.format(self.name), + token=standard_name, context=newvar.context) + # end if + if cvar is not None: + compat, reason = cvar.compatible(newvar, logger=self._logger) + if compat: + # Check for intent mismatch + vintent = cvar.get_prop_value('intent') + dintent = newvar.get_prop_value('intent') + # XXgoldyXX: Add special case for host variables here? 
+ if vintent != dintent: + if adjust_intent: + if (vintent == 'in') and (dintent in ['inout', 'out']): + cvar.adjust_intent(newvar) + elif ((vintent == 'out') and + (dintent in ['inout', 'in'])): + cvar.adjust_intent(newvar) + # No else, variables are compatible + else: + emsg = "Attempt to add incompatible variable to {}" + emsg += "\nintent mismatch: {} ({}){} != {} ({}){}" + nlname = newvar.get_prop_value('local_name') + clname = cvar.get_prop_value('local_name') + nctx = context_string(newvar.context) + cctx = context_string(cvar.context) + raise CCPPError(emsg.format(self.name, + clname, vintent, cctx, + nlname, dintent, nctx)) + # end if + # end if + else: + if self._logger is not None: + emsg = "Attempt to add incompatible variable, {} from {}" + emsg += "\n{}".format(reason) + self._logger.error(emsg.format(standard_name, + newvar.source.name)) + # end if + nlname = newvar.get_prop_value('local_name') + clname = cvar.get_prop_value('local_name') + cstr = context_string(cvar.context, with_comma=True) + errstr = "new variable, {}, incompatible {} between {}{} and" + raise ParseSyntaxError(errstr.format(nlname, reason, + clname, cstr), + token=standard_name, + context=newvar.context) + # end if + # end if + lname = newvar.get_prop_value('local_name') + lvar = self.find_local_name(lname) + if lvar is not None: + if gen_unique: + new_lname = self.new_internal_variable_name(prefix=lname) + newvar = newvar.clone(new_lname) + elif not exists_ok: + errstr = 'Invalid local_name: {} already registered{}' + cstr = context_string(lvar.source.context, with_comma=True) + raise ParseSyntaxError(errstr.format(lname, cstr), + context=newvar.source.context) + # end if (no else, things are okay) + # end if (no else, things are okay) + # Check if this variable has a parent (i.e., it is an array reference) + aref = newvar.array_ref(local_name=lname) + if aref is not None: + pname = aref.group(1).strip() + pvar = self.find_local_name(pname) + if pvar is not None: + newvar.parent = pvar + # end if + # end if # If we make it to here without an exception, add the variable - self[standard_name] = newvar + if standard_name not in self: + self[standard_name] = newvar + # end if + lname = lname.lower() + if lname not in self._local_names: + self._local_names[lname] = standard_name + # end if def remove_variable(self, standard_name): """Remove <standard_name> from the dictionary. @@ -996,92 +1985,298 @@ def remove_variable(self, standard_name): """ if standard_name in self: del self[standard_name] - # End if - - def find_variable(self, standard_name, any_scope=True, loop_subst=False): - """Return the variable matching <standard_name> or None - If any_scope is True, search parent scopes if not in current scope. + # end if + + def add_variable_dimensions(self, var, ignore_sources, to_dict=None, + adjust_intent=False): + """Attempt to find a source for each dimension in <var> and add that + Variable to this dictionary or to <to_dict>, if passed. + Dimension variables which are found but whose Source is in + <ignore_sources> are not added to this dictionary. 
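# Simplified standalone model (not the framework code) of the adjust_intent
# behavior documented for add_variable above: identical intents are kept,
# 'inout' already covers everything else, and any other valid mismatch is
# promoted (the assumption here is that cvar.adjust_intent results in 'inout';
# without adjust_intent the framework raises an error instead).
def merge_intent(existing, incoming):
    """Return the intent the stored variable should end up with."""
    if existing == incoming or existing == 'inout':
        return existing
    return 'inout'

assert merge_intent('in', 'out') == 'inout'
assert merge_intent('out', 'inout') == 'inout'
assert merge_intent('inout', 'in') == 'inout'
assert merge_intent('in', 'in') == 'in'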
+ Return an error string on failure.""" + + err_ret = '' + ctx = '' + vdims = var.get_dim_stdnames(include_constants=False) + for dimname in vdims: + if to_dict: + present = to_dict.find_variable(standard_name=dimname, + any_scope=False) + else: + present = None + # end if + if not present: + present = self.find_variable(standard_name=dimname, + any_scope=False) + # end if + if not present: + dvar = self.find_variable(standard_name=dimname, any_scope=True) + if dvar and (dvar.source.type not in ignore_sources): + if to_dict: + to_dict.add_variable(dvar, exists_ok=True, + adjust_intent=adjust_intent) + else: + self.add_variable(dvar, exists_ok=True, + adjust_intent=adjust_intent) + # end if + else: + if err_ret: + err_ret += '\n' + else: + ctx = context_string(var.context) + # end if + err_ret += "{}: ".format(self.name) + err_ret += "Cannot find variable for dimension, {}, of {}{}" + vstdname = var.get_prop_value('standard_name') + err_ret = err_ret.format(dimname, vstdname, ctx) + if dvar: + err_ret += "\nFound {} from excluded source, '{}'{}" + lname = dvar.get_prop_value('local_name') + dctx = context_string(dvar.context) + err_ret = err_ret.format(lname, dvar.source.type, dctx) + # end if + # end if + # end if + # end for + return err_ret + + def find_variable(self, standard_name=None, source_var=None, + any_scope=True, clone=None, + search_call_list=False, loop_subst=False): + """Attempt to return the variable matching <standard_name>. + if <standard_name> is None, the standard name from <source_var> is used. + It is an error to pass both <standard_name> and <source_var> if + the standard name of <source_var> is not the same as <standard_name>. + If <any_scope> is True, search parent scopes if not in current scope. + If the variable is not found and <clone> is not None, add a clone of + <clone> to this dictionary. + If the variable is not found and <clone> is None, return None. + <search_call_list> and <loop_subst> are not used in this base class + but are included to provide a consistent interface. """ - if standard_name in self: + if standard_name is None: + if source_var is None: + emsg = "One of <standard_name> or <source_var> must be passed." 
+ raise ParseInternalError(emsg) + # end if + standard_name = source_var.get_prop_value('standard_name') + elif source_var is not None: + stest = source_var.get_prop_value('standard_name') + if stest != standard_name: + emsg = ("<standard_name> and <source_var> must match " + + "if both are passed.") + raise ParseInternalError(emsg) + # end if + # end if + if standard_name in CCPP_CONSTANT_VARS: + var = CCPP_CONSTANT_VARS[standard_name] + elif standard_name in self: var = self[standard_name] elif any_scope and (self._parent_dict is not None): - var = self._parent_dict.find_variable(standard_name, any_scope) + src_clist = search_call_list + var = self._parent_dict.find_variable(standard_name=standard_name, + source_var=source_var, + any_scope=any_scope, + clone=clone, + search_call_list=src_clist, + loop_subst=loop_subst) else: var = None - # End if - if (var is None) and loop_subst: - var = self.find_loop_subst(standard_name, any_scope=any_scope) - # End if + # end if + if (var is None) and (clone is not None): + lname = clone.get_prop_value('local_name') + new_name = self.new_internal_variable_name(prefix=lname) + var = clone.clone(new_name) + # end if return var + def find_local_name(self, local_name, any_scope=False): + """Return a variable in this dictionary with local_name = <local_name> + or return None if no such variable is currently in the dictionary""" + pvar = None + lname = local_name.lower() # Case is insensitive for local names + if lname in self._local_names: + stdname = self._local_names[lname] + pvar = self.find_variable(standard_name=stdname, any_scope=False) + if not pvar: + emsg = 'VarDictionary {} should have standard_name, {}, ' + emsg += 'based on local_name {}' + raise ParseInternalError(emsg.format(self.name, + stdname, local_name)) + # end if (no else, pvar is fine) + elif any_scope and (self._parent_dict is not None): + pvar = self._parent_dict.find_local_name(local_name, + any_scope=any_scope) + # end if + return pvar + + def find_error_variables(self, any_scope=False, clone_as_out=False): + """Find and return a consistent set of error variables in this + dictionary. + First, attempt to find the set of errflg and errmsg. + Currently, there is no alternative but it will be inserted here. + If a consistent set is not found, return an empty list. + """ + err_vars = list() + # Look for the combo of errflg and errmsg + errflg = self.find_variable(standard_name="ccpp_error_flag", + any_scope=any_scope) + errmsg = self.find_variable(standard_name="ccpp_error_message", + any_scope=any_scope) + if (errflg is not None) and (errmsg is not None): + if clone_as_out: + eout = errmsg.get_prop_value('intent') + if eout != 'out': + subst_dict = {'intent':'out'} + errmsg = errmsg.clone(subst_dict) + # end if + # end if + err_vars.append(errmsg) + if clone_as_out: + eout = errflg.get_prop_value('intent') + if eout != 'out': + subst_dict = {'intent':'out'} + errflg = errflg.clone(subst_dict) + # end if + # end if + err_vars.append(errflg) + # end if + return err_vars + def add_sub_scope(self, sub_dict): - 'Add a child dictionary to enable traversal' + """Add a child dictionary to enable traversal""" self._sub_dicts.append(sub_dict) + def sub_dictionaries(self): + """Return a list of this dictionary's sub-dictionaries""" + return list(self._sub_dicts) + def prop_list(self, prop_name, std_vars=True, loop_vars=True, consts=True): - '''Return a list of the <prop_name> property for each variable.
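# Minimal standalone model (not the framework code) of the lookup order used by
# find_variable above: universal constants first, then this dictionary, then
# (when any_scope is True) the parent dictionary chain; None means the caller
# may fall back to cloning a provided variable.
CCPP_CONSTANTS = {'ccpp_constant_one': 1}        # stand-in for CCPP_CONSTANT_VARS

def find_var(scope, stdname, any_scope=True):
    if stdname in CCPP_CONSTANTS:
        return CCPP_CONSTANTS[stdname]
    while scope is not None:
        if stdname in scope['vars']:
            return scope['vars'][stdname]
        scope = scope['parent'] if any_scope else None
    return None

group = {'vars': {}, 'parent': {'vars': {'horizontal_dimension': 'ncol'}, 'parent': None}}
assert find_var(group, 'horizontal_dimension') == 'ncol'
assert find_var(group, 'horizontal_dimension', any_scope=False) is None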
std_vars are variables which are neither constants nor loop variables. - ''' + """ plist = list() - for standard_name in self.keys(): - var = self.find_variable(standard_name, any_scope=False, loop_subst=False) - if self.include_var_in_list(var, std_vars=std_vars, loop_vars=loop_vars, consts=consts): - plist.append(self[standard_name].get_prop_value(prop_name)) - # End if - # End for + for var in self.values(): + if self.include_var_in_list(var, std_vars=std_vars, + loop_vars=loop_vars, consts=consts): + plist.append(var.get_prop_value(prop_name)) + # end if + # end for return plist - def declare_variables(self, outfile, indent, + def declare_variables(self, outfile, indent, dummy=False, std_vars=True, loop_vars=True, consts=True): - "Write out the declarations for this dictionary's variables" + """Write out the declarations for this dictionary's variables""" for standard_name in self.keys(): - var = self.find_variable(standard_name, any_scope=False, loop_subst=False) - if self.include_var_in_list(var, std_vars=std_vars, loop_vars=loop_vars, consts=consts): - self[standard_name].write_def(outfile, indent, self) - # End if - # End for + var = self.find_variable(standard_name=standard_name, + any_scope=False) + if self.include_var_in_list(var, std_vars=std_vars, + loop_vars=loop_vars, consts=consts): + self[standard_name].write_def(outfile, indent, self, + dummy=dummy) + # end if + # end for def merge(self, other_dict): - "Add new entries from <other_dict>" + """Add new entries from <other_dict>""" for ovar in other_dict.variable_list(): self.add_variable(ovar) - # End for + # end for + + @staticmethod + def loop_var_okay(standard_name, is_run_phase): + """If <standard_name> is a loop variable, return True only if it + is appropriate for the phase (e.g., horizontal_loop_extent is okay + during a run phase only while horizontal_dimension is not allowed + during a run phase). 
+ If <standard_name> is not a loop variable, return True""" + if (standard_name in CCPP_LOOP_VAR_STDNAMES) and (not is_run_phase): + # Prohibit looking for loop variables except in run phases + retval = False + elif (standard_name == "horizontal_dimension") and is_run_phase: + # horizontal_dimension should not be used in run phase + retval = False + else: + retval = True + # end if + return retval def __str__(self): - return "VarDictionary({}, {})".format(self.name, self.keys()) + """Return a string that represents this dictionary object""" + return "VarDictionary({}, {})".format(self.name, list(self.keys())) def __repr__(self): + """Return an unique representation for this object""" srepr = super(VarDictionary, self).__repr__() vstart = len("VarDictionary") + 1 if len(srepr) > vstart + 1: comma = ", " else: comma = "" - # End if + # end if return "VarDictionary({}{}{}".format(self.name, comma, srepr[vstart:]) def __del__(self): - try: - for key in self.keys(): - del self[key] - # End for - except Exception as e: - pass # python does not guarantee object state during finalization - # End try + """Attempt to delete all of the variables in this dictionary""" + self.clear() + + def __eq__(self, other): + """Override == to restore object equality, not dictionary + list equality""" + return self is other @classmethod def loop_var_match(cls, standard_name): - 'Return True iff <standard_name> is a loop variable' - return standard_name in cls.__ccpp_loop_vars__ + """Return a VarLoopSubst if <standard_name> is a loop variable, + otherwise, return None""" + # Strip off 'ccpp_constant_one:', if present + if standard_name[0:18] == 'ccpp_constant_one:': + beg = 18 + else: + beg = 0 + # end if + if standard_name[beg:] in CCPP_VAR_LOOP_SUBSTS: + vmatch = CCPP_VAR_LOOP_SUBSTS[standard_name[beg:]] + else: + vmatch = None + # end if + return vmatch + + def find_loop_dim_match(self, dim_string): + """Find a match in local dict for <dim_string>. That is, if + <dim_string> has a loop dim substitution, and each standard name + in that substitution is in self, return the equivalent local + name string.""" + ldim_string = None + if dim_string in CCPP_LOOP_DIM_SUBSTS: + lnames = list() + std_subst = CCPP_LOOP_DIM_SUBSTS[dim_string].split(':') + for ssubst in std_subst: + svar = self.find_variable(standard_name=ssubst, any_scope=False) + if svar is not None: + lnames.append(svar.get_prop_value('local_name')) + else: + break + # end if + # end for + if len(lnames) == len(std_subst): + ldim_string = ':'.join(lnames) + # end if + # end if + return ldim_string @classmethod - def loop_subst_match(cls, standard_name): - 'Return a loop substitution match, if any, for <standard_name>' - if standard_name in cls.__ccpp_loop_subst__: - return cls.__ccpp_loop_subst__[standard_name] - else: - return None - # End if + def find_loop_dim_from_index(cls, index_string): + """Given a loop index standard name, find the related loop dimension. + """ + loop_dim_string = None + for dim_string in CCPP_LOOP_DIM_SUBSTS: + if index_string == CCPP_LOOP_DIM_SUBSTS[dim_string]: + loop_dim_string = dim_string + break + # end if + # end for + return loop_dim_string def find_loop_subst(self, standard_name, any_scope=True, context=None): """If <standard_name> is of the form <standard_name>_extent and that @@ -1089,66 +2284,103 @@ def find_loop_subst(self, standard_name, any_scope=True, context=None): (<standard_name>_begin, <standard_name>_end), if those variables are in the dictionary. 
If <standard_name>_extent *is* present, return that variable as a - range, (__var_one, <standard_name>_extent) + range, ('ccpp_constant_one', <standard_name>_extent) In other cases, return None """ - loop_var = VarDictionary.loop_subst_match(standard_name) + loop_var = VarDictionary.loop_var_match(standard_name) logger_str = None if loop_var is not None: # Let us see if we can fix a loop variable - dict_var = self.find_variable(standard_name, - any_scope=any_scope, loop_subst=False) + dict_var = self.find_variable(standard_name=standard_name, + any_scope=any_scope) if dict_var is not None: - my_var = (VarDictionary.__var_one, dict_var) + var_one = CCPP_CONSTANT_VARS['ccpp_constant_one'] + my_var = (var_one, dict_var) if self._logger is not None: - logger_str = "loop_subst: found {}{}".format(standard_name, context_string(context)) - # End if + lstr = "loop_subst: found {}{}" + logger_str = lstr.format(standard_name, + context_string(context)) + # end if else: - my_vars = [self.find_variable(x) for x in loop_var] + my_vars = [self.find_variable(standard_name=x, + any_scope=any_scope) + for x in loop_var] if None not in my_vars: my_var = tuple(my_vars) if self._logger is not None: - names = [x.get_prop_value('local_name') for x in my_vars] - logger_str = "loop_subst: {} ==> (){}".format(standard_name, ', '.join(names), context_string(context)) - # End if + names = [x.get_prop_value('local_name') + for x in my_vars] + lstr = "loop_subst: {} ==> ({}){}" + logger_str = lstr.format(standard_name, + ', '.join(names), + context_string(context)) + # end if else: if self._logger is not None: - logger_str = "loop_subst: {} ==> ({}, {}) FAILED{}".format(standard_name, beg_name, end_name, context_string(context)) - # End if + lstr = "loop_subst: {} ==> (??) FAILED{}" + logger_str = lstr.format(standard_name, + context_string(context)) + # end if my_var = None - # End if - # End if + # end if + # end if else: if self._logger is not None: - logger_str = "loop_subst: {} is not a loop variable{}".format(standard_name, context_string(context)) - # End if + lstr = "loop_subst: {} is not a loop variable{}" + logger_str = lstr.format(standard_name, + context_string(context)) + # end if my_var = None - # End if + # end if if logger_str is not None: self._logger.debug(logger_str) - # End if + # end if return my_var - def find_dimension_subst(self, standard_name, any_scope=True, context=None): - """If <standard_name> is of the form <standard_name>_loop_extent - attempt to find a variable of the form <standard_name>_dimension - and return that. If such a variable is not found, raise an exception. - If <standard_name> is not of the form <standard_name>_extent, return - None. + def var_call_string(self, var, loop_vars=None): + """Construct the actual argument string for <var> by translating + standard names to local names. String includes array bounds. + if <loop_vars> is present, look there first for array bounds, + even if usage requires a loop substitution. 
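# Compact standalone model (not the framework code) of the substitution rule in
# the find_loop_subst docstring above: an extent that is present is returned as
# the range ('ccpp_constant_one', extent); otherwise the (begin, end) pair is
# used when both are available.
def loop_range(scope):
    required = ('horizontal_loop_begin', 'horizontal_loop_end')
    if 'horizontal_loop_extent' in scope:
        return ('ccpp_constant_one', 'horizontal_loop_extent')
    if all(name in scope for name in required):
        return required
    return None

assert loop_range({'horizontal_loop_extent'}) == ('ccpp_constant_one', 'horizontal_loop_extent')
assert loop_range({'horizontal_loop_begin', 'horizontal_loop_end'}) == ('horizontal_loop_begin', 'horizontal_loop_end')
assert loop_range(set()) is None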
""" - loop_var = standard_name in VarDictionary.__ccpp_dim_subst__ - logger_str = None - if loop_var: - # Let us see if we can replace the variable - dim_name = VarDictionary.__ccpp_dim_subst__[standard_name] - my_var = self.find_variable(dim_name, any_scope=any_scope) - if my_var is None: - raise CCPPError("Dimension variable, {} not found{}".format(dim_name, context_string(context))) - # End if + return var.call_string(self, loop_vars=loop_vars) + + def new_internal_variable_name(self, prefix=None, max_len=63): + """Find a new local variable name for this dictionary. + The new name begins with <prefix>_<self.name> or with <self.name> + (where <self.name> is this VarDictionary's name) if <prefix> is None. + The new variable name is kept to a maximum length of <max_len>. + """ + index = 0 + if prefix is None: + var_prefix = '{}_var'.format(self.name) else: - my_var = None - # End if - return my_var + var_prefix = '{}'.format(prefix) + # end if + varlist = [x for x in self._local_names.keys() if var_prefix in x] + newvar = None + while newvar is None: + if index == 0: + newvar = var_prefix + else: + newvar = '{}{}'.format(var_prefix, index) + # end if + index = index + 1 + if len(newvar) > max_len: + var_prefix = var_prefix[:-1] + newvar = None + elif newvar in varlist: + newvar = None + # end if + # end while + return newvar + +############################################################################### + +# List of constant variables which are universally available +CCPP_CONSTANT_VARS = VarDictionary('CCPP_CONSTANT_VARS', + [ccpp_standard_var('ccpp_constant_one', + 'module')]) ############################################################################### if __name__ == "__main__": diff --git a/scripts/mkstatic.py b/scripts/mkstatic.py index 5064bb50..88485803 100755 --- a/scripts/mkstatic.py +++ b/scripts/mkstatic.py @@ -16,7 +16,7 @@ from common import CCPP_STAGES from common import CCPP_ERROR_FLAG_VARIABLE, CCPP_ERROR_MSG_VARIABLE, CCPP_LOOP_COUNTER, CCPP_LOOP_EXTENT from common import CCPP_BLOCK_NUMBER, CCPP_BLOCK_COUNT, CCPP_BLOCK_SIZES, CCPP_INTERNAL_VARIABLES -from common import CCPP_HORIZONTAL_DIMENSION, CCPP_HORIZONTAL_LOOP_EXTENT +from common import CCPP_CONSTANT_ONE, CCPP_HORIZONTAL_DIMENSION, CCPP_HORIZONTAL_LOOP_EXTENT from common import FORTRAN_CONDITIONAL_REGEX_WORDS, FORTRAN_CONDITIONAL_REGEX from common import CCPP_TYPE, STANDARD_VARIABLE_TYPES, STANDARD_CHARACTER_TYPE from common import CCPP_STATIC_API_MODULE, CCPP_STATIC_SUBROUTINE_NAME @@ -966,6 +966,7 @@ def write(self, metadata_request, metadata_define, arguments): for dim_expression in var.dimensions: dims = dim_expression.split(':') for dim in dims: + dim = dim.lower() try: dim = int(dim) except ValueError: @@ -1002,6 +1003,7 @@ def write(self, metadata_request, metadata_define, arguments): # standard name in the list of known variables items = FORTRAN_CONDITIONAL_REGEX.findall(var.active) for item in items: + item = item.lower() if item in FORTRAN_CONDITIONAL_REGEX_WORDS: conditional += item else: @@ -1141,7 +1143,7 @@ def write(self, metadata_request, metadata_define, arguments): # Convert blocked data in init and finalize steps - only required for variables with block number and horizontal_dimension if ccpp_stage in ['init', 'timestep_init', 'timestep_finalize', 'finalize'] and \ CCPP_INTERNAL_VARIABLES[CCPP_BLOCK_NUMBER] in local_vars[var_standard_name]['name'] and \ - CCPP_HORIZONTAL_DIMENSION in var.dimensions: + '{}:{}'.format(CCPP_CONSTANT_ONE,CCPP_HORIZONTAL_DIMENSION) in var.dimensions: # Reuse 
existing temporary variable, if possible if local_vars[var_standard_name]['name'] in tmpvars.keys(): # If the variable already has a local variable (tmpvar), reuse it @@ -1168,7 +1170,7 @@ def write(self, metadata_request, metadata_define, arguments): else: # Handle dimensions like "A:B", "A:3", "-1:Z" if ':' in dim: - dims = dim.split(':') + dims = [ x.lower() for x in dim.split(':')] try: dim0 = int(dims[0]) except ValueError: @@ -1187,7 +1189,7 @@ def write(self, metadata_request, metadata_define, arguments): alloc_dimensions.append('{}:{}'.format(dim0,dim1)) # Padding of additional dimensions - before and after the horizontal dimension - hdim_index = tmpvar.dimensions.index(CCPP_HORIZONTAL_DIMENSION) + hdim_index = tmpvar.dimensions.index('{}:{}'.format(CCPP_CONSTANT_ONE,CCPP_HORIZONTAL_DIMENSION)) dimpad_before = '' + ':,'*(len(tmpvar.dimensions[:hdim_index])) dimpad_after = '' + ',:'*(len(tmpvar.dimensions[hdim_index+1:])) diff --git a/scripts/parse_tools/__init__.py b/scripts/parse_tools/__init__.py index 6e5c23e2..5c566c45 100644 --- a/scripts/parse_tools/__init__.py +++ b/scripts/parse_tools/__init__.py @@ -1,19 +1,60 @@ """Public API for the parse_tools library """ +from __future__ import absolute_import +import sys +import os.path +sys.path.insert(0, os.path.dirname(__file__)) + +# pylint: disable=wrong-import-position +from parse_source import ParseContext, ParseSource +from parse_source import ParseSyntaxError, ParseInternalError +from parse_source import CCPPError, context_string +from parse_source import unique_standard_name, reset_standard_name_counter +from parse_object import ParseObject +from parse_checkers import check_fortran_id, FORTRAN_ID +from parse_checkers import FORTRAN_DP_RE +from parse_checkers import FORTRAN_SCALAR_REF, FORTRAN_SCALAR_REF_RE +from parse_checkers import check_fortran_ref, check_fortran_literal +from parse_checkers import check_fortran_intrinsic, check_local_name +from parse_checkers import check_diagnostic_id, check_diagnostic_fixed +from parse_checkers import check_fortran_type, check_balanced_paren +from parse_checkers import fortran_list_match +from parse_checkers import registered_fortran_ddt_name +from parse_checkers import register_fortran_ddt_name +from parse_checkers import check_units, check_dimensions, check_cf_standard_name +from parse_checkers import check_default_value, check_valid_values +from parse_log import init_log, set_log_level, flush_log +from parse_log import set_log_to_stdout, set_log_to_null +from parse_log import set_log_to_file +from preprocess import PreprocStack +from xml_tools import find_schema_file, find_schema_version +from xml_tools import read_xml_file, validate_xml_file +# pylint: enable=wrong-import-position + __all__ = [ 'CCPPError', 'check_balanced_paren', 'check_cf_standard_name', + 'check_default_value', + 'check_diagnostic_id', + 'check_diagnostic_fixed', 'check_dimensions', 'check_fortran_id', 'check_fortran_intrinsic', + 'check_fortran_literal', 'check_fortran_ref', 'check_fortran_type', + 'check_local_name', + 'check_valid_values', 'context_string', + 'find_schema_file', + 'find_schema_version', + 'flush_log', 'FORTRAN_DP_RE', 'FORTRAN_ID', 'FORTRAN_SCALAR_REF', - 'initLog', + 'FORTRAN_SCALAR_REF_RE', + 'init_log', 'ParseContext', 'ParseInternalError', 'ParseSource', @@ -21,27 +62,13 @@ 'ParseObject', 'PreprocStack', 'register_fortran_ddt_name', + 'read_xml_file', 'registered_fortran_ddt_name', - 'setLogLevel', - 'setLogToFile', - 'setLogToNull', - 'setLogToStdout', + 'reset_standard_name_counter', + 
'set_log_level', + 'set_log_to_file', + 'set_log_to_null', + 'set_log_to_stdout', + 'unique_standard_name', + 'validate_xml_file' ] - -from .parse_source import ParseContext, ParseSource -from .parse_source import ParseSyntaxError, ParseInternalError -from .parse_source import CCPPError, context_string -from .parse_object import ParseObject -from .parse_checkers import check_fortran_id, LITERAL_INT, FORTRAN_ID -from .parse_checkers import FORTRAN_DP_RE -from .parse_checkers import check_fortran_ref, FORTRAN_SCALAR_REF -from .parse_checkers import check_fortran_intrinsic -from .parse_checkers import check_fortran_type, check_balanced_paren -from .parse_checkers import registered_fortran_ddt_name -from .parse_checkers import register_fortran_ddt_name -from .parse_checkers import check_dimensions, check_cf_standard_name -from .parse_log import init_log, set_log_level -from .parse_log import set_log_to_stdout, set_log_to_null -from .parse_log import set_log_to_file -from .preprocess import PreprocStack -# End if diff --git a/scripts/parse_tools/parse_checkers.py b/scripts/parse_tools/parse_checkers.py index f0c5fd03..07ae0c43 100755 --- a/scripts/parse_tools/parse_checkers.py +++ b/scripts/parse_tools/parse_checkers.py @@ -4,78 +4,152 @@ # Python library imports import re +import sys +import os.path +sys.path.insert(0, os.path.dirname(__file__)) # CCPP framework imports -from .parse_source import CCPPError +from parse_source import CCPPError, ParseInternalError ######################################################################## -def check_dimensions(test_val, max_len=0, error=False): +def check_units(test_val, prop_dict, error): + """Return <test_val> if a valid unit, otherwise, None + if <error> is True, raise an Exception if <test_val> is not valid. + >>> check_units('m/s', None, True) + 'm/s' + >>> check_units('kg m-3', None, True) + 'kg m-3' + >>> check_units('1', None, True) + '1' + >>> check_units('', None, False) + + >>> check_units('', None, True) #doctest: +IGNORE_EXCEPTION_DETAIL + Traceback (most recent call last): + CCPPError: '' is not a valid unit + >>> check_units(' ', None, True) #doctest: +IGNORE_EXCEPTION_DETAIL + Traceback (most recent call last): + CCPPError: '' is not a valid unit + >>> check_units(['foo'], None, True) #doctest: +IGNORE_EXCEPTION_DETAIL + Traceback (most recent call last): + CCPPError: ['foo'] is invalid; not a string + """ + if not isinstance(test_val, str): + if error: + raise CCPPError("'{}' is invalid; not a string".format(test_val)) + else: + test_val = None + # end if + else: + if not test_val.strip(): + if error: + raise CCPPError("'{}' is not a valid unit".format(test_val)) + else: + test_val = None + # end if + # end if + # end if + + # DH* 20210812 + # Temporary workaround to convert unit 'none' (used for + # dimensionless quantities in ccpp-physics/UFS/SCM) to '1' + if test_val.lower() == 'none': + test_val = '1' + # *DH 20210812 + + return test_val + +def check_dimensions(test_val, prop_dict, error, max_len=0): """Return <test_val> if a valid dimensions list, otherwise, None If <max_len> > 0, each string in <test_val> must not be longer than <max_len>. if <error> is True, raise an Exception if <test_val> is not valid. 
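# Usage sketch for check_units above (assumes the repository's scripts/ and
# scripts/parse_tools directories are on PYTHONPATH so parse_tools resolves).
from parse_tools import check_units

assert check_units('kg kg-1', None, error=True) == 'kg kg-1'
assert check_units('None', None, error=True) == '1'    # temporary 'none' -> '1' workaround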
- >>> check_dimensions(["dim1", "dim2name"]) + >>> check_dimensions(["dim1", "dim2name"], None, False) ['dim1', 'dim2name'] - >>> check_dimensions([":", ":"]) + >>> check_dimensions([":", ":"], None, False) [':', ':'] - >>> check_dimensions([":", "dim2"]) + >>> check_dimensions([":", "dim2"], None, False) [':', 'dim2'] - >>> check_dimensions(["dim1", ":"]) + >>> check_dimensions(["dim1", ":"], None, False) ['dim1', ':'] - >>> check_dimensions(["8", "::"]) + >>> check_dimensions(["8", "::"], None, False) ['8', '::'] - >>> check_dimensions(['start1:end1', 'start2:end2']) + >>> check_dimensions(['start1:end1', 'start2:end2'], None, False) ['start1:end1', 'start2:end2'] - >>> check_dimensions(['start1:', 'start2:end2']) + >>> check_dimensions(['start1:', 'start2:end2'], None, False) ['start1:', 'start2:end2'] - >>> check_dimensions(["dim1", "dim2name"], max_len=5) + >>> check_dimensions(['start1 :end1', 'start2: end2'], None, False) + ['start1 :end1', 'start2: end2'] + >>> check_dimensions(['size(foo)'], None, False) + ['size(foo)'] + >>> check_dimensions(['size(foo,1) '], None, False) + ['size(foo,1) '] + >>> check_dimensions(['size(foo,1'], None, False) #doctest: +IGNORE_EXCEPTION_DETAIL + Traceback (most recent call last): + CCPPError: Invalid dimension component, size(foo,1 + >>> check_dimensions(["dim1", "dim2name"], None, False, max_len=5) - >>> check_dimensions(["dim1", "dim2name"], error=True, max_len=5) + >>> check_dimensions(["dim1", "dim2name"], None, True, max_len=5) #doctest: +IGNORE_EXCEPTION_DETAIL Traceback (most recent call last): CCPPError: 'dim2name' is too long (> 5 chars) - >>> check_dimensions("hi_mom", error=True) #doctest: +IGNORE_EXCEPTION_DETAIL + >>> check_dimensions("hi_mom", None, True) #doctest: +IGNORE_EXCEPTION_DETAIL Traceback (most recent call last): CCPPError: 'hi_mom' is invalid; not a list """ - if type(test_val) != list: + if not isinstance(test_val, list): if error: raise CCPPError("'{}' is invalid; not a list".format(test_val)) else: test_val = None - # End if + # end if else: for item in test_val: isplit = item.split(':') # Check for too many colons if (len(isplit) > 3): if error: - raise CCPPError("'{}' is an invalid dimension range".format(item)) + errmsg = "'{}' is an invalid dimension range" + raise CCPPError(errmsg.format(item)) else: test_val = None - # End if + # end if break - # End if + # end if # Check possible dim styles (a, a:b, a:, :b, :, ::, a:b:c, a::c) - tdims = [x for x in isplit if len(x) > 0] + tdims = [x.strip() for x in isplit if len(x) > 0] for tdim in tdims: # Check numeric value first try: valid = isinstance(int(tdim), int) except ValueError as ve: # Not an integer, try a Fortran ID - valid = check_fortran_id(tdim, max_len=max_len, error=error) is not None + valid = check_fortran_id(tdim, None, + error, max_len=max_len) is not None + if not valid: + # Check for size entry -- simple check + tcheck = tdim.strip().lower() + if tcheck[0:4] == 'size': + ploc = check_balanced_paren(tdim[4:]) + if -1 in ploc: + emsg = 'Invalid dimension component, {}' + raise CCPPError(emsg.format(tdim)) + else: + valid = tdim + # end if + # end if + # end if # End try if not valid: if error: - raise CCPPError("'{}' is an invalid dimension name".format(item)) + errmsg = "'{}' is an invalid dimension name" + raise CCPPError(errmsg.format(item)) else: test_val = None - # End if + # end if break - # End if - # End for - # End for - # End if + # end if + # end for + # end for + # end if return test_val 
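# Usage sketch for check_dimensions above (same PYTHONPATH assumption); the
# dimension names below are only examples.
from parse_tools import check_dimensions

dims = ['ccpp_constant_one:horizontal_loop_extent', 'vertical_layer_dimension']
assert check_dimensions(dims, None, False) == dims
assert check_dimensions(['size(temperature,2)'], None, False) == ['size(temperature,2)']
assert check_dimensions('horizontal_dimension', None, False) is None   # must be a list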
######################################################################## @@ -84,41 +158,44 @@ def check_dimensions(test_val, max_len=0, error=False): CF_ID = r"(?i)[a-z][a-z0-9_]*" __CFID_RE = re.compile(CF_ID+r"$") -def check_cf_standard_name(test_val, error=False): +def check_cf_standard_name(test_val, prop_dict, error): """Return <test_val> if a valid CF Standard Name, otherwise, None http://cfconventions.org/Data/cf-standard-names/docs/guidelines.html if <error> is True, raise an Exception if <test_val> is not valid. - >>> check_cf_standard_name("hi_mom") + >>> check_cf_standard_name("hi_mom", None, False) 'hi_mom' - >>> check_cf_standard_name("hi mom") + >>> check_cf_standard_name("hi mom", None, False) - >>> check_cf_standard_name("hi mom", error=True) #doctest: +IGNORE_EXCEPTION_DETAIL + >>> check_cf_standard_name("hi mom", None, True) #doctest: +IGNORE_EXCEPTION_DETAIL Traceback (most recent call last): CCPPError: 'hi_mom' is not a valid CF Standard Name - >>> check_cf_standard_name("") #doctest: +IGNORE_EXCEPTION_DETAIL + >>> check_cf_standard_name("", None, False) #doctest: +IGNORE_EXCEPTION_DETAIL Traceback (most recent call last): CCPPError: CCPP Standard Name cannot be blank - >>> check_cf_standard_name("_hi_mom") + >>> check_cf_standard_name("_hi_mom", None, False) - >>> check_cf_standard_name("2pac") + >>> check_cf_standard_name("2pac", None, False) - >>> check_cf_standard_name("Agood4tranID") - - >>> check_cf_standard_name("agoodcfid") + >>> check_cf_standard_name("Agood4tranID", None, False) + 'agood4tranid' + >>> check_cf_standard_name("agoodcfid", None, False) 'agoodcfid' """ if len(test_val) == 0: raise CCPPError("CCPP Standard Name cannot be blank") else: match = __CFID_RE.match(test_val) - # End if + # end if if match is None: if error: - raise CCPPError("'{}' is not a valid CCPP Standard Name".format(test_val)) + errmsg = "'{}' is not a valid CCPP Standard Name" + raise CCPPError(errmsg.format(test_val)) else: test_val = None - # End if - # End if + # end if + else: + test_val = test_val.lower() + # end if return test_val ######################################################################## @@ -127,18 +204,20 @@ def check_cf_standard_name(test_val, error=False): ######################################################################## -# LITERAL_INT is a string representing an integer value -LITERAL_INT = r"([0-9]*)" # FORTRAN_ID is a string representing the regular expression for Fortran names FORTRAN_ID = r"([A-Za-z][A-Za-z0-9_]*)" __FID_RE = re.compile(FORTRAN_ID+r"$") # Note that the scalar array reference expressions below are not really for # scalar references because a colon can be a placeholder, unlike in Fortran code -FORTRAN_SCALAR_ARREF = r"\(\s*(?:"+FORTRAN_ID+r"|"+LITERAL_INT+r"|[:])\s*(?:,\s*(?:"+FORTRAN_ID+r"|"+LITERAL_INT+r"|[:])\s*){0,6}\)" -FORTRAN_SCALAR_REF = r"(?:"+"(?:"+LITERAL_INT+r"|"+FORTRAN_ID+")"+r"\s*"+"(?:"+LITERAL_INT+r"|"+FORTRAN_SCALAR_ARREF+r")"+r")" -_FORTRAN_SCALAR_REF_RE = re.compile(FORTRAN_SCALAR_REF+r"$") -FORTRAN_INTRINSIC_TYPES = [ "integer", "real", "logical", "complex", - "double precision", "character" ] +__FORTRAN_AID = r"(?:[A-Za-z][A-Za-z0-9_]*)" +__FORT_INT = r"[0-9]+" +__FORT_DIM = r"(?:"+__FORTRAN_AID+r"|[:]|"+__FORT_INT+r")" +__REPEAT_DIM = r"(?:,\s*"+__FORT_DIM+r"\s*)" +__FORTRAN_SCALAR_ARREF = r"[(]\s*("+__FORT_DIM+r"\s*"+__REPEAT_DIM+r"{0,6})[)]" +FORTRAN_SCALAR_REF = r"(?:"+FORTRAN_ID+r"\s*"+__FORTRAN_SCALAR_ARREF+r")" +FORTRAN_SCALAR_REF_RE = re.compile(FORTRAN_SCALAR_REF+r"$") +FORTRAN_INTRINSIC_TYPES = 
["integer", "real", "logical", "complex", + "double precision", "character"] FORTRAN_DP_RE = re.compile(r"(?i)double\s*precision") FORTRAN_TYPE_RE = re.compile(r"(?i)type\s*\(\s*("+FORTRAN_ID+r")\s*\)") @@ -146,29 +225,29 @@ def check_cf_standard_name(test_val, error=False): ######################################################################## -def check_fortran_id(test_val, max_len=0, error=False): +def check_fortran_id(test_val, prop_dict, error, max_len=0): """Return <test_val> if a valid Fortran identifier, otherwise, None If <max_len> > 0, <test_val> must not be longer than <max_len>. if <error> is True, raise an Exception if <test_val> is not valid. - >>> check_fortran_id("hi_mom") + >>> check_fortran_id("hi_mom", None, False) 'hi_mom' - >>> check_fortran_id("hi_mom", max_len=5) + >>> check_fortran_id("hi_mom", None, False, max_len=5) - >>> check_fortran_id("hi_mom", max_len=5, error=True) #doctest: +IGNORE_EXCEPTION_DETAIL + >>> check_fortran_id("hi_mom", None, True, max_len=5) #doctest: +IGNORE_EXCEPTION_DETAIL Traceback (most recent call last): CCPPError: 'hi_mom' is too long (> 5 chars) - >>> check_fortran_id("hi mom") + >>> check_fortran_id("hi mom", None, False) - >>> check_fortran_id("hi mom", error=True) #doctest: +IGNORE_EXCEPTION_DETAIL + >>> check_fortran_id("hi mom", None, True) #doctest: +IGNORE_EXCEPTION_DETAIL Traceback (most recent call last): CCPPError: 'hi_mom' is not a valid Fortran identifier - >>> check_fortran_id("") + >>> check_fortran_id("", None, False) - >>> check_fortran_id("_hi_mom") + >>> check_fortran_id("_hi_mom", None, False) - >>> check_fortran_id("2pac") + >>> check_fortran_id("2pac", None, False) - >>> check_fortran_id("Agood4tranID") + >>> check_fortran_id("Agood4tranID", None, False) 'Agood4tranID' """ match = __FID_RE.match(test_val) @@ -177,97 +256,200 @@ def check_fortran_id(test_val, max_len=0, error=False): raise CCPPError("'{}' is not a valid Fortran identifier".format(test_val)) else: test_val = None - # End if + # end if elif (max_len > 0) and (len(test_val) > max_len): if error: raise CCPPError("'{}' is too long (> {} chars)".format(test_val, max_len)) else: test_val = None - # End if - # End if + # end if + # end if return test_val ######################################################################## -def check_fortran_ref(test_val, max_len=0, error=False): +def fortran_list_match(test_str): + """Check if <test_str> could be a list of Fortran expressions. + The list must be enclosed in parentheses and separated by commas. + If the list appears okay, return the items (for further checking) + >>> fortran_list_match('(ccpp_constant_one:dim1)') + ['ccpp_constant_one:dim1'] + >>> fortran_list_match('(foo, bar)') + ['foo', 'bar'] + >>> fortran_list_match('()') + [''] + >>> fortran_list_match('(foo, ,)') + + >>> fortran_list_match('foo, bar') + + >>> fortran_list_match('(foo, bar') + + """ + parens, parene = check_balanced_paren(test_str) + if (parens >= 0) and (parene > parens): + litems = [x.strip() for x in test_str[parens+1:parene].split(',')] + if (len(litems) > 1) and (min([len(x) for x in litems]) == 0): + litems = None + # end if + else: + litems = None + # end if + return litems + +######################################################################## + +def check_fortran_ref(test_val, prop_dict, error, max_len=0): """Return <test_val> if a valid simple Fortran variable reference, otherwise, None. A simple Fortran variable reference is defined as a scalar id or a scalar array reference. 
if <error> is True, raise an Exception if <test_val> is not valid. - >>> check_fortran_ref("hi_mom") + >>> FORTRAN_SCALAR_REF_RE.match("foo( bar, baz )").group(1) + 'foo' + >>> FORTRAN_SCALAR_REF_RE.match("foo( bar, baz )").group(2) + 'bar, baz ' + >>> FORTRAN_SCALAR_REF_RE.match("foo( bar, baz )").group(2).split(',')[0].strip() + 'bar' + >>> FORTRAN_SCALAR_REF_RE.match("foo( :, baz )").group(2).split(',')[0].strip() + ':' + >>> FORTRAN_SCALAR_REF_RE.match("foo( bar, baz )").group(2).split(',')[1].strip() + 'baz' + >>> check_fortran_ref("hi_mom", None, False) 'hi_mom' - >>> check_fortran_ref("hi_mom", max_len=5) + >>> check_fortran_ref("hi_mom", None, False, max_len=5) - >>> check_fortran_ref("hi_mom", max_len=5, error=True) #doctest: +IGNORE_EXCEPTION_DETAIL + >>> check_fortran_ref("hi_mom", None, True, max_len=5) #doctest: +IGNORE_EXCEPTION_DETAIL Traceback (most recent call last): CCPPError: 'hi_mom' is too long (> 5 chars) - >>> check_fortran_ref("hi mom") + >>> check_fortran_ref("hi mom", None, False) - >>> check_fortran_ref("hi mom", error=True) #doctest: +IGNORE_EXCEPTION_DETAIL + >>> check_fortran_ref("hi mom", None, True) #doctest: +IGNORE_EXCEPTION_DETAIL Traceback (most recent call last): CCPPError: 'hi_mom' is not a valid Fortran identifier - >>> check_fortran_ref("") + >>> check_fortran_ref("", None, False) - >>> check_fortran_ref("_hi_mom") + >>> check_fortran_ref("_hi_mom", None, False) - >>> check_fortran_ref("2pac") + >>> check_fortran_ref("2pac", None, False) - >>> check_fortran_ref("Agood4tranID") + >>> check_fortran_ref("Agood4tranID", None, False) 'Agood4tranID' - >>> check_fortran_ref("foo(bar)") + >>> check_fortran_ref("foo(bar)", None, False) 'foo(bar)' - >>> check_fortran_ref("foo( bar, baz )") + >>> check_fortran_ref("foo( bar, baz )", None, False) 'foo( bar, baz )' - >>> check_fortran_ref("foo( bar, )") + >>> check_fortran_ref("foo( :, baz )", None, False) + 'foo( :, baz )' + >>> check_fortran_ref("foo( bar, )", None, False) - >>> check_fortran_ref("foo( bar, )", error=True) #doctest: +IGNORE_EXCEPTION_DETAIL + >>> check_fortran_ref("foo( bar, )", None, True) #doctest: +IGNORE_EXCEPTION_DETAIL Traceback (most recent call last): CCPPError: 'foo( bar, )' is not a valid Fortran scalar reference - >>> check_fortran_ref("foo()", error=True) #doctest: +IGNORE_EXCEPTION_DETAIL + >>> check_fortran_ref("foo()", None, True) #doctest: +IGNORE_EXCEPTION_DETAIL Traceback (most recent call last): CCPPError: 'foo()' is not a valid Fortran scalar reference - >>> check_fortran_ref("foo(bar, bazz)", max_len=3, error=True) #doctest: +IGNORE_EXCEPTION_DETAIL + >>> check_fortran_ref("foo(bar, bazz)", None, True, max_len=3) #doctest: +IGNORE_EXCEPTION_DETAIL Traceback (most recent call last): CCPPError: 'bazz' is too long (> 3 chars) in foo(bar, bazz) - >>> check_fortran_ref("foo(barr, baz)", max_len=3, error=True) #doctest: +IGNORE_EXCEPTION_DETAIL + >>> check_fortran_ref("foo(barr, baz)", None, True, max_len=3) #doctest: +IGNORE_EXCEPTION_DETAIL Traceback (most recent call last): CCPPError: 'bazr' is too long (> 3 chars) in foo(barr, baz) - >>> check_fortran_ref("fooo(bar, baz)", max_len=3, error=True) #doctest: +IGNORE_EXCEPTION_DETAIL + >>> check_fortran_ref("fooo(bar, baz)", None, True, max_len=3) #doctest: +IGNORE_EXCEPTION_DETAIL Traceback (most recent call last): CCPPError: 'foo' is too long (> 3 chars) in fooo(bar, baz) """ - idval = check_fortran_id(test_val, max_len=max_len, error=False) + idval = check_fortran_id(test_val, prop_dict, False, max_len=max_len) if idval is 
None: - match = _FORTRAN_SCALAR_REF_RE.match(test_val) + match = FORTRAN_SCALAR_REF_RE.match(test_val) if match is None: if error: - raise CCPPError("'{}' is not a valid Fortran scalar reference".format(test_val)) + emsg = "'{}' is not a valid Fortran scalar reference" + raise CCPPError(emsg.format(test_val)) else: test_val = None - # End if + # end if elif max_len > 0: tokens = test_val.strip().rstrip(')').split('(') - tokens = [tokens[0].strip()] + [x.strip() for x in tokens[1].split(',')] + tokens = [tokens[0].strip()] + [x.strip() + for x in tokens[1].split(',')] for token in tokens: if len(token) > max_len: if error: - raise CCPPError("'{}' is too long (> {} chars) in {}".format(token, max_len, test_val)) + emsg = "'{}' is too long (> {} chars) in {}" + raise CCPPError(emsg.format(token, max_len, test_val)) else: test_val = None break - # End if - # End if - # End for - # End if - # End if + # end if + # end if + # end for + # end if + # end if return test_val ######################################################################## +def check_local_name(test_val, prop_dict, error, max_len=0): + """Return <test_val> if a valid simple Fortran variable reference, + or Fortran constant, otherwise, None. + A simple Fortran variable reference is defined as a scalar id or a + scalar array reference. + A constant is only valid if <prop_dict> is not None, the 'protected' + property is present and True, and the 'type' property matches the + type of <test_val>. + if <error> is True, raise an Exception if <test_val> is not valid. + >>> check_local_name("hi_mom", None, error=False) + 'hi_mom' + >>> check_local_name('122', {'protected':True,'type':'integer'}, error=False) + '122' + >>> check_local_name('122', None, error=False) + + >>> check_local_name('122', {}, error=False) + + >>> check_local_name('122', {'protected':False,'type':'integer'}, error=False) + + >>> check_local_name('122', {'protected':True,'type':'real'}, error=False) + + >>> check_local_name('-122.e4', {'protected':True,'type':'real'}, error=False) + '-122.e4' + >>> check_local_name('-122.', {'protected':True,'type':'real','kind':'kp'}, error=False) + + >>> check_local_name('-122._kp', {'protected':True,'type':'real','kind':'kp'}, error=False) + '-122._kp' + >>> check_local_name('q(:,:,index_of_water_vapor_specific_humidity)', {}, error=False) + 'q(:,:,index_of_water_vapor_specific_humidity)' + """ + valid_val = None + # First check for a constant + if (prop_dict is not None) and ('protected' in prop_dict): + protected = prop_dict['protected'] + else: + protected = False + # end if + if (prop_dict is not None) and ('type' in prop_dict): + vtype = prop_dict['type'] + else: + vtype = "" + # end if + if (prop_dict is not None) and ('kind' in prop_dict): + kind = prop_dict['kind'] + else: + kind = "" + # end if + if protected and vtype and check_fortran_literal(test_val, vtype, kind): + valid_val = test_val + # end if + if valid_val is None: + valid_val = check_fortran_ref(test_val, prop_dict, error, max_len=max_len) + # end if + return valid_val + + +######################################################################## + def check_fortran_intrinsic(typestr, error=False): """Return <test_val> if a valid Fortran intrinsic type, otherwise, None if <error> is True, raise an Exception if <test_val> is not valid. 
- >>> check_fortran_intrinsic("real") + >>> check_fortran_intrinsic("real", error=False) 'real' >>> check_fortran_intrinsic("complex") 'complex' @@ -297,54 +479,55 @@ def check_fortran_intrinsic(typestr, error=False): >>> check_fortran_intrinsic("complex(kind=r8)") """ - match = typestr.strip().lower() in FORTRAN_INTRINSIC_TYPES - if (not match) and (typestr.lower()[0:6] == 'double'): + chk_type = typestr.strip().lower() + match = chk_type in FORTRAN_INTRINSIC_TYPES + if (not match) and (chk_type[0:6] == 'double'): # Special case for double precision - match = FORTRAN_DP_RE.match(typestr.strip()) is not None + match = FORTRAN_DP_RE.match(chk_type) is not None # End if if not match: if error: raise CCPPError("'{}' is not a valid Fortran type".format(typestr)) else: typestr = None - # End if - # End if + # end if + # end if return typestr ######################################################################## -def check_fortran_type(typestr, error=False): +def check_fortran_type(typestr, prop_dict, error): """Return <typestr> if a valid Fortran type, otherwise, None if <error> is True, raise an Exception if <typestr> is not valid. - >>> check_fortran_type("real") + >>> check_fortran_type("real", None, False) 'real' - >>> check_fortran_type("integer") + >>> check_fortran_type("integer", None, False) 'integer' - >>> check_fortran_type("InteGer") + >>> check_fortran_type("InteGer", None, False) 'InteGer' - >>> check_fortran_type("character") + >>> check_fortran_type("character", None, False) 'character' - >>> check_fortran_type("double precision") + >>> check_fortran_type("double precision", None, False) 'double precision' - >>> check_fortran_type("double precision") + >>> check_fortran_type("double precision", None, False) 'double precision' - >>> check_fortran_type("doubleprecision") + >>> check_fortran_type("doubleprecision", None, False) 'doubleprecision' - >>> check_fortran_type("complex") + >>> check_fortran_type("complex", None, False) 'complex' - >>> check_fortran_type("char", error=True) #doctest: +IGNORE_EXCEPTION_DETAIL + >>> check_fortran_type("char", {}, True) #doctest: +IGNORE_EXCEPTION_DETAIL Traceback (most recent call last): CCPPError: 'char' is not a valid Fortran type - >>> check_fortran_type("int") + >>> check_fortran_type("int", None, False) - >>> check_fortran_type("char", error=False) + >>> check_fortran_type("char", {}, False) - >>> check_fortran_type("type") + >>> check_fortran_type("type", None, False) - >>> check_fortran_type("type", error=True) #doctest: +IGNORE_EXCEPTION_DETAIL + >>> check_fortran_type("type", {}, True) #doctest: +IGNORE_EXCEPTION_DETAIL Traceback (most recent call last): - CCPPError: 'char' is not a valid derived Fortran type - >>> check_fortran_type("type(hi mom)", error=True) #doctest: +IGNORE_EXCEPTION_DETAIL + CCPPError: 'type' is not a valid derived Fortran type + >>> check_fortran_type("type(hi mom)", {}, True) #doctest: +IGNORE_EXCEPTION_DETAIL Traceback (most recent call last): CCPPError: 'type(hi mom)' is not a valid derived Fortran type """ @@ -353,20 +536,406 @@ def check_fortran_type(typestr, error=False): if match is None: match = registered_fortran_ddt_name(typestr) dt = " derived" - # End if - # DH* 20190913 - skip checking if a DDT is registered at this time - #if match is None: - # if error: - # raise CCPPError("'{}' is not a valid{} Fortran type".format(typestr, dt)) - # else: - # typestr = None - # # End if - ## End if - # *DH 20190913 + # end if + if match is None: + if error: + emsg = "'{}' is not a valid{} Fortran type" + 
raise CCPPError(emsg.format(typestr, dt)) + else: + typestr = None + # end if + # end if return typestr ######################################################################## +def check_fortran_literal(value, typestr, kind): + """Return True iff <value> is a valid Fortran literal of type, <typestr>. + Note: no attempt is made to handle the older D syntax for real literals. + To promote clean coding, real values MUST have a decimal point, however, + this check is not available for the complex type so we just require + the two components to either both be integers or both be reals. + If <kind> is not an empty string, it is required to be present (i.e., if + <kind> == 'kind_phys', <value> should be of the form, 123.4_kind_phys) + >>> check_fortran_literal("123", "integer", "") + True + >>> check_fortran_literal("123", "INTEGER", "") + True + >>> check_fortran_literal("-123", "integer", "") + True + >>> check_fortran_literal("+123", "integer", "") + True + >>> check_fortran_literal("+123", "integer", "kind_int") + False + >>> check_fortran_literal("+123_kind_int", "integer", "kind_int") + True + >>> check_fortran_literal("+123_int", "integer", "kind_int") + False + >>> check_fortran_literal("123", "real", "") + False + >>> check_fortran_literal("123.", "real", "") + True + >>> check_fortran_literal("123.45", "real", "kind_phys") + False + >>> check_fortran_literal("123.45_8", "real", "kind_phys") + False + >>> check_fortran_literal("123.45_kind_phys", "real", "kind_phys") + True + >>> check_fortran_literal("123", "double precision", "") + False + >>> check_fortran_literal("123.", "doubleprecision", "") + True + >>> check_fortran_literal("123.45", "double precision", "kind_phys") + False + >>> check_fortran_literal("123.45_8", "doubleprecision", "kind_phys") + False + >>> check_fortran_literal("123.45_kp", "doubleprecision", "kp") + True + >>> check_fortran_literal("123", "logical", "") + False + >>> check_fortran_literal(".true.", "logical", "") + True + >>> check_fortran_literal(".false.", "logical", "") + True + >>> check_fortran_literal("T", "logical", "") + False + >>> check_fortran_literal("F", "logical", "") + False + >>> check_fortran_literal(".TRUE.", "logical", "kind_log") + False + >>> check_fortran_literal(".TRUE._kind_log", "logical", "kind_log") + True + >>> check_fortran_literal("(123.,456.)", "complex", "") + True + >>> check_fortran_literal("(123. , 456.)", "complex", "") + True + >>> check_fortran_literal("(123.,456", "complex", "") + False + >>> check_fortran_literal("(123. 
, 456.)", "complex", "kp") + False + >>> check_fortran_literal("(123._kp , 456)", "complex", "kp") + False + >>> check_fortran_literal("(123._kp , 456._kp)", "complex", "kp") + True + >>> check_fortran_literal("'hi mom'", "character", "") + True + >>> check_fortran_literal("'hi mom", "character", "") + False + >>> check_fortran_literal('"hi mom"', "character", "") + True + >>> check_fortran_literal('"hi""mom"', "character", "") + True + >>> check_fortran_literal('"hi" "mom"', "character", "") + False + >>> check_fortran_literal("'hi''there''mom'", "character", "") + True + >>> check_fortran_literal("'hi mom'", "character", "kc") + False + >>> check_fortran_literal("kc_'hi mom'", "character", "kc") + True + >>> check_fortran_literal("123._kp", "float", "kp") #doctest: +IGNORE_EXCEPTION_DETAIL + Traceback (most recent call last): + ParseInternalError: ERROR: 'float' is not a Fortran intrinsic type + """ + valid = True + if FORTRAN_DP_RE.match(typestr.strip()) is not None: + vtype = 'real' + else: + vtype = typestr.lower() + # end if + # Check complex first + if vtype == 'complex': + cvals = value.strip().split(',') + if len(cvals) == 2: + tp = 'integer' + if ('.' in cvals[0]) and ('.' in cvals[1]): + tp = 'real' + elif ('.' in cvals[0]) or ('.' in cvals[1]): + valid = False + # end if + if (cvals[0][0] == '(') and (cvals[1][-1] == ')'): + valid = valid and check_fortran_literal(cvals[0][1:], tp, kind) + valid = valid and check_fortran_literal(cvals[1][:-1], tp, kind) + else: + valid = False + # end if + else: + valid = False + elif valid: + vparts = value.strip().split('_') + if vtype == 'character': + if len(vparts) > 1: + val = vparts[-1] + vkind = '_'.join(vparts[0:-1]) + else: + val = vparts[0] + vkind = '' + # end if + else: + val = vparts[0] + if len(vparts) > 1: + vkind = '_'.join(vparts[1:]) + else: + vkind = '' + # end if + # end if + if vkind != kind.lower(): + valid = False + # end if, kind is okay, check value + if valid and (vtype == 'integer'): + try: + vtest = int(val) + except ValueError as ve: + valid = False + # End try + elif valid and (vtype == 'real'): + if '.' not in val: + valid = False + else: + try: + vtest = float(val) + except ValueError as ve: + valid = False + # End try + # end if + elif valid and (vtype == 'logical'): + valid = (val.upper() == '.TRUE.') or (val.upper() == '.FALSE.') + elif valid and (vtype == 'character'): + sep = val[0] + cparts = val.split(sep) + # We must have balanced delimiters + if len(cparts)%2 == 0: + valid = False + else: + for index in range(len(cparts)): + if (index%2 == 0) and (len(cparts[index]) > 0): + valid = False + break + # end if + # end for + # end if (else okay) + elif valid: + errmsg = "ERROR: '{}' is not a Fortran intrinsic type" + raise ParseInternalError(errmsg.format(typestr)) + # end if (no else) + # end if + return valid + +def check_default_value(test_val, prop_dict, error): + """Return <test_val> if a valid default value for a CCPP field, + otherwise, None. + If <error> is True, raise an Exception if <value> is not valid. + A valid value is determined by the 'type' of the variable. It is an + error for there to be no 'type' property in <prop_dict>. 
+ >>> check_default_value('314', {'type':'integer'}, False) + '314' + >>> check_default_value('314', {'type':'integer'}, True) + '314' + >>> check_default_value('314', {'type':'integer', 'kind':'ikind'}, True) #doctest: +IGNORE_EXCEPTION_DETAIL + Traceback (most recent call last): + CCPPError: 314 is not a valid Fortran integer of kind, ikind + >>> check_default_value('314_ikind', {'type':'integer', 'kind':'ikind'}, True) + '314_ikind' + >>> check_default_value('314', {'type':'real'}, False) + + >>> check_default_value('314', {'type':'real'}, True) #doctest: +IGNORE_EXCEPTION_DETAIL + Traceback (most recent call last): + CCPPError: 314 is not a valid Fortran real + >>> check_default_value('3.14', {'type':'real'}, False) + '3.14' + >>> check_default_value('314', {'tipe':'integer'}, False) + + >>> check_default_value('314', {'local_name':'foo'}, True) #doctest: +IGNORE_EXCEPTION_DETAIL + Traceback (most recent call last): + CCPPError: foo does not have a 'type' attribute + >>> check_default_value('314', {'tipe':'integer'}, False) + + >>> check_default_value('314', None, True) + '314' + """ + valid = None + if prop_dict and ('type' in prop_dict): + valid = test_val + var_type = prop_dict['type'].lower().strip() + if 'kind' in prop_dict: + vkind = prop_dict['kind'].lower().strip() + else: + vkind = '' + # end if + if not check_fortran_literal(test_val, var_type, vkind): + valid = None + if error: + emsg = '{} is not a valid Fortran {}' + if vkind: + emsg += ' of kind, {}' + raise CCPPError(emsg.format(test_val, var_type, vkind)) + # end if + # end if (no else, <test_val> is okay) + elif prop_dict is None: + # Special case for checks during parsing, always pass + valid = test_val + elif error: + emsg = "{} does not have a 'type' attribute" + if 'local_name' in prop_dict: + lname = prop_dict['local_name'] + else: + lname = 'UNKNOWN' + # end if + raise CCPPError(emsg.format(lname)) + # end if + return valid + +def check_valid_values(test_val, prop_dict, error): + """Return <test_val> if a valid 'valid_values' attribute value, + otherwise, None. + If <error> is True, raise an Exception if <value> is not valid. + """ + raise ParseInternalError("NOT IMPLEMENTED") + +def check_diagnostic_fixed(test_val, prop_dict, error): + """Return <test_val> if a valid descriptor for a CCPP diagnostic, + otherwise, None. + If <error> is True, raise an Exception if <value> is not valid. + A fixed diagnostic name is any Fortran identifier, however, it is + an error to specify both 'diagnostic_name' and 'diagnostic_name_fixed'. 
+ >>> check_diagnostic_fixed("foo", {'diagnostic_name_fixed' : 'foo'}, False) + 'foo' + >>> check_diagnostic_fixed("foo", {'diagnostic_name_fixed' : 'foo'}, True) + 'foo' + >>> check_diagnostic_fixed("foo", {'diagnostic_name' : 'foo'}, False) + + >>> check_diagnostic_fixed("foo", {'diagnostic_name':'','local_name':'hi','standard_name':'mom'}, True) + 'foo' + >>> check_diagnostic_fixed("foo", {'diagnostic_name':'foo','local_name':'hi','standard_name':'mom'}, True) #doctest: +IGNORE_EXCEPTION_DETAIL + Traceback (most recent call last): + CCPPError: hi (mom) cannot have both 'diagnostic_name' and 'diagnostic_name_fixed' attributes + >>> check_diagnostic_fixed("2foo", {'diagnostic_name_fixed':'foo','local_name':'hi','standard_name':'mom'}, True) #doctest: +IGNORE_EXCEPTION_DETAIL + Traceback (most recent call last): + CCPPError: '2foo' (hi) is not a valid fixed diagnostic name + """ + valid = test_val + if (prop_dict and ('diagnostic_name' in prop_dict) and + prop_dict['diagnostic_name']): + valid = None + if error: + emsg = "{} ({}) cannot have both 'diagnostic_name' and " + emsg += "'diagnostic_name_fixed' attributes" + if 'local_name' in prop_dict: + lname = prop_dict['local_name'] + else: + lname = 'UNKNOWN' + # end if + if 'standard_name' in prop_dict: + sname = prop_dict['standard_name'] + else: + sname = 'UNKNOWN' + # end if + raise CCPPError(emsg.format(lname, sname)) + # end if + elif check_fortran_id(test_val, prop_dict, False) is None: + valid = None + if error: + emsg = "'{}' ({}) is not a valid fixed diagnostic name" + if 'local_name' in prop_dict: + lname = prop_dict['local_name'] + else: + lname = 'UNKNOWN' + # end if + raise CCPPError(emsg.format(test_val, lname)) + # end if + # end if + return valid + +######################################################################## + +_DIAG_PRE = r"("+FORTRAN_ID+")?" +_DIAG_SUFF = r"([_0-9A-Za-z]+)?" +_DIAG_PROP = r"((\${process}|\${scheme_name})"+_DIAG_SUFF+r")" +_DIAG_RE = re.compile(_DIAG_PRE+_DIAG_PROP+r"?$") + +def check_diagnostic_id(test_val, prop_dict, error): + """Return <test_val> if a valid descriptor for a CCPP diagnostic, + otherwise, None. + If <error> is True, raise an Exception if <value> is not valid. + A diagnostic name is a Fortran identifier with the optional + addition of one variable substitution. + A variable substitution is a substring of the form of either: + ${process}: The scheme process name will be substituted for this + substring. If this substring is included, it is an error for + there to be no process specified by the scheme (although this + error cannot be detected by this routine). + ${scheme_name}: The scheme name will be substituted for this substring. + It is an error to specify both 'diagnostic_name' and + 'diagnostic_name_fixed'. 
+ >>> check_diagnostic_id("foo", {'diagnostic_name' : 'foo'}, False) + 'foo' + >>> check_diagnostic_id("foo", {'diagnostic_name' : 'foo'}, True) + 'foo' + >>> check_diagnostic_id("foo", {'diagnostic_name_fixed' : 'foo'}, False) + + >>> check_diagnostic_id("foo_${process}", {}, False) + 'foo_${process}' + >>> check_diagnostic_id("foo_${process}_2bad", {}, False) + 'foo_${process}_2bad' + >>> check_diagnostic_id("${process}_2bad", {}, False) + '${process}_2bad' + >>> check_diagnostic_id("foo_${scheme_name}", {}, False) + 'foo_${scheme_name}' + >>> check_diagnostic_id("foo_${scheme_name}_2bad", {}, False) + 'foo_${scheme_name}_2bad' + >>> check_diagnostic_id("${scheme_name}_suff", {}, False) + '${scheme_name}_suff' + >>> check_diagnostic_id("pref_${scheme}_suff", {}, False) + + >>> check_diagnostic_id("pref_${scheme_name_suff", {}, False) + + >>> check_diagnostic_id("pref_$scheme_name}_suff", {}, False) + + >>> check_diagnostic_id("pref_{scheme_name}_suff", {}, False) + + >>> check_diagnostic_id("foo", {'diagnostic_name_fixed':'','local_name':'hi','standard_name':'mom'}, True) + 'foo' + >>> check_diagnostic_id("foo", {'diagnostic_name_fixed':'foo','local_name':'hi','standard_name':'mom'}, True) #doctest: +IGNORE_EXCEPTION_DETAIL + Traceback (most recent call last): + CCPPError: hi (mom) cannot have both 'diagnostic_name' and 'diagnostic_name_fixed' attributes + >>> check_diagnostic_id("2foo", {'diagnostic_name':'foo','local_name':'hi','standard_name':'mom'}, True) #doctest: +IGNORE_EXCEPTION_DETAIL + Traceback (most recent call last): + CCPPError: '2foo' (hi) is not a valid diagnostic name + """ + if (prop_dict and ('diagnostic_name_fixed' in prop_dict) and + prop_dict['diagnostic_name_fixed']): + valid = None + if error: + emsg = "{} ({}) cannot have both 'diagnostic_name' and " + emsg += "'diagnostic_name_fixed' attributes" + if 'local_name' in prop_dict: + lname = prop_dict['local_name'] + else: + lname = 'UNKNOWN' + # end if + if 'standard_name' in prop_dict: + sname = prop_dict['standard_name'] + else: + sname = 'UNKNOWN' + # end if + raise CCPPError(emsg.format(lname, sname)) + # end if + else: + match = _DIAG_RE.match(test_val) + if match is None: + valid = None + if error: + emsg = "'{}' is not a valid diagnostic_name value" + raise CCPPError(emsg.format(test_val)) + # end if + else: + valid = test_val + # end if + # end if + return valid + +######################################################################## + def check_balanced_paren(string, start=0, error=False): """Return <string> indices delineating a balance set of parentheses. Parentheses in character context do not count. @@ -374,7 +943,7 @@ def check_balanced_paren(string, start=0, error=False): Return start and end indices if found If no parentheses are found, return (-1, -1). If a left parenthesis is found but no balancing right, return (begin, -1) - where begin + where begin is the index where the left parenthesis was found. If error is True, raise a CCPPError. 
     >>> check_balanced_paren("foo")
     (-1, -1)
@@ -405,35 +974,40 @@
             elif inchar is None:
                 inchar = string[index]
             # else in character context, keep going
-            # End if
+            # end if
         elif inchar is not None:
             # In character context, keep going
             pass
         elif string[index] == '(':
             if depth == 0:
                 begin = index
-            # End if
+            # end if
             depth = depth + 1
             if depth == 0:
                 break
-            # End if
+            # end if
         elif string[index] == ')':
             depth = depth - 1
             if depth == 0:
                 end = index
                 break
-            # End if
+            # end if
         # else just keep going
-        # End if
+        # end if
         index = index + 1
     # End while
     if (begin >= 0) and (end < 0) and error:
         raise CCPPError("ERROR: Unbalanced parenthesis in '{}'".format(string))
-    # End if
+    # end if
     return begin, end

 ########################################################################

+def registered_fortran_ddt_names():
+    return _REGISTERED_FORTRAN_DDT_NAMES
+
+########################################################################
+
 def registered_fortran_ddt_name(name):
     if name in _REGISTERED_FORTRAN_DDT_NAMES:
         return name
diff --git a/scripts/parse_tools/parse_log.py b/scripts/parse_tools/parse_log.py
index 150293a1..ec96e4a1 100644
--- a/scripts/parse_tools/parse_log.py
+++ b/scripts/parse_tools/parse_log.py
@@ -7,30 +7,43 @@
 # CCPP framework imports

 def init_log(name, level=None):
+    """Initialize a new logger object"""
     logger = logging.getLogger(name)
     # Turn logging to WARNING if not set
     llevel = logger.getEffectiveLevel()
     if (level is None) and (llevel == logging.NOTSET):
         logger.setLevel(logging.WARNING)
+    elif level:
+        logger.setLevel(level)
     # End if
     set_log_to_stdout(logger)
     return logger

 def set_log_level(logger, level):
+    """Set the logging level of <logger> to <level>"""
     logger.setLevel(level)

 def remove_handlers(logger):
+    """Remove all handlers from <logger>"""
     for handler in list(logger.handlers):
         logger.removeHandler(handler)

 def set_log_to_stdout(logger):
+    """Set <logger> to log to standard out"""
     remove_handlers(logger)
     logger.addHandler(logging.StreamHandler())

 def set_log_to_null(logger):
+    """Set <logger> to log to NULL"""
     remove_handlers(logger)
     logger.addHandler(logging.NullHandler())

 def set_log_to_file(logger, filename):
+    """Set <logger> to log to <filename>"""
     remove_handlers(logger)
-    logger.addHandler(logging.StreamHandler())
+    logger.addHandler(logging.FileHandler(filename))
+
+def flush_log(logger):
+    """Flush all pending output from <logger>"""
+    for handler in list(logger.handlers):
+        handler.flush()
diff --git a/scripts/parse_tools/parse_object.py b/scripts/parse_tools/parse_object.py
index 2bff3d13..6f13dd64 100755
--- a/scripts/parse_tools/parse_object.py
+++ b/scripts/parse_tools/parse_object.py
@@ -1,19 +1,17 @@
 #!/usr/bin/env python
 """A module for the base, ParseObject class"""

-# Python library imports
-import re
 # CCPP framework imports
-from .parse_source import ParseContext, CCPPError
+from parse_source import ParseContext, CCPPError, context_string

 ########################################################################

 class ParseObject(ParseContext):
     """ParseObject is a simple class that keeps track of an object's
     place in a file and safely produces lines from an array of lines
-    >>> ParseObject('foobar.F90', 1) #doctest: +ELLIPSIS
+    >>> ParseObject('foobar.F90', []) #doctest: +ELLIPSIS
     <__main__.ParseObject object at 0x...>
-    >>> ParseObject('foobar.F90', 1).filename
+    >>> ParseObject('foobar.F90', []).filename
     'foobar.F90'
     >>> ParseObject('foobar.F90', ["##hi mom",], line_start=1).curr_line()
     (None, 1)
@@ -33,84 +31,143 @@ class ParseObject(ParseContext):
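# Illustrative sketch (editorial, not part of the patch): one way the parse_log
# helpers above might be used.  Assumes scripts/parse_tools is on PYTHONPATH;
# the logger name and log file name are hypothetical.
import logging
from parse_log import init_log, set_log_level, set_log_to_file, flush_log

logger = init_log('capgen_example')           # defaults to WARNING, logs to stdout
set_log_level(logger, logging.DEBUG)          # raise verbosity
set_log_to_file(logger, 'capgen_example.log') # switch output to a file
logger.debug("metadata parsing started")
flush_log(logger)                             # push any buffered records out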
('!! hi mom', 1) """ + _max_errors = 32 + def __init__(self, filename, lines_in, line_start=0): - self._filename = filename - self._lines = lines_in - self._line_start = line_start - self._line_end = line_start - self._line_next = line_start + """Initialize this ParseObject""" + self.__filename = filename + self.__lines = lines_in + self.__line_start = line_start + self.__line_end = line_start + self.__line_next = line_start + self.__num_lines = len(self.__lines) + self.__error_message = "" + self.__num_errors = 0 super(ParseObject, self).__init__(linenum=line_start, filename=filename) @property def first_line_num(self): - 'Return the first line parsed' - return self._first_line + """Return the first line parsed""" + return self.__line_start @property def last_line_num(self): - 'Return the last line parsed' - return self._line_end + """Return the last line parsed""" + return self.__line_end + + def valid_line(self): + """Return True if the current line is valid""" + return (self.line_num >= 0) and (self.line_num < self.__num_lines) + + @property + def file_name(self): + """Return this object's filename""" + return self.__filename + + @property + def error_message(self): + """Return this object's error message""" + return self.__error_message def curr_line(self): - valid_line = self.line_num < len(self._lines) + """Return the current line (if valid) and the current line number. + If the current line is invalid, return None""" + valid_line = self.valid_line() _curr_line = None _my_curr_lineno = self.line_num if valid_line: try: - _curr_line = self._lines[self.line_num].rstrip() - self._line_next = self.line_num + 1 - self._line_end = self._line_next - except CCPPError as exc: + _curr_line = self.__lines[self.line_num].rstrip() + self.__line_next = self.line_num + 1 + self.__line_end = self.__line_next + except CCPPError: + self.add_syntax_err("line", self.line_num) valid_line = False - # End if - # We allow continuation self._lines (ending with a single backslash) + # end if + # We allow continuation self.__lines (ending with a single backslash) if valid_line and _curr_line.endswith('\\'): - next_line, lnum = self.next_line() + next_line, _ = self.next_line() if next_line is None: # We ran out of lines, just strip the backslash _curr_line = _curr_line[0:len(_curr_line)-1] else: _curr_line = _curr_line[0:len(_curr_line)-1] + next_line - # End if - # End if + # end if + # end if # curr_line should not change the line number self.line_num = _my_curr_lineno return _curr_line, self.line_num def next_line(self): - self.line_num = self._line_next + """Return the next line in our file (if valid) and the next line's + line number. If the next line is not valid, return None""" + self.line_num = self.__line_next return self.curr_line() def peek_line(self, line_num): - if (line_num >= 0) and (line_num < len(self._lines)): - return self._lines[line_num] - else: - return None - # End if + """Return the text of <line_num> without advancing to that line. 
+ if <line_num> is out of bounds, return None.""" + if (line_num >= 0) and (line_num < len(self.__lines)): + return self.__lines[line_num] + # end if + return None + + def add_syntax_err(self, token_type, token=None, skip_context=False): + """Add a ParseSyntaxError-type message to this object's error + log, separating it from any previous messages with a newline.""" + if self.__error_message: + if self.__num_errors == self._max_errors: + self.__error_message += '\nMaximum number of errors exceeded' + self.line_num = self.__num_lines # Intentionally walk off end + self.__line_next = self.line_num + elif self.__num_errors > self._max_errors: + # Oops, something went wrong, panic! + raise CCPPError(self.error_message) + # end if + self.__error_message += '\n' + # end if + if self.__num_errors < self._max_errors: + if skip_context: + cstr = "" + else: + cstr = context_string(self) + # end if + if token is None: + self.__error_message += "{}{}".format(token_type, cstr) + else: + self.__error_message += "Invalid {}, '{}'{}".format(token_type, + token, cstr) + # end if + # end if + self.__num_errors += 1 def reset_pos(self, line_start=0): - if (line_start < 0) or (line_start >= len(self._lines)): - raise CCPPError('Attempt to reset_pos to non-existent line, {}'.format(line_start)) - else: - self.line_num = line_start - self._line_next = line_start - # End if + """Attempt to set the current file position to <line_start>. + If <line_start> is out of bounds, raise an exception.""" + if (line_start < 0) or (line_start >= self.__num_lines): + emsg = 'Attempt to reset_pos to non-existent line, {}' + raise CCPPError(emsg.format(line_start)) + # end if + self.line_num = line_start + self.__line_next = line_start def write_line(self, line_num, line): - "Overwrite line, <line_num> with <line>" - if (line_num < 0) or (line_num >= len(self._lines)): - raise CCPPError('Attempt to write non-existent line, {}'.format(line_num)) - else: - self._lines[line_num] = line - # End if + """Overwrite line, <line_num> with <line>. 
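# Illustrative sketch (editorial, not part of the patch): how the reworked
# ParseObject can accumulate syntax errors through add_syntax_err() instead of
# raising immediately.  The file name, sample lines, and the "metadata line"
# token type are invented for illustration.
from parse_object import ParseObject

pobj = ParseObject('example.meta',
                   ["[ccpp-arg-table]", "  name = foo", "bad line"])
line, _ = pobj.curr_line()
while line is not None:
    if ('=' not in line) and (not line.startswith('[')):
        pobj.add_syntax_err("metadata line", token=line)
    # end if
    line, _ = pobj.next_line()
# end while
if pobj.error_message:
    # All collected messages, newline separated, each with file:line context
    print(pobj.error_message)
# end if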
+ If <line_start> is out of bounds, raise an exception.""" + if (line_num < 0) or (line_num >= len(self.__lines)): + emsg = 'Attempt to write non-existent line, {}' + raise CCPPError(emsg.format(line_num)) + # end if + self.__lines[line_num] = line def __del__(self): + """Attempt to cleanup memory used by this object""" try: - del self._lines + del self.__lines del self.regions - except Exception as e: + except Exception: pass # Python does not guarantee much about __del__ conditions - # End try + # end try ######################################################################## diff --git a/scripts/parse_tools/parse_source.py b/scripts/parse_tools/parse_source.py index 2296feb3..dfcb0432 100644 --- a/scripts/parse_tools/parse_source.py +++ b/scripts/parse_tools/parse_source.py @@ -2,13 +2,57 @@ """Classes to aid the parsing process""" +import sys +# Find python version +PY3 = sys.version_info[0] > 2 + +# pylint: disable=wrong-import-position # Python library imports -import collections +if PY3: + from collections.abc import Iterable +else: + from collections import Iterable +# end if import copy +import os.path +import logging # CCPP framework imports +# pylint: enable=wrong-import-position + +class _StdNameCounter(object): + """Class to hold a global counter to avoid using global keyword""" + __SNAME_NUM = 0 # Counter for unique standard names + + @classmethod + def new_stdname_number(cls): + """Increment and return the global counter.""" + _StdNameCounter.__SNAME_NUM += 1 + return _StdNameCounter.__SNAME_NUM + + @classmethod + def reset_stdname_counter(cls, reset_val=0): + """Reset the global counter to <reset_val>""" + _StdNameCounter.__SNAME_NUM = reset_val + +############################################################################### +def unique_standard_name(): +############################################################################### + """ + Return a unique standard name. + """ + return 'enter_standard_name_{}'.format(_StdNameCounter.new_stdname_number()) + +############################################################################### +def reset_standard_name_counter(): +############################################################################### + """ + Reset the unique_standard_name counter so that future calls to + unique_standard name will restart. + """ + _StdNameCounter.reset_stdname_counter() ############################################################################### -def context_string(context=None, with_comma=True): +def context_string(context=None, with_comma=True, nodir=False): ############################################################################### """Return a context string if <context> is not None otherwise, return an empty string. 
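# Illustrative sketch (editorial, not part of the patch): the standard-name
# counter helpers added in the hunk above.  Assumes scripts/parse_tools is on
# PYTHONPATH so parse_source imports directly.
from parse_source import unique_standard_name, reset_standard_name_counter

print(unique_standard_name())    # 'enter_standard_name_1'
print(unique_standard_name())    # 'enter_standard_name_2'
reset_standard_name_counter()    # subsequent names start over from 1
print(unique_standard_name())    # 'enter_standard_name_1'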
@@ -17,36 +61,68 @@ def context_string(context=None, with_comma=True): '' >>> context_string(with_comma=True) '' - >>> context_string(context= ParseContext(linenum=32, filename="source.F90"), with_comma=False) + >>> context_string(context= ParseContext(linenum=32, filename="dir/source.F90"), with_comma=False) + 'dir/source.F90:33' + >>> context_string(context= ParseContext(linenum=32, filename="dir/source.F90"), with_comma=True) + ', at dir/source.F90:33' + >>> context_string(context= ParseContext(linenum=32, filename="dir/source.F90")) + ', at dir/source.F90:33' + >>> context_string(context= ParseContext(filename="dir/source.F90"), with_comma=False) + 'dir/source.F90' + >>> context_string(context= ParseContext(filename="dir/source.F90"), with_comma=True) + ', in dir/source.F90' + >>> context_string(context= ParseContext(filename="dir/source.F90")) + ', in dir/source.F90' + >>> context_string(nodir=True) + '' + >>> context_string(with_comma=True, nodir=True) + '' + >>> context_string(context= ParseContext(linenum=32, filename="dir/source.F90"), with_comma=False, nodir=True) 'source.F90:33' - >>> context_string(context= ParseContext(linenum=32, filename="source.F90"), with_comma=True) + >>> context_string(context= ParseContext(linenum=32, filename="dir/source.F90"), with_comma=True, nodir=True) ', at source.F90:33' - >>> context_string(context= ParseContext(linenum=32, filename="source.F90")) + >>> context_string(context= ParseContext(linenum=32, filename="dir/source.F90"), nodir=True) ', at source.F90:33' - >>> context_string(context= ParseContext(filename="source.F90"), with_comma=False) + >>> context_string(context= ParseContext(filename="dir/source.F90"), with_comma=False, nodir=True) 'source.F90' - >>> context_string(context= ParseContext(filename="source.F90"), with_comma=True) + >>> context_string(context= ParseContext(filename="dir/source.F90"), with_comma=True, nodir=True) ', in source.F90' - >>> context_string(context= ParseContext(filename="source.F90")) + >>> context_string(context= ParseContext(filename="dir/source.F90"), nodir=True) ', in source.F90' """ + if context is None: + where_str = '' + elif context.line_num < 0: + where_str = 'in ' + else: + where_str = 'at ' + # End if + if (context is not None) and with_comma: + comma = ', ' + else: + comma = '' + where_str = '' # Override previous setting + # End if + if context is None: + spec = '' + elif nodir: + spec = '{ctx:nodir}' + else: + spec = '{ctx}' + # End if if context is None: cstr = "" - elif with_comma: - if context.line_num < 0: - cstr = ", in {}".format(context) - else: - cstr = ", at {}".format(context) - # End if else: - cstr = "{}".format(context) + cstr = '{comma}{where_str}' + spec # End if - return cstr + return cstr.format(comma=comma, where_str=where_str, ctx=context) ############################################################################### class CCPPError(ValueError): - "Class so programs can log user errors without backtrace" + """Class so programs can log user errors without backtrace""" def __init__(self, message): + """Initialize this exception""" + logging.shutdown() super(CCPPError, self).__init__(message) ######################################################################## @@ -54,6 +130,8 @@ def __init__(self, message): class ParseSyntaxError(CCPPError): """Exception that is aware of parsing context""" def __init__(self, token_type, token=None, context=None): + """Initialize this exception""" + logging.shutdown() cstr = context_string(context) if token is None: message = 
"{}{}".format(token_type, cstr) @@ -69,6 +147,8 @@ class ParseInternalError(Exception): Note that this error will not be trapped by programs such as ccpp_capgen """ def __init__(self, errmsg, context=None): + """Initialize this exception""" + logging.shutdown() message = "{}{}".format(errmsg, context_string(context)) super(ParseInternalError, self).__init__(message) @@ -77,15 +157,18 @@ def __init__(self, errmsg, context=None): class ParseContextError(CCPPError): """Exception for errors using ParseContext""" def __init__(self, errmsg, context): + """Initialize this exception""" + logging.shutdown() message = "{}{}".format(errmsg, context_string(context)) super(ParseContextError, self).__init__(message) ######################################################################## -class ContextRegion(collections.Iterable): +class ContextRegion(Iterable): """Class to imitate the LIFO nature of program language blocks""" def __init__(self): + """Initialize this ContextRegion""" self._lifo = list() def push(self, rtype, rname): @@ -97,29 +180,22 @@ def pop(self): return self._lifo.pop() def type_list(self): - "Return just the types in the list" + """Return just the types in the list""" return [x[0] for x in self._lifo] def __iter__(self): + """Local version of iterator""" for item in self._lifo: yield item[0] def __len__(self): + """Local implementation of len builtin""" return len(self._lifo) def __getitem__(self, index): + """Special item getter for a ContextRegion""" return self._lifo[index] - def __del__(self): - try: - while len(self._lifo) > 0: - x = self._lifo.pop() - del x - # End while - except Exception as e: - pass # Python does not guarantee much about __del__ conditions - # End try - ######################################################################## class ParseContext(object): @@ -140,7 +216,21 @@ class ParseContext(object): """ + # python 2/3 difference + try: + __pstr_types = (str, unicode) + except NameError: + __pstr_types = (str,) + # End try + def __init__(self, linenum=None, filename=None, context=None): + """Initialize this ParseContext""" + # Set regions first in case of exception + if context is not None: + self.__regions = copy.deepcopy(context.regions) + else: + self.__regions = ContextRegion() + # End if if context is not None: # If context is passed, ignore linenum linenum = context.line_num @@ -148,56 +238,73 @@ def __init__(self, linenum=None, filename=None, context=None): linenum = -1 elif not isinstance(linenum, int): raise CCPPError('ParseContext linenum must be an int') + # No else, everything is okay # End if if context is not None: # If context is passed, ignore filename filename = context.filename elif filename is None: filename = "<standard input>" - elif not isinstance(filename, str): + elif not isinstance(filename, ParseContext.__pstr_types): raise CCPPError('ParseContext filename must be a string') + # No else, everything is okay # End if self._linenum = linenum self._filename = filename - if context is not None: - self.regions = copy.deepcopy(context.regions) - else: - self.regions = ContextRegion() - # End if @property def line_num(self): - 'Return the current line' + """Return the current line""" return self._linenum @line_num.setter def line_num(self, newnum): + """Set a new line number for this context""" self._linenum = newnum @property def filename(self): - "'Return the object's filename" + """Return the object's filename""" return self._filename - def __str__(self): + @property + def regions(self): + """Return the object's region list""" + 
return self.__regions + + def __format__(self, spec): """Return a string representing the location in a file Note that self._linenum is zero based. + <spec> can be 'dir' (show filename directory) or 'nodir' filename only. + Any other spec entry is ignored. """ + if spec == 'dir': + fname = self._filename + elif spec == 'nodir': + fname = os.path.basename(self._filename) + else: + fname = self._filename + # End if if self._linenum >= 0: - return "{}:{}".format(self._filename, self._linenum+1) + fmt_str = "{}:{}".format(fname, self._linenum+1) else: - return "{}".format(self._filename) + fmt_str = "{}".format(fname) # End if + return fmt_str - def __del__(self): - try: - del self.regions - except Exception as e: - pass # Python does not guarantee much about __del__ conditions - # End try + def __str__(self): + """Return a string representing the location in a file + Note that self._linenum is zero based. + """ + if self._linenum >= 0: + retstr = "{}:{}".format(self._filename, self._linenum+1) + else: + retstr = "{}".format(self._filename) + # End if + return retstr def increment(self, inc=1): - "Increment the location within a file" + """Increment the location within a file""" if self._linenum < 0: self._linenum = 0 # End if @@ -207,51 +314,64 @@ def enter_region(self, region_type, region_name=None, nested_ok=True): """Mark the entry of a region (e.g., DDT, module, function). If nested_ok == False, throw an exception if the context is already inside a region with the same type.""" - if (not nested_ok) and (region_type in self.regions.type_list()): - raise ParseContextError("Cannot enter a nested {} region".format(region_type), self) + if (region_type not in self.__regions.type_list()) or nested_ok: + self.__regions.push(region_type, region_name) else: - self.regions.push(region_type, region_name) + emsg = "Cannot enter a nested {} region" + raise ParseContextError(emsg.format(region_type), self) + # End if def leave_region(self, region_type, region_name=None): """Mark the exit from a region. 
Check region name if possible""" - if len(self.regions) == 0: - raise ParseContextError("Cannot exit, not currently in any region", self) - else: - curr_type, curr_name = self.regions.pop() + if self.__regions: + curr_type, curr_name = self.__regions.pop() if curr_type != region_type: - raise ParseContextError("Trying to exit {} region while currently in {} region".format(region_type, curr_type), self) - elif (region_name is not None) and (curr_name is not None): + emsg = "Trying to exit {} region while currently in {} region" + raise ParseContextError(emsg.format(region_type, curr_type), + self) + # End if + if (region_name is not None) and (curr_name is not None): if region_name != curr_name: - raise ParseContextError("Trying to exit {} {} while currently in {} {}".format(region_type, region_name, curr_type, curr_name), self) + emsg = "Trying to exit {} {} while currently in {} {}" + raise ParseContextError(emsg.format(region_type, + region_name, + curr_type, + curr_name), self) # End if elif (region_name is not None) and (curr_name is None): - raise ParseContextError("Trying to exit {} {} while currently in unnamed {} region".format(region_type, region_name, curr_type), self) + emsg = "Trying to exit {} {} while currently in unnamed {} region" + raise ParseContextError(emsg.format(region_type, region_name, + curr_type), self) # End if + else: + raise ParseContextError("Cannot exit, not currently in any region", + self) # End if def curr_region(self): """Return the innermost current region""" - if len(self.regions) > 0: - return self.regions[-1] - else: - return None + curr = None + if self.__regions: + curr = self.__regions[-1] + # No else, will return None # End if + return curr def in_region(self, region_type, region_name=None): - "Return True iff we are currently in <region_type> <region_name>" + """Return True iff we are currently in <region_type> <region_name>""" return self.curr_region() == [region_type, region_name] def region_str(self): """Create a string describing the current region""" rgn_str = "" - for index in len(self.regions): - rtype, rname = self.regions[index] - if len(rgn_str) > 0: - rgn_str = rgn_str + " ==> " + for index in len(self.__regions): + rtype, rname = self.__regions[index] + if rgn_str: + rgn_str += " ==> " # End if - rgn_str = rgh_str + "{}".format(rtype) + rgn_str += "{}".format(rtype) if rname is not None: - rgn_str = rgh_str + " {}".format(rname) + rgn_str += " {}".format(rname) # End if # End for return rgn_str @@ -272,20 +392,24 @@ class ParseSource(object): """ def __init__(self, name_in, type_in, context_in): + """Initialize this ParseSource object.""" self._name = name_in self._type = type_in self._context = context_in @property def type(self): + """Return this source's type""" return self._type @property def name(self): + """Return this source's name""" return self._name @property def context(self): + """Return this source's context""" return self._context ######################################################################## diff --git a/scripts/parse_tools/preprocess.py b/scripts/parse_tools/preprocess.py index 616860ff..b2ef87f1 100755 --- a/scripts/parse_tools/preprocess.py +++ b/scripts/parse_tools/preprocess.py @@ -4,38 +4,42 @@ inclusion and exclusion of lines based on preprocessor symbol definitions. 
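# Illustrative sketch (editorial, not part of the patch): the ParseContext
# region tracking reworked in the parse_source.py hunks above.  The region
# types and names are invented for illustration.
from parse_source import ParseContext, ParseContextError

ctx = ParseContext(linenum=0, filename='example_scheme.F90')
ctx.enter_region('MODULE', region_name='example_scheme')
ctx.enter_region('SCHEME', region_name='example_scheme_run')
print(ctx.curr_region())                 # innermost (SCHEME) region entry
ctx.leave_region('SCHEME', region_name='example_scheme_run')
try:
    ctx.leave_region('SCHEME')           # mismatch: still inside the MODULE region
except ParseContextError as perr:
    print(perr)
# end try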
""" +# Python library imports import re import ast +# CCPP Framewor imports +from parse_source import ParseSyntaxError __defined_re__ = re.compile(r"defined\s+([A-Za-z0-9_]+)") ############################################################################### class PreprocError(ValueError): - "Class to report preprocessor line errors" + """Class to report preprocessor line errors""" def __init__(self, message): super(PreprocError, self).__init__(message) ######################################################################## def preproc_bool(value): - # Turn a preprocessor value into a boolean + """Turn a preprocessor value into a boolean""" if isinstance(value, bool): line_val = value else: try: ival = int(value) line_val = ival != 0 - except ValueError as ve: + except ValueError: line_val = value != "0" - # End try - # End if + # end try + # end if return line_val ######################################################################## def preproc_item_value(item, preproc_defs): - "Find the value of a preproc <item> (part of a parsed preprocessor line)" + """Find the value of a preproc <item> (part of a parsed + preprocessor line)""" value = False if isinstance(item, ast.Expr): value = preproc_item_value(item.value, preproc_defs) @@ -46,7 +50,7 @@ def preproc_item_value(item, preproc_defs): args = item.args if len(args) != 1: raise PreprocError("Invalid defined statement, {}".format(ast.dump(item))) - # End if + # end if symbol = args[0].id # defined is True as long as we know about the symbol value = symbol in preproc_defs @@ -54,82 +58,83 @@ def preproc_item_value(item, preproc_defs): args = item.args if len(args) != 1: raise PreprocError("Invalid defined statement, {}".format(ast.dump(item))) - # End if + # end if symbol = args[0].id # notdefined is True as long as we do not know about the symbol value = symbol not in preproc_defs else: raise PreprocError("Cannot parse function {}".format(func)) - # End if + # end if elif isinstance(item, ast.BoolOp): left_val = preproc_item_value(item.values[0], preproc_defs) right_val = preproc_item_value(item.values[1], preproc_defs) - op = item.op - if isinstance(op, ast.And): + oper = item.op + if isinstance(oper, ast.And): value = preproc_bool(left_val) and preproc_bool(right_val) - elif isinstance(op, ast.Or): + elif isinstance(oper, ast.Or): value = preproc_bool(left_val) or preproc_bool(right_val) else: - raise PreprocError("Unknown binary operator, {}".format(op)) - # End if + raise PreprocError("Unknown binary operator, {}".format(oper)) + # end if elif isinstance(item, ast.UnaryOp): val = preproc_item_value(item.operand, preproc_defs) - op = item.op - if isinstance(op, ast.Not): + oper = item.op + if isinstance(oper, ast.Not): value = not preproc_bool(val) else: - raise PreprocError("Unknown unary operator, {}".format(op)) - # End if + raise PreprocError("Unknown unary operator, {}".format(oper)) + # end if elif isinstance(item, ast.Compare): left_val = preproc_item_value(item.left, preproc_defs) value = True - for index in xrange(len(item.ops)): - op = item.ops[index] + for index in range(len(item.ops)): + oper = item.ops[index] rcomp = item.comparators[index] right_val = preproc_item_value(rcomp, preproc_defs) - if isinstance(op, ast.Eq): + if isinstance(oper, ast.Eq): value = value and (left_val == right_val) - elif isinstance(op, ast.NotEq): + elif isinstance(oper, ast.NotEq): value = value and (left_val != right_val) else: # What remains are numerical comparisons, use integers try: ilval = int(left_val) irval = int(right_val) - if 
isinstance(op, ast.Gt): + if isinstance(oper, ast.Gt): value = value and (ilval > irval) - elif isinstance(op, ast.GtE): + elif isinstance(oper, ast.GtE): value = value and (ilval >= irval) - elif isinstance(op, ast.Lt): + elif isinstance(oper, ast.Lt): value = value and (ilval < irval) - elif isinstance(op, ast.LtE): + elif isinstance(oper, ast.LtE): value = value and (ilval <= irval) else: - raise PreprocError("Unknown comparison operator, {}".format(op)) - # End if - except ValueError as ve: + emsg = "Unknown comparison operator, {}" + raise PreprocError(emsg.format(oper)) + # end if + except ValueError: value = False - # End try - # End if - # End for + # end try + # end if + # end for elif isinstance(item, ast.Name): - id = item.id - if id in preproc_defs: - value = preproc_defs[id] + id_key = item.id + if id_key in preproc_defs: + value = preproc_defs[id_key] else: - value = id - # End if + value = id_key + # end if elif isinstance(item, ast.Num): value = item.n else: raise PreprocError("Cannot parse {}".format(item)) - # End if + # end if return value ######################################################################## def parse_preproc_line(line, preproc_defs): - "Parse a preprocessor line into a tree that can be evaluated" + """Parse a preprocessor line into a tree that can be evaluated""" # Scan line and translate to python syntax inchar = None # Character context line_len = len(line) @@ -142,7 +147,7 @@ def parse_preproc_line(line, preproc_defs): elif inchar is None: inchar = line[index] # Else in character context, just copy - # End if + # end if pline = pline + line[index] elif inchar is not None: # In character context, just copy current character @@ -164,10 +169,10 @@ def parse_preproc_line(line, preproc_defs): mlen = len(match.group(0)) pline = pline + "defined ({})".format(match.group(1)) index = index + mlen - 1 - # End if - # End if + # end if + # end if index = index + 1 - # End while + # end while try: ast_line = ast.parse(pline) # We should only have one 'statement' @@ -178,17 +183,17 @@ def parse_preproc_line(line, preproc_defs): value = preproc_item_value(ast_line.body[0], preproc_defs) line_val = preproc_bool(value) success = True - # End if - except SyntaxError as se: + # end if + except SyntaxError: line_val = False success = False - # End try + # end try return line_val, success ######################################################################## class PreprocStack(object): - "Class to handle preprocess regions" + """Class to handle preprocess regions""" ifdef_re = re.compile(r"#\s*ifdef\s+(.*)") ifndef_re = re.compile(r"#\s*ifndef\s+(.*)") @@ -201,11 +206,17 @@ class PreprocStack(object): undef_re = re.compile(r"#\s*undef\s+([A-Za-z0-9_]+)") def __init__(self): + """Initialize our region stack""" self._region_stack = list() - def process_if_line(self, line, preproc_defs): - """Decide (el)?if <line> represents a True or False condition. + @staticmethod + def process_if_line(line, preproc_defs): + """Decide if (el)?if <line> represents a True or False condition. Return True iff the line evaluates to a True condition. + <preproc_defs> is a dictionary where each key is a symbol which + can be tested (e.g., 'FOO' in #ifdef FOO). The value is that + symbol's preprocessor value, if provided (e.g., 3 for -DFOO=3), + otherwise, it is None. 
Return second logical value of False if we are unable to process <line> >>> PreprocStack().process_if_line("#if 0", {'CCPP':1}) (False, True) @@ -273,102 +284,104 @@ def process_if_line(self, line, preproc_defs): match = PreprocStack.ifelif_re.match(line) if match is None: return False, False # This is not a preproc line - else: - value, ok = parse_preproc_line(match.group(1).strip(), preproc_defs) - return value, ok - # End if + # end if + value, okay = parse_preproc_line(match.group(1).strip(), preproc_defs) + return value, okay def process_line(self, line, preproc_defs, pobj, logger): + """Read <line> and return if it is a preprocessor line. + In addition, if it is a preprocessor line enter an appropriate region + if indicated by <preproc_defs>.""" sline = line.strip() is_preproc_line = PreprocStack.is_preproc_line(line) if is_preproc_line and (preproc_defs is not None): match = PreprocStack.ifdef_re.match(sline) if match is not None: - if match.group(1) in preproc_defs: - start_region = preproc_defs[match.group(1)] != 0 - else: - start_region = False - # End if + start_region = match.group(1) in preproc_defs if start_region and (logger is not None): - logger.debug('Preproc: Starting True region ({}) on line {}'.format(match.group(1), pobj)) - # End if + lmsg = "Preproc: Starting True region ({}) on line {}" + logger.debug(lmsg.format(match.group(1), pobj)) + # end if self.enter_region(start_region) - # End if + # end if if match is None: match = PreprocStack.ifndef_re.match(sline) if match is not None: - if match.group(1) in preproc_defs: - start_region = preproc_defs[match.group(1)] == 0 - else: - start_region = True - # End if + start_region = match.group(1) not in preproc_defs if (not start_region) and (logger is not None): - logger.debug('Preproc: Starting False region ({}) on line {}'.format(match.group(1), pobj)) - # End if + lmsg = "Preproc: Starting False region ({}) on line {}" + logger.debug(lmsg.format(match.group(1), pobj)) + # end if self.enter_region(start_region) - # End if - # End if + # end if + # end if if match is None: match = PreprocStack.if_re.match(sline) if match is not None: - line_val, success = self.process_if_line(sline, preproc_defs) + line_val, success = self.process_if_line(sline, + preproc_defs) self.enter_region(line_val) if (not success) and (logger is not None): - logger.warning("WARNING: Preprocessor #if statement not handled, at {}".format(pobj)) - # End if - # End if - # End if + lmsg = "WARNING: Preprocessor #if statement not handled, at {}" + logger.warning(lmsg.format(pobj)) + # end if + # end if + # end if if match is None: match = PreprocStack.elif_re.match(sline) if match is not None: - line_val, success = self.process_if_line(sline, preproc_defs) + line_val, success = self.process_if_line(sline, + preproc_defs) self.modify_region(line_val) if (not success) and (logger is not None): - logger.warning("WARNING: Preprocessor #elif statement not handled, at {}".format(pobj)) - # End if - # End if - # End if + lmsg = "WARNING: Preprocessor #elif statement not handled, at {}" + logger.warning(lmsg.format(pobj)) + # end if + # end if + # end if if match is None: match = PreprocStack.else_re.match(sline) if match is not None: # Always try to use True for else, modify_region will set # correct value self.modify_region(True) - # End if - # End if + # end if + # end if if match is None: match = PreprocStack.end_re.match(sline) if match is not None: - self.exit_region() - # End if - # End if + self.exit_region(pobj) + # end if + # end if if (match is 
None) and self.in_true_region(): match = PreprocStack.define_re.match(sline) if match is not None: # Add (or replace) a symbol to our defs preproc_defs[match.group(1)] = match.group(2) - # End if - # End if + # end if + # end if if (match is None) and self.in_true_region(): match = PreprocStack.undef_re.match(sline) if (match is not None) and (match.group(1) in preproc_defs): # Remove a symbol from our defs del preproc_defs[match.group(1)] - # End if - # End if + # end if + # end if # Ignore all other lines - # End if + # end if return is_preproc_line def enter_region(self, valid): - "Enter a new region (if, ifdef, ifndef) which may currently be valid" + """Enter a new region (if, ifdef, ifndef) which may + currently be valid""" self._region_stack.append([valid, valid]) - def exit_region(self): - "Leave the current (innermost) region" - if len(self._region_stack) == 0: - raise ParseSyntaxError("#endif found with no matching #if, #ifdef, or #ifndef", context=pobj) - # End if + def exit_region(self, pobj): + """Leave the current (innermost) region""" + if not self._region_stack: + emsg = "#endif found with no matching #if, #ifdef, or #ifndef" + raise ParseSyntaxError(emsg, context=pobj) + # end if self._region_stack.pop() def modify_region(self, valid): @@ -382,22 +395,22 @@ def modify_region(self, valid): self._region_stack.append([curr_region[0], False]) else: self._region_stack.append([curr_region[0], valid]) - # End if + # end if def in_true_region(self): - "Return True iff the current line should be processed" + """Return True iff the current line should be processed""" true_region = True for region in self._region_stack: if not region[1]: true_region = False break - # End if - # End for + # end if + # end for return true_region - @classmethod - def is_preproc_line(self, line): - 'Return True iff line appears to be a preprocessor line' + @staticmethod + def is_preproc_line(line): + """Return True iff line appears to be a preprocessor line""" return line.lstrip()[0] == '#' ######################################################################## @@ -405,4 +418,4 @@ def is_preproc_line(self, line): if __name__ == "__main__": import doctest doctest.testmod() -# End if +# end if diff --git a/scripts/parse_tools/xml_tools.py b/scripts/parse_tools/xml_tools.py new file mode 100644 index 00000000..a9c207e5 --- /dev/null +++ b/scripts/parse_tools/xml_tools.py @@ -0,0 +1,248 @@ +#!/usr/bin/env python + +""" +Parse a host-model registry XML file and return the captured variables. 
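# Illustrative sketch (editorial, not part of the patch): driving the updated
# PreprocStack over a few source lines.  The lines, the preprocessor symbol
# table, and the ParseObject file name are invented for illustration.
from preprocess import PreprocStack
from parse_object import ParseObject

lines = ["#ifdef CCPP", "use ccpp_kinds", "#else", "use host_kinds", "#endif"]
pobj = ParseObject('example.F90', lines)
pstack = PreprocStack()
preproc_defs = {'CCPP': 1}
kept = []
for line in lines:
    if not pstack.process_line(line, preproc_defs, pobj, None):
        if pstack.in_true_region():
            kept.append(line)
        # end if
    # end if
# end for
print(kept)    # ['use ccpp_kinds'] when CCPP is defined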
+""" + +# Python library imports +from __future__ import print_function +import os +import os.path +import subprocess +import sys +import xml.etree.ElementTree as ET +sys.path.insert(0, os.path.dirname(__file__)) +# pylint: disable=wrong-import-position +try: + from distutils.spawn import find_executable + _XMLLINT = find_executable('xmllint') +except ImportError: + _XMLLINT = None +# End try +# CCPP framework imports +from parse_source import CCPPError +from parse_log import init_log, set_log_to_null +# pylint: enable=wrong-import-position + +# Find python version +PY3 = sys.version_info[0] > 2 +PYSUBVER = sys.version_info[1] +_LOGGER = None + +############################################################################### +def call_command(commands, logger, silent=False): +############################################################################### + """ + Try a command line and return the output on success (None on failure) + >>> call_command(['ls', 'really__improbable_fffilename.foo'], _LOGGER) #doctest: +IGNORE_EXCEPTION_DETAIL + Traceback (most recent call last): + CCPPError: Execution of 'ls really__improbable_fffilename.foo' failed: + [Errno 2] No such file or directory + >>> call_command(['ls', 'really__improbable_fffilename.foo'], _LOGGER, silent=True) + False + >>> call_command(['ls'], _LOGGER) + True + """ + result = False + outstr = '' + try: + if PY3: + if PYSUBVER > 6: + cproc = subprocess.run(commands, check=True, + capture_output=True) + if not silent: + logger.debug(cproc.stdout) + # End if + result = cproc.returncode == 0 + elif PYSUBVER >= 5: + cproc = subprocess.run(commands, check=True, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE) + if not silent: + logger.debug(cproc.stdout) + # End if + result = cproc.returncode == 0 + else: + raise ValueError("Python 3 must be at least version 3.5") + # End if + else: + pproc = subprocess.Popen(commands, stdin=None, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE) + output, _ = pproc.communicate() + if not silent: + logger.debug(output) + # End if + result = pproc.returncode == 0 + # End if + except (OSError, CCPPError, subprocess.CalledProcessError) as err: + if silent: + result = False + else: + cmd = ' '.join(commands) + emsg = "Execution of '{}' failed with code:\n" + outstr = emsg.format(cmd, err.returncode) + outstr += "{}".format(err.output) + raise CCPPError(outstr) + # End if + # End of try + return result + +############################################################################### +def find_schema_version(root): +############################################################################### + """ + Find the version of the host registry file represented by root + >>> find_schema_version(ET.fromstring('<model name="CAM" version="1.0"></model>')) + [1, 0] + >>> find_schema_version(ET.fromstring('<model name="CAM" version="1.a"></model>')) #doctest: +IGNORE_EXCEPTION_DETAIL + Traceback (most recent call last): + CCPPError: Illegal version string, '1.a' + Format must be <integer>.<integer> + >>> find_schema_version(ET.fromstring('<model name="CAM" version="0.0"></model>')) #doctest: +IGNORE_EXCEPTION_DETAIL + Traceback (most recent call last): + CCPPError: Illegal version string, '0.0' + Major version must be at least 1 + >>> find_schema_version(ET.fromstring('<model name="CAM" version="0.-1"></model>')) #doctest: +IGNORE_EXCEPTION_DETAIL + Traceback (most recent call last): + CCPPError: Illegal version string, '0.0' + Minor version must be at least 0 + """ + verbits = None + if 'version' not in 
root.attrib: + raise CCPPError("version attribute required") + # End if + version = root.attrib['version'] + versplit = version.split('.') + try: + if len(versplit) != 2: + raise CCPPError('oops') + # End if (no else needed) + try: + verbits = [int(x) for x in versplit] + except ValueError as verr: + raise CCPPError(verr) + # End try + if verbits[0] < 1: + raise CCPPError('Major version must be at least 1') + # End if + if verbits[1] < 0: + raise CCPPError('Minor version must be non-negative') + # End if + except CCPPError as verr: + errstr = """Illegal version string, '{}' + Format must be <integer>.<integer>""" + ve_str = str(verr) + if ve_str: + errstr = ve_str + '\n' + errstr + # End if + raise CCPPError(errstr.format(version)) + # End try + return verbits + +############################################################################### +def find_schema_file(schema_root, version, schema_path=None): +############################################################################### + """Find and return the schema file based on <schema_root> and <version> + or return None. + If <schema_path> is present, use that as the directory to find the + appropriate schema file. Otherwise, just look in the current directory.""" + + verstring = '_'.join([str(x) for x in version]) + schema_filename = "{}_v{}.xsd".format(schema_root, verstring) + if schema_path: + schema_file = os.path.join(schema_path, schema_filename) + else: + schema_file = schema_filename + # End if + if os.path.exists(schema_file): + return schema_file + # End if + return None + +############################################################################### +def validate_xml_file(filename, schema_root, version, logger, + schema_path=None, error_on_noxmllint=False): +############################################################################### + """ + Find the appropriate schema and validate the XML file, <filename>, + against it using xmllint + """ + # Check the filename + if not os.path.isfile(filename): + raise CCPPError("validate_xml_file: Filename, '{}', does not exist".format(filename)) + # End if + if not os.access(filename, os.R_OK): + raise CCPPError("validate_xml_file: Cannot open '{}'".format(filename)) + # End if + if not schema_path: + # Find the schema, based on the model version + thispath = os.path.abspath(__file__) + pdir = os.path.dirname(os.path.dirname(os.path.dirname(thispath))) + schema_path = os.path.join(pdir, 'schema') + # End if + schema_file = find_schema_file(schema_root, version, schema_path) + if not (schema_file and os.path.isfile(schema_file)): + verstring = '.'.join([str(x) for x in version]) + emsg = """validate_xml_file: Cannot find schema for version {}, + {} does not exist""" + raise CCPPError(emsg.format(verstring, schema_file)) + # End if + if not os.access(schema_file, os.R_OK): + emsg = "validate_xml_file: Cannot open schema, '{}'" + raise CCPPError(emsg.format(schema_file)) + # End if + if _XMLLINT is not None: + logger.debug("Checking file {} against schema {}".format(filename, + schema_file)) + cmd = [_XMLLINT, '--noout', '--schema', schema_file, filename] + result = call_command(cmd, logger) + return result + # End if + lmsg = "xmllint not found, could not validate file {}" + if error_on_noxmllint: + raise CCPPError("validate_xml_file: " + lmsg.format(filename)) + # End if + logger.warning(lmsg.format(filename)) + return True # We could not check but still need to proceed + +############################################################################### +def read_xml_file(filename, 
logger=None): +############################################################################### + """Read the XML file, <filename>, and return its tree and root""" + if os.path.isfile(filename) and os.access(filename, os.R_OK): + if PY3: + file_open = (lambda x: open(x, 'r', encoding='utf-8')) + else: + file_open = (lambda x: open(x, 'r')) + # End if + with file_open(filename) as file_: + try: + tree = ET.parse(file_) + root = tree.getroot() + except ET.ParseError as perr: + emsg = "read_xml_file: Cannot read {}, {}" + raise CCPPError(emsg.format(filename, perr)) + elif not os.access(filename, os.R_OK): + raise CCPPError("read_xml_file: Cannot open '{}'".format(filename)) + else: + emsg = "read_xml_file: Filename, '{}', does not exist" + raise CCPPError(emsg.format(filename)) + # End if + if logger: + logger.debug("Read XML file, '{}'".format(filename)) + # End if + return tree, root + +############################################################################### + +if __name__ == "__main__": + _LOGGER = init_log('xml_tools') + set_log_to_null(_LOGGER) + try: + # First, run doctest + import doctest + doctest.testmod() + except CCPPError as cerr: + print("{}".format(cerr)) +# No else: diff --git a/scripts/state_machine.py b/scripts/state_machine.py new file mode 100644 index 00000000..8af4e571 --- /dev/null +++ b/scripts/state_machine.py @@ -0,0 +1,187 @@ +#!/usr/bin/env python +# + +"""Classes and methods to implement a simple state machine.""" + +# Python library imports +import re +from collections import OrderedDict +# CCPP framework imports +from parse_tools import FORTRAN_ID + +############################################################################### + +class StateMachine(object): + """Class and methods to implement a simple state machine. + Note, a collections.UserDict would be nice here but it is not in python 2. 
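# Illustrative sketch (editorial, not part of the patch): the xml_tools entry
# points added above.  The registry file name, schema root, and schema
# directory are hypothetical.
from parse_log import init_log
from xml_tools import read_xml_file, find_schema_version, validate_xml_file

logger = init_log('xml_example')
tree, root = read_xml_file('host_registry.xml', logger=logger)
version = find_schema_version(root)      # e.g., [1, 0] for version="1.0"
okay = validate_xml_file('host_registry.xml', 'host_registry', version, logger,
                         schema_path='schema', error_on_noxmllint=False)
if not okay:
    raise ValueError("host_registry.xml failed schema validation")
# end if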
+ >>> StateMachine() + StateMachine() + >>> StateMachine([('ab','a','b','a')]) + StateMachine(ab) + >>> StateMachine([('ab','a','b','a'),('cd','c','d','c')]) + StateMachine(ab, cd) + >>> StateMachine([('ab','a','b','a')]).add_transition('cd','c','d','c') + + >>> StateMachine([('ab','a','b','a')])['cd'] = ('c','d','c') + >>> StateMachine([('ab','a','b','a'),('cd','c','d','c')]).transitions() + ['ab', 'cd'] + >>> StateMachine([('ab','a','b','a')]).initial_state('ab') + 'a' + >>> StateMachine([('ab','a','b','a')]).final_state('ab') + 'b' + >>> StateMachine([('ab','a','b','a')]).transition_regex('ab') + re.compile('a$') + >>> StateMachine([('ab','a','b','a')]).function_match('foo_a', transition='ab') + ('foo', 'a', 'ab') + >>> StateMachine([('ab','a','b',r'ax?')]).function_match('foo_a', transition='ab') + ('foo', 'a', 'ab') + >>> StateMachine([('ab','a','b',r'ax?')]).function_match('foo_ax', transition='ab') + ('foo', 'ax', 'ab') + >>> StateMachine([('ab','a','b','a')]).function_match('foo_ab', transition='ab') + (None, None, None) + >>> StateMachine([('ab','a','b','a'),('cd','c','d','c')]).function_match('foo_c') + ('foo', 'c', 'cd') + >>> StateMachine([('ab','a','b',r'ax?')]).transition_match('a') + 'ab' + >>> StateMachine([('ab','a','b',r'ax?')]).transition_match('ax') + 'ab' + >>> StateMachine([('ab','a','b',r'ax?')]).transition_match('axx') + + >>> StateMachine([('ab','a','b','a')]).transition_match('ab') + + >>> StateMachine([('ab','a','b','a'),('cd','c','d','c')]).transition_match('c') + 'cd' + >>> StateMachine((('ab','a','b','a'),)).add_transition('ab','c','d','c') #doctest: +IGNORE_EXCEPTION_DETAIL + Traceback (most recent call last): + ValueError: ERROR: transition, 'ab', already exists + >>> StateMachine((('ab','a','b','a'))) #doctest: +IGNORE_EXCEPTION_DETAIL + Traceback (most recent call last): + ValueError: Invalid initial_data transition ('ab'), should be of the form (name, inital_state, final_state, regex). + >>> StateMachine([('ab','a','b','a')])['cd'] = ('c','d') #doctest: +IGNORE_EXCEPTION_DETAIL + Traceback (most recent call last): + ValueError: Invalid transition (('c', 'd')), should be of the form (inital_state, final_state, regex). + """ + + def __init__(self, initial_data=None): + """Implement a finite state machine. + <initial_data> is an iterable where each item has four elements: + (transition_name, <initial_state>, <final_state>, <transition_regex>) + <transition_regex> is a string representing allowable names for + functions which form part of the transition action. + """ + # Implement the State Transition Table as a tuple and use accessors + self.__stt__ = OrderedDict() + if initial_data is not None: + # Note that we need to add states with longer regular expressions + # before short ones so that we match correctly. + for trans in sorted(initial_data, key=lambda x: len(x[3]) if len(x) > 3 else 0, reverse=True): + if len(trans) != 4: + raise ValueError("Invalid initial_data transition ({}), should be of the form (name, inital_state, final_state, regex).".format(trans)) + # end if + self.add_transition(trans[0], trans[1], trans[2], trans[3]) + # end for + # end if + + def add_transition(self, name, init_state, final_state, regex): + """Add a transition to this state machine. 
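+ An illustrative call (the transition and state names below are arbitrary
+ placeholders, not CCPP phase names):
+ >>> StateMachine().add_transition('init', 'uninitialized', 'initialized', 'init')
+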
+ See __setitem__ for implementation details.""" + self[name] = (init_state, final_state, regex) + + def transitions(self): + """Return a list of transition names""" + return list(self.__stt__.keys()) + + def initial_state(self, transition): + """Return the initial (before) state for <transition>""" + return self.__stt__[transition][0] + + def final_state(self, transition): + """Return the final (after) state for <transition>""" + return self.__stt__[transition][1] + + def transition_regex(self, transition): + """Return the compiled regex for <transition>""" + return self.__stt__[transition][2] + + def function_regex(self, transition): + """Return the compiled functino regex for <transition>""" + return self.__stt__[transition][3] + + def transition_match(self, test_str, transition=None): + """Return the matched transition, if found. + """ + match_trans = None + if transition is None: + trans_list = self.transitions() + else: + trans_list = [transition] + # end if + for trans in trans_list: + regex = self.transition_regex(trans) + match = regex.match(test_str) + if match is not None: + match_trans = trans + break + # end if + # end for + return match_trans + + def function_match(self, test_str, transition=None): + """Return a function ID, transition identifier, and matched + transition if found. + If <transition> is None, look for a match in any transition, + otherwise, only look for a specific match to that transition. + """ + if transition is None: + trans_list = self.transitions() + else: + trans_list = [transition] + # end if + func_id = None + trans_id = None + match_trans = None + for trans in trans_list: + regex = self.function_regex(trans) + match = regex.match(test_str) + if match is not None: + func_id = match.group(1) + trans_id = match.group(2) + match_trans = trans + break + # end if + # end for + return func_id, trans_id, match_trans + + def __getitem__(self, key): + return self.__stt__[key] + + def __setitem__(self, key, value): + if key in self.__stt__: + raise ValueError("ERROR: transition, '{}', already exists".format(key)) + # end if + if len(value) != 3: + raise ValueError("Invalid transition ({}), should be of the form (inital_state, final_state, regex).".format(value)) + # end if + regex = re.compile(value[2] + r"$") + function = re.compile(FORTRAN_ID + r"_(" + value[2] + r")$") + self.__stt__[key] = (value[0], value[1], regex, function) + + def __delitem__(self, key): + del self.__stt__[key] + + def __iter__(self): + return iter(self.__stt__) + + def __len__(self): + return len(self.__stt__) + + def __str__(self): + return "StateMachine({})".format(", ".join(self.transitions())) + + def __repr__(self): + return str(self) + +############################################################################### +if __name__ == "__main__": + import doctest + doctest.testmod() diff --git a/src/ccpp_constituent_prop_mod.F90 b/src/ccpp_constituent_prop_mod.F90 new file mode 100644 index 00000000..2006b72c --- /dev/null +++ b/src/ccpp_constituent_prop_mod.F90 @@ -0,0 +1,1238 @@ +module ccpp_constituent_prop_mod + + ! ccpp_contituent_prop_mod contains types and procedures for storing + ! 
and retrieving constituent properties + + use ccpp_hashable, only: ccpp_hashable_t, ccpp_hashable_char_t + use ccpp_hash_table, only: ccpp_hash_table_t, ccpp_hash_iterator_t + use ccpp_kinds, only: kind_phys + + implicit none + private + + integer, parameter :: int_unassigned = -1 + real(kind_phys), parameter :: kphys_unassigned = HUGE(1.0_kind_phys) + + !!XXgoldyXX: NB: We end up with two copies of each metadata object, FIX!! + + type, public, extends(ccpp_hashable_char_t) :: ccpp_constituent_properties_t + ! A ccpp_constituent_properties_t object holds relevant metadata + ! for a constituent species and provides interfaces to access that data. + character(len=:), private, allocatable :: std_name + character(len=:), private, allocatable :: vert_dim + integer, private :: const_ind = int_unassigned + integer, private :: field_ind = int_unassigned + logical, private :: advected = .false. + contains + ! Required hashable method + procedure :: key => ccp_properties_get_key + ! Informational methods + procedure :: is_initialized => ccp_is_initialized + procedure :: standard_name => ccp_get_standard_name + procedure :: is_layer_var => ccp_is_layer_var + procedure :: is_interface_var => ccp_is_interface_var + procedure :: is_2d_var => ccp_is_2d_var + procedure :: vertical_dimension => ccp_get_vertical_dimension + procedure :: const_index => ccp_const_index + procedure :: field_index => ccp_field_index + procedure :: is_advected => ccp_is_advected + procedure :: equivalent => ccp_is_equivalent + ! Copy method (be sure to update this anytime fields are added) + procedure :: copyConstituent + generic :: assignment(=) => copyConstituent + ! Methods that change state + procedure :: initialize => ccp_initialize + procedure :: deallocate => ccp_deallocate + procedure :: set_const_index => ccp_set_const_index + procedure :: set_field_index => ccp_set_field_index + end type ccpp_constituent_properties_t + + type, public :: ccpp_model_constituents_t + ! A ccpp_model_constituents_t object holds all the metadata and field + ! data for a model run's constituents along with data and methods + ! to initialize and access the data. + integer, private :: num_layer_vars = 0 + integer, private :: num_interface_vars = 0 + integer, private :: num_2d_vars = 0 + integer, private :: num_layers = 0 + integer, private :: num_interfaces = 0 + type(ccpp_hash_table_t), private :: hash_table + logical, private :: table_locked = .false. + ! These fields are public to allow for efficient (i.e., no copying) + ! usage even though it breaks object independence + real(kind_phys), allocatable :: vars_layer(:,:,:) + real(kind_phys), allocatable :: vars_interface(:,:,:) + real(kind_phys), allocatable :: vars_2d(:,:) + type(ccpp_constituent_properties_t), allocatable :: const_metadata(:) + contains + ! Return .true. if a constituent matches pattern + procedure, private :: is_match => ccp_model_const_is_match + ! Return a constituent from the hash table + procedure, private :: find_const => ccp_model_const_find_const + ! Is the table locked (i.e., ready to be used)? + procedure :: locked => ccp_model_const_locked + ! Is it okay to add new metadata fields? + procedure :: okay_to_add => ccp_model_const_okay_to_add + ! Add a constituent's metadata to the master hash table + procedure :: new_field => ccp_model_const_add_metadata + ! Initialize hash table + procedure :: initialize_table => ccp_model_const_initialize + ! Freeze hash table and initialize constituent field arrays + procedure :: lock_table => ccp_model_const_lock + ! 
Empty (reset) the entire object + procedure :: reset => ccp_model_const_reset + ! Query number of constituents matching pattern + procedure :: num_constituents => ccp_model_const_num_match + ! Gather constituent fields matching pattern + !!XXgoldyXX: Might need a 2D version of this + procedure :: copy_in => ccp_model_const_copy_in_3d + ! Update constituent fields matching pattern + !!XXgoldyXX: Might need a 2D version of this + procedure :: copy_out => ccp_model_const_copy_out_3d + ! Return index of constituent matching standard name + procedure :: const_index => ccp_model_const_index + ! Return index of field matching standard name + procedure :: field_index => ccp_model_const_field_index + ! Return metadata matching standard name + procedure :: field_metada => ccp_model_const_metadata + end type ccpp_model_constituents_t + + private int_unassigned + private handle_allocate_error + +CONTAINS + + !######################################################################## + ! + ! CCPP_CONSTITUENT_PROPERTIES_T (constituent metadata) methods + ! + !######################################################################## + + subroutine copyConstituent(outConst, inConst) + ! Dummy arguments + class(ccpp_constituent_properties_t), intent(inout) :: outConst + type(ccpp_constituent_properties_t), intent(in) :: inConst + + outConst%std_name = inConst%std_name + outConst%vert_dim = inConst%vert_dim + outConst%const_ind = inConst%const_ind + outConst%field_ind = inConst%field_ind + outConst%advected = inConst%advected + end subroutine copyConstituent + + !####################################################################### + + subroutine handle_allocate_error(astat, fieldname, errflg, errmsg) + ! Generate an error message if <astat> indicates an allocation failure + + ! Dummy arguments + integer, intent(in) :: astat + character(len=*), intent(in) :: fieldname + integer, optional, intent(out) :: errflg + character(len=*), optional, intent(out) :: errmsg + + if (astat /= 0) then + if (present(errflg)) then + errflg = astat + end if + if (present(errmsg)) then + write(errmsg, '(4a,i0)') 'Error allocating ', & + 'ccpp_constituent_properties_t object component, ', & + trim(fieldname), ', error code = ', astat + end if + else + if (present(errflg)) then + errflg = 0 + end if + if (present(errmsg)) then + errmsg = '' + end if + end if + + end subroutine handle_allocate_error + + !####################################################################### + + function ccp_properties_get_key(hashable) + ! Return the constituent properties class key (std_name) + + ! Dummy arguments + class(ccpp_constituent_properties_t), intent(in) :: hashable + character(len=:), allocatable :: ccp_properties_get_key + + ccp_properties_get_key = hashable%std_name + + end function ccp_properties_get_key + + !####################################################################### + + logical function ccp_is_initialized(this, errflg, errmsg) + ! Return .true. iff <this> is initialized + ! If <this> is *not* initialized and <errflg> and/or <errmsg> is present, + ! fill these fields with an error status + ! If <this> *is* initialized and <errflg> and/or <errmsg> is present, + ! clear these fields. + + ! 
Dummy arguments + class(ccpp_constituent_properties_t), intent(in) :: this + integer, optional, intent(out) :: errflg + character(len=*), optional, intent(out) :: errmsg + + ccp_is_initialized = allocated(this%std_name) + if (ccp_is_initialized) then + if (present(errflg)) then + errflg = 0 + end if + if (present(errmsg)) then + errmsg = '' + end if + else + if (present(errflg)) then + errflg = 1 + end if + if (present(errmsg)) then + write(errmsg, *) 'ccpp_constituent_properties_t object ', & + 'is not initialized' + end if + end if + + end function ccp_is_initialized + + !####################################################################### + + subroutine ccp_initialize(this, std_name, vertical_dim, advected, & + errflg, errmsg) + ! Initialize all fields in <this> + + ! Dummy arguments + class(ccpp_constituent_properties_t), intent(inout) :: this + character(len=*), intent(in) :: std_name + character(len=*), intent(in) :: vertical_dim + logical, optional, intent(in) :: advected + integer, intent(out) :: errflg + character(len=*), intent(out) :: errmsg + ! Local variable + integer :: astat + + if (this%is_initialized()) then + errflg = 1 + write(errmsg, *) 'ccpp_constituent_properties_t object, ', & + trim(std_name), ', is already initialized as ', this%std_name + else + errflg = 0 + errmsg = '' + this%std_name = trim(std_name) + end if + if (errflg == 0) then + this%vert_dim = trim(vertical_dim) + end if + if (errflg == 0) then + if (present(advected)) then + this%advected = advected + else + this%advected = .false. + end if + end if + if (errflg /= 0) then + call this%deallocate() + end if + end subroutine ccp_initialize + + !####################################################################### + + subroutine ccp_deallocate(this) + ! Deallocate memory associated with this constituent property object + + ! Dummy argument + class(ccpp_constituent_properties_t), intent(inout) :: this + + if (allocated(this%std_name)) then + deallocate(this%std_name) + end if + if (allocated(this%vert_dim)) then + deallocate(this%vert_dim) + end if + this%field_ind = int_unassigned + this%advected = .false. + + end subroutine ccp_deallocate + + !####################################################################### + + subroutine ccp_get_standard_name(this, std_name, errflg, errmsg) + ! Return this constituent's standard name + + ! Dummy arguments + class(ccpp_constituent_properties_t), intent(in) :: this + character(len=*), intent(out) :: std_name + integer, optional, intent(out) :: errflg + character(len=*), optional, intent(out) :: errmsg + + if (this%is_initialized(errflg, errmsg)) then + std_name = this%std_name + end if + end subroutine ccp_get_standard_name + + !####################################################################### + + subroutine ccp_get_vertical_dimension(this, vert_dim, errflg, errmsg) + ! Return the standard name of this constituent's vertical dimension + + ! Dummy arguments + class(ccpp_constituent_properties_t), intent(in) :: this + character(len=*), intent(out) :: vert_dim + integer, optional, intent(out) :: errflg + character(len=*), optional, intent(out) :: errmsg + + if (this%is_initialized(errflg, errmsg)) then + vert_dim = this%vert_dim + end if + end subroutine ccp_get_vertical_dimension + + !####################################################################### + + logical function ccp_is_layer_var(this) result(is_layer) + ! Return .true. iff this constituent has a layer vertical dimension + + ! 
Dummy arguments + class(ccpp_constituent_properties_t), intent(in) :: this + ! Local variable + character(len=32) :: dimname + + call this%vertical_dimension(dimname) + is_layer = trim(dimname) == 'vertical_layer_dimension' + + end function ccp_is_layer_var + + !####################################################################### + + logical function ccp_is_interface_var(this) result(is_interface) + ! Return .true. iff this constituent has an interface vertical dimension + + ! Dummy arguments + class(ccpp_constituent_properties_t), intent(in) :: this + ! Local variable + character(len=32) :: dimname + + call this%vertical_dimension(dimname) + is_interface = trim(dimname) == 'vertical_interface_dimension' + + end function ccp_is_interface_var + + !####################################################################### + + logical function ccp_is_2d_var(this) result(is_2d) + ! Return .true. iff this constituent has a 2d vertical dimension + + ! Dummy arguments + class(ccpp_constituent_properties_t), intent(in) :: this + ! Local variable + character(len=32) :: dimname + + call this%vertical_dimension(dimname) + is_2d = len_trim(dimname) == 0 + + end function ccp_is_2d_var + + !####################################################################### + + integer function ccp_const_index(this, errflg, errmsg) + ! Return this constituent's master index (or -1 if not assigned) + + ! Dummy arguments + class(ccpp_constituent_properties_t), intent(in) :: this + integer, optional, intent(out) :: errflg + character(len=*), optional, intent(out) :: errmsg + + if (this%is_initialized(errflg, errmsg)) then + ccp_const_index = this%const_ind + end if + end function ccp_const_index + + !####################################################################### + + integer function ccp_field_index(this, errflg, errmsg) + ! Return this constituent's field index (or -1 if not assigned) + + ! Dummy arguments + class(ccpp_constituent_properties_t), intent(in) :: this + integer, optional, intent(out) :: errflg + character(len=*), optional, intent(out) :: errmsg + + if (this%is_initialized(errflg, errmsg)) then + ccp_field_index = this%field_ind + end if + + end function ccp_field_index + + !####################################################################### + + subroutine ccp_set_const_index(this, index, errflg, errmsg) + ! Set this constituent's index in the master constituent array + ! It is an error to try to set an index if it is already set + + ! Dummy arguments + class(ccpp_constituent_properties_t), intent(inout) :: this + integer, intent(in) :: index + integer, optional, intent(out) :: errflg + character(len=*), optional, intent(out) :: errmsg + + if (this%is_initialized(errflg, errmsg)) then + if (this%const_ind == int_unassigned) then + this%const_ind = index + else + if (present(errflg)) then + errflg = 1 + end if + if (present(errmsg)) then + write(errmsg, *) 'ccpp_constituent_properties_t ', & + 'const index is already set' + end if + end if + end if + + end subroutine ccp_set_const_index + + !####################################################################### + + subroutine ccp_set_field_index(this, findex, errflg, errmsg) + ! Set this constituent's field index + ! It is an error to try to set an index if it is already set + + ! 
Dummy arguments + class(ccpp_constituent_properties_t), intent(inout) :: this + integer, intent(in) :: findex + integer, optional, intent(out) :: errflg + character(len=*), optional, intent(out) :: errmsg + + if (this%is_initialized(errflg, errmsg)) then + if (this%field_ind == int_unassigned) then + this%field_ind = findex + else + if (present(errflg)) then + errflg = 1 + end if + if (present(errmsg)) then + write(errmsg, *) 'ccpp_constituent_properties_t ', & + 'field index is already set' + end if + end if + end if + end subroutine ccp_set_field_index + + !####################################################################### + + logical function ccp_is_advected(this, errflg, errmsg) + + ! Dummy arguments + class(ccpp_constituent_properties_t), intent(in) :: this + integer, optional, intent(out) :: errflg + character(len=*), optional, intent(out) :: errmsg + + if (this%is_initialized(errflg, errmsg)) then + ccp_is_advected = this%advected + end if + end function ccp_is_advected + + !####################################################################### + + logical function ccp_is_equivalent(this, oconst, & + errflg, errmsg) result(equiv) + + ! Dummy arguments + class(ccpp_constituent_properties_t), intent(in) :: this + type(ccpp_constituent_properties_t), intent(in) :: oconst + integer, optional, intent(out) :: errflg + character(len=*), optional, intent(out) :: errmsg + + if (this%is_initialized(errflg, errmsg) .and. & + oconst%is_initialized(errflg, errmsg)) then + equiv = (trim(this%std_name) == trim(oconst%std_name)) .and. & + (trim(this%vert_dim) == trim(oconst%vert_dim)) .and. & + (this%advected .eqv. oconst%advected) + else + equiv = .false. + end if + + end function ccp_is_equivalent + + !######################################################################## + ! + ! CCPP_MODEL_CONSTITUENTS_T (constituent field data) methods + ! + !######################################################################## + + logical function ccp_model_const_locked(this, errflg, errmsg, warn_func) + ! Return .true. iff <this> is locked (i.e., ready to use) + ! Optionally fill out <errflg> and <errmsg> if object not initialized + + ! Dummy arguments + class(ccpp_model_constituents_t), intent(in) :: this + integer, optional, intent(out) :: errflg + character(len=*), optional, intent(out) :: errmsg + character(len=*), optional, intent(in) :: warn_func + ! Local variable + character(len=*), parameter :: subname = 'ccp_model_const_locked' + + if (present(errflg)) then + errflg = 0 + end if + if (present(errmsg)) then + errmsg = '' + end if + ccp_model_const_locked = .false. + ! Use an initialized hash table as double check + if (this%hash_table%is_initialized()) then + ccp_model_const_locked = this%table_locked + if ( (.not. this%table_locked) .and. & + present(errmsg) .and. present(warn_func)) then + ! Write a warning as a courtesy to calling function but do not set + ! errflg (let caller decide). 
+ write(errmsg, *) trim(warn_func), & + ' WARNING: Model constituents not ready to use' + end if + else + if (present(errflg)) then + errflg = 1 + end if + if (present(errmsg)) then + if (present(warn_func)) then + write(errmsg, *) trim(warn_func), & + ' WARNING: Model constituents not initialized' + else + write(errmsg, *) subname, & + ' WARNING: Model constituents not initialized' + end if + end if + end if + + end function ccp_model_const_locked + + !######################################################################## + + logical function ccp_model_const_okay_to_add(this, errflg, errmsg, warn_func) + ! Return .true. iff <this> is initialized and not locked + ! Optionally fill out <errflg> and <errmsg> if the conditions are not met. + + ! Dummy arguments + class(ccpp_model_constituents_t), intent(inout) :: this + integer, optional, intent(out) :: errflg + character(len=*), optional, intent(out) :: errmsg + character(len=*), optional, intent(in) :: warn_func + ! Local variable + character(len=*), parameter :: subname = 'ccp_model_const_okay_to_add' + + ccp_model_const_okay_to_add = this%hash_table%is_initialized() + if (ccp_model_const_okay_to_add) then + ccp_model_const_okay_to_add = .not. this%locked(errflg=errflg, & + errmsg=errmsg, warn_func=subname) + if (.not. ccp_model_const_okay_to_add) then + if (present(errflg)) then + errflg = 1 + end if + if (present(errmsg)) then + if (present(warn_func)) then + write(errmsg, *) trim(warn_func), & + ' WARNING: Model constituents are locked' + else + errmsg = subname//' WARNING: Model constituents are locked' + end if + end if + end if + else + if (present(errflg)) then + errflg = 1 + end if + if (present(errmsg)) then + if (present(warn_func)) then + write(errmsg, *) trim(warn_func), & + ' WARNING: Model constituents not initialized' + else + errmsg = subname//' WARNING: Model constituents not initialized' + end if + end if + end if + + end function ccp_model_const_okay_to_add + + !######################################################################## + + subroutine ccp_model_const_add_metadata(this, field_data, errflg, errmsg) + ! Add a constituent's metadata to the master hash table + + ! Dummy arguments + class(ccpp_model_constituents_t), intent(inout) :: this + type(ccpp_constituent_properties_t), target, intent(in) :: field_data + integer, optional, intent(out) :: errflg + character(len=*), optional, intent(out) :: errmsg + ! Local variables + character(len=256) :: error + character(len=*), parameter :: subnam = 'ccp_model_const_add_metadata' + + if (this%okay_to_add(errflg=errflg, errmsg=errmsg, warn_func=subnam)) then + error = '' +!!XXgoldyXX: Add check on key to see if incompatible item already there. + call this%hash_table%add_hash_key(field_data, error) + if (len_trim(error) > 0) then + if (present(errflg)) then + errflg = 1 + end if + if (present(errmsg)) then + errmsg = trim(error) + end if + else + ! 
If we get here we are successful, add to variable count + if (field_data%is_layer_var()) then + this%num_layer_vars = this%num_layer_vars + 1 + else if (field_data%is_interface_var()) then + this%num_interface_vars = this%num_interface_vars + 1 + else if (field_data%is_2d_var()) then + this%num_2d_vars = this%num_2d_vars + 1 + else + if (present(errflg)) then + errflg = 1 + end if + if (present(errmsg)) then + call field_data%vertical_dimension(error, & + errflg=errflg, errmsg=errmsg) + if (len_trim(errmsg) == 0) then + write(errmsg, *) "ERROR: Unknown vertical dimension, '", & + trim(error), "'" + end if + end if + end if + end if + else + if (present(errflg)) then + errflg = 1 + end if + if (present(errmsg)) then + errmsg = 'ERROR: Model contituents are locked' + end if + end if + + end subroutine ccp_model_const_add_metadata + + !######################################################################## + + subroutine ccp_model_const_initialize(this, num_elements) + ! Initialize hash table, <num_elements> is total number of elements + + ! Dummy arguments + class(ccpp_model_constituents_t), intent(inout) :: this + integer, intent(in) :: num_elements + ! Local variable + integer :: tbl_size + + ! Clear any data + this%num_layer_vars = 0 + this%num_interface_vars = 0 + this%num_2d_vars = 0 + if (allocated(this%vars_layer)) then + deallocate(this%vars_layer) + end if + if (allocated(this%vars_interface)) then + deallocate(this%vars_interface) + end if + if (allocated(this%vars_2d)) then + deallocate(this%vars_2d) + end if + if (allocated(this%const_metadata)) then + deallocate(this%const_metadata) + end if + ! Figure a log base 2 for initializing hash table + tbl_size = num_elements * 10 ! Hash padding + tbl_size = int((log(real(tbl_size, kind_phys)) / log(2.0_kind_phys)) + & + 1.0_kind_phys) + ! Initialize hash table + call this%hash_table%initialize(tbl_size) + this%table_locked = .false. + + end subroutine ccp_model_const_initialize + + !######################################################################## + + function ccp_model_const_find_const(this, standard_name, errflg, errmsg) & + result(cprop) + ! Return a constituent with key, <standard_name>, from the hash table + ! <this> must be locked to execute this function + ! Since this is a private function, error checking for locked status + ! is *not* performed. + + ! Dummy arguments + class(ccpp_model_constituents_t), intent(in) :: this + character(len=*), intent(in) :: standard_name + integer, optional, intent(out) :: errflg + character(len=*), optional, intent(out) :: errmsg + type(ccpp_constituent_properties_t), pointer :: cprop + ! 
Local variables + class(ccpp_hashable_t), pointer :: hval + character(len=256) :: error + character(len=*), parameter :: subname = 'ccp_model_const_find_const' + + nullify(cprop) + hval => this%hash_table%table_value(standard_name, errmsg=error) + if (len_trim(error) > 0) then + if (present(errflg)) then + errflg = 1 + end if + if (present(errmsg)) then + write(errmsg, *) subname, ': ', trim(error) + end if + else + select type(hval) + type is (ccpp_constituent_properties_t) + cprop => hval + class default + if (present(errflg)) then + errflg = 1 + end if + if (present(errmsg)) then + write(errmsg, *) subname, ' ERROR: Bad hash table value', & + trim(standard_name) + end if + end select + end if + + end function ccp_model_const_find_const + + !######################################################################## + + subroutine ccp_model_const_lock(this, ncols, num_layers, num_interfaces, & + errflg, errmsg) + ! Freeze hash table and initialize constituent field arrays + + ! Dummy arguments + class(ccpp_model_constituents_t), intent(inout) :: this + integer, intent(in) :: ncols + integer, intent(in) :: num_layers + integer, intent(in) :: num_interfaces + integer, optional, intent(out) :: errflg + character(len=*), optional, intent(out) :: errmsg + ! Local variables + integer :: index_layer + integer :: index_interface + integer :: index_2d + integer :: index_const + integer :: astat + type(ccpp_hash_iterator_t) :: hiter + class(ccpp_hashable_t), pointer :: hval + type(ccpp_constituent_properties_t), pointer :: cprop + character(len=32) :: dimname + character(len=*), parameter :: subname = 'ccp_model_const_lock' + + if (this%locked(errflg=errflg, errmsg=errmsg, warn_func=subname)) then + if (present(errflg)) then + errflg = 1 + end if + if (present(errmsg)) then + if (len_trim(errmsg) == 0) then + write(errmsg, *) subname, & + ' WARNING: Model constituents already locked, ignoring' + end if + end if + else + index_layer = 0 + index_interface = 0 + index_2d = 0 + index_const = 0 + ! Make sure everything is really initialized + if (allocated(this%vars_layer)) then + deallocate(this%vars_layer) + end if + if (allocated(this%vars_interface)) then + deallocate(this%vars_interface) + end if + if (allocated(this%vars_2d)) then + deallocate(this%vars_2d) + end if + if (allocated(this%const_metadata)) then + deallocate(this%const_metadata) + end if + ! Allocate the constituent array + allocate(this%const_metadata(this%hash_table%num_values()), stat=astat) + call handle_allocate_error(astat, 'const_metadata', & + errflg=errflg, errmsg=errmsg) + ! Iterate through the hash table to find entries + if (astat == 0) then + call hiter%initialize(this%hash_table) + do + if (hiter%valid()) then + index_const = index_const + 1 + if (index_const > SIZE(this%const_metadata)) then + if (present(errflg)) then + errflg = 1 + end if + if (present(errmsg)) then + write(errmsg, *) subname, & + " ERROR: const index out of bounds" + end if + exit + end if + hval => hiter%value() + select type(hval) + type is (ccpp_constituent_properties_t) + cprop => hval + call cprop%set_const_index(index_const, & + errflg=errflg, errmsg=errmsg) + ! 
Figure out which type of variable this is + if (cprop%is_layer_var()) then + index_layer = index_layer + 1 + call cprop%set_field_index(index_layer, & + errflg=errflg, errmsg=errmsg) + else if (cprop%is_interface_var()) then + index_interface = index_interface + 1 + call cprop%set_field_index(index_interface, & + errflg=errflg, errmsg=errmsg) + else if (cprop%is_2d_var()) then + index_2d = index_2d + 1 + call cprop%set_field_index(index_2d, & + errflg=errflg, errmsg=errmsg) + else + if (present(errflg)) then + errflg = 1 + end if + if (present(errmsg)) then + call cprop%vertical_dimension(dimname, & + errflg=errflg, errmsg=errmsg) + if (len_trim(errmsg) == 0) then + write(errmsg, *) subname, & + " ERROR: Bad vertical dimension, '", & + trim(dimname), "'" + end if + end if + end if + this%const_metadata(index_const) = cprop + class default + if (present(errflg)) then + errflg = 1 + end if + if (present(errmsg)) then + write(errmsg, *) subname, ' ERROR: Bad hash table value' + end if + exit + end select + call hiter%next() + else + exit + end if + end do + ! Some size sanity checks + if (index_const /= this%hash_table%num_values()) then + if (present(errflg)) then + errflg = 1 + end if + if (present(errmsg)) then + write(errmsg, *) subname, & + " ERROR: Too few constituents found in hash table" + end if + else if (index_layer /= this%num_layer_vars) then + if (present(errflg)) then + errflg = 1 + end if + if (present(errmsg)) then + write(errmsg, '(2a,i0,a,i0)') subname, & + " ERROR: Wrong number of layer variables found (", & + index_layer, ") should be ", this%num_layer_vars + end if + else if (index_interface /= this%num_interface_vars) then + if (present(errflg)) then + errflg = 1 + end if + if (present(errmsg)) then + write(errmsg, '(2a,i0,a,i0)') subname, & + " ERROR: Wrong number of interface variables found (", & + index_interface, ") should be ", this%num_interface_vars + end if + else if (index_2d /= this%num_2d_vars) then + if (present(errflg)) then + errflg = 1 + end if + if (present(errmsg)) then + write(errmsg, '(2a,i0,a,i0)') subname, & + " ERROR: Wrong number of 2D variables found (", & + index_2d, ") should be ", this%num_2d_vars + end if + end if + ! Allocate the constituent field arrays + allocate(this%vars_layer(ncols, num_layers, index_layer), & + stat=astat) + call handle_allocate_error(astat, 'vars_layer', & + errflg=errflg, errmsg=errmsg) + if (astat == 0) then + this%num_layers = num_layers + this%vars_layer = kphys_unassigned + allocate(this%vars_interface(ncols, num_interfaces, & + index_interface), stat=astat) + call handle_allocate_error(astat, 'vars_interface', & + errflg=errflg, errmsg=errmsg) + end if + if (astat == 0) then + this%num_interfaces = num_interfaces + this%vars_interface = kphys_unassigned + allocate(this%vars_2d(ncols, index_2d), stat=astat) + call handle_allocate_error(astat, 'vars_2d', & + errflg=errflg, errmsg=errmsg) + end if + if (astat == 0) then + this%vars_2d = kphys_unassigned + end if + if (present(errflg)) then + if (errflg /= 0) then + astat = 1 + end if + end if + if (astat == 0) then + this%table_locked = .true. + end if + end if + end if + + end subroutine ccp_model_const_lock + + !######################################################################## + + subroutine ccp_model_const_reset(this) + ! Empty (reset) the entire object + + ! 
Dummy argument + class(ccpp_model_constituents_t), intent(inout) :: this + + if (allocated(this%vars_layer)) then + deallocate(this%vars_layer) + end if + if (allocated(this%vars_interface)) then + deallocate(this%vars_interface) + end if + if (allocated(this%vars_2d)) then + deallocate(this%vars_2d) + end if + if (allocated(this%const_metadata)) then + deallocate(this%const_metadata) + end if + call this%hash_table%clear() + + end subroutine ccp_model_const_reset + + !######################################################################## + + logical function ccp_model_const_is_match(this, index, advected) & + result(is_match) + ! Return .true. iff the constituent at <index> matches a pattern + ! Each (optional) property which is present represents something + ! which is required as part of a match. + ! Since this is a private function, error checking for locked status + ! is *not* performed. + + ! Dummy arguments + class(ccpp_model_constituents_t), intent(in) :: this + integer, intent(in) :: index + logical, optional, intent(in) :: advected + + ! By default, every constituent is a match + is_match = .true. + if (present(advected)) then + if (advected .neqv. this%const_metadata(index)%is_advected()) then + is_match = .false. + end if + end if + + end function ccp_model_const_is_match + + !######################################################################## + + integer function ccp_model_const_num_match(this, advected, & + errflg, errmsg) result(nmatch) + ! Query number of constituents matching pattern + ! Each (optional) property which is present represents something + ! which is required as part of a match. + ! <this> must be locked to execute this function + + ! Dummy arguments + class(ccpp_model_constituents_t), intent(in) :: this + logical, optional, intent(in) :: advected + integer, optional, intent(out) :: errflg + character(len=*), optional, intent(out) :: errmsg + ! Local variables + integer :: index + character(len=*), parameter :: subname = "ccp_model_const_num_match" + + nmatch = 0 + if (this%locked(errflg=errflg, errmsg=errmsg, warn_func=subname)) then + do index = 1, SIZE(this%const_metadata) + if (this%is_match(index, advected=advected)) then + nmatch = nmatch + 1 + end if + end do + end if + + end function ccp_model_const_num_match + + !######################################################################## + + subroutine ccp_model_const_copy_in_3d(this, const_array, advected, & + errflg, errmsg) + ! Gather constituent fields matching pattern + ! Each (optional) property which is present represents something + ! which is required as part of a match. + ! <this> must be locked to execute this function + + ! Dummy arguments + class(ccpp_model_constituents_t), intent(in) :: this + real(kind_phys), intent(out) :: const_array(:,:,:) + logical, optional, intent(in) :: advected + integer, optional, intent(out) :: errflg + character(len=*), optional, intent(out) :: errmsg + ! Local variables + integer :: index ! <this> const_metadata index + integer :: cindex ! const_array index + integer :: fld_ind ! const field index + integer :: max_cind ! Size of const_array + integer :: num_levels ! Levels of const_array + character(len=64) :: std_name + character(len=*), parameter :: subname = "ccp_model_const_copy_in_3d" + + if (this%locked(errflg=errflg, errmsg=errmsg, warn_func=subname)) then + cindex = 0 + max_cind = SIZE(const_array, 3) + num_levels = SIZE(const_array, 2) + do index = 1, SIZE(this%const_metadata) + if (this%is_match(index, advected=advected)) then + ! 
See if we have room for another constituent + cindex = cindex + 1 + if (cindex > max_cind) then + if (present(errflg)) then + errflg = 1 + end if + if (present(errmsg)) then + write(errmsg, *) subname, & + ": Too many constituents for <const_array>" + end if + exit + end if + ! Copy this constituent's field data to <const_array> + fld_ind = this%const_metadata(index)%field_index() + if (fld_ind < 1) then + if (present(errflg)) then + errflg = 1 + end if + if (present(errmsg)) then + call this%const_metadata(index)%standard_name(std_name) + write(errmsg, '(4a,i0,a,i0)') subname, & + ": No field index for '", trim(std_name), "'" + end if + else if (this%const_metadata(index)%is_layer_var()) then + if (this%num_layers == num_levels) then + const_array(:,:,cindex) = this%vars_layer(:,:,fld_ind) + else + if (present(errflg)) then + errflg = 1 + end if + if (present(errmsg)) then + call this%const_metadata(index)%standard_name(std_name) + write(errmsg, '(4a,i0,a,i0)') subname, & + ": Wrong number of vertical levels for ", & + trim(std_name), ', ', num_levels, & + ', expected ', this%num_layers + end if + exit + end if + else if (this%const_metadata(index)%is_interface_var()) then + if (this%num_interfaces == num_levels) then + const_array(:,:,cindex) = this%vars_interface(:,:,fld_ind) + else + if (present(errflg)) then + errflg = 1 + end if + if (present(errmsg)) then + call this%const_metadata(index)%standard_name(std_name) + write(errmsg, '(4a,i0,a,i0)') subname, & + ": Wrong number of vertical levels for ", & + std_name, ', ', num_levels, ', expected ', & + this%num_interfaces + end if + exit + end if + end if + end if + end do + end if + + end subroutine ccp_model_const_copy_in_3d + + !######################################################################## + + subroutine ccp_model_const_copy_out_3d(this, const_array, advected, & + errflg, errmsg) + ! Update constituent fields matching pattern + ! Each (optional) property which is present represents something + ! which is required as part of a match. + ! <this> must be locked to execute this function + + ! Dummy argument + class(ccpp_model_constituents_t), intent(inout) :: this + real(kind_phys), intent(in) :: const_array(:,:,:) + logical, optional, intent(in) :: advected + integer, optional, intent(out) :: errflg + character(len=*), optional, intent(out) :: errmsg + ! Local variables + integer :: index ! <this> const_metadata index + integer :: cindex ! const_array index + integer :: fld_ind ! const field index + integer :: max_cind ! Size of const_array + integer :: num_levels ! Levels of const_array + character(len=64) :: std_name + character(len=*), parameter :: subname = "ccp_model_const_copy_out_3d" + + if (this%locked(errflg=errflg, errmsg=errmsg, warn_func=subname)) then + cindex = 0 + max_cind = SIZE(const_array, 3) + num_levels = SIZE(const_array, 2) + do index = 1, SIZE(this%const_metadata) + if (this%is_match(index, advected=advected)) then + ! See if we have room for another constituent + cindex = cindex + 1 + if (cindex > max_cind) then + if (present(errflg)) then + errflg = 1 + end if + if (present(errmsg)) then + write(errmsg, *) subname, & + ": Too many constituents for <const_array>" + end if + exit + end if + ! 
Copy this field of to <const_array> to constituent's field data + fld_ind = this%const_metadata(index)%field_index() + if (fld_ind < 1) then + if (present(errflg)) then + errflg = 1 + end if + if (present(errmsg)) then + call this%const_metadata(index)%standard_name(std_name) + write(errmsg, '(4a,i0,a,i0)') subname, & + ": No field index for '", trim(std_name), "'" + end if + else if (this%const_metadata(index)%is_layer_var()) then + if (this%num_layers == num_levels) then + this%vars_layer(:,:,fld_ind) = const_array(:,:,cindex) + else + if (present(errflg)) then + errflg = 1 + end if + if (present(errmsg)) then + call this%const_metadata(index)%standard_name(std_name) + write(errmsg, '(4a,i0,a,i0)') subname, & + ": Wrong number of vertical levels for ", & + trim(std_name), ', ', num_levels, & + ', expected ', this%num_layers + end if + exit + end if + else if (this%const_metadata(index)%is_interface_var()) then + if (this%num_interfaces == num_levels) then + this%vars_interface(:,:,fld_ind) = const_array(:,:,cindex) + else + if (present(errflg)) then + errflg = 1 + end if + if (present(errmsg)) then + call this%const_metadata(index)%standard_name(std_name) + write(errmsg, '(4a,i0,a,i0)') subname, & + ": Wrong number of vertical levels for ", & + std_name, ', ', num_levels, ', expected ', & + this%num_interfaces + end if + exit + end if + end if + end if + end do + end if + + end subroutine ccp_model_const_copy_out_3d + + !######################################################################## + + integer function ccp_model_const_index(this, standard_name, errflg, errmsg) + ! Return index of metadata matching <standard_name>. + ! <this> must be locked to execute this function + + ! Dummy arguments + class(ccpp_model_constituents_t), intent(in) :: this + character(len=*), intent(in) :: standard_name + integer, optional, intent(out) :: errflg + character(len=*), optional, intent(out) :: errmsg + ! Local variables + type(ccpp_constituent_properties_t), pointer :: cprop + character(len=*), parameter :: subname = "ccp_model_const_index" + + if (this%locked(errflg=errflg, errmsg=errmsg, warn_func=subname)) then + cprop => this%find_const(standard_name, errflg=errflg, errmsg=errmsg) + if (associated(cprop)) then + ccp_model_const_index = cprop%const_index() + else + ccp_model_const_index = int_unassigned + end if + else + ccp_model_const_index = int_unassigned + end if + + end function ccp_model_const_index + + !######################################################################## + + integer function ccp_model_const_field_index(this, standard_name, & + errflg, errmsg) + ! Return index of field matching <standard_name>. + ! <this> must be locked to execute this function + + ! Dummy arguments + class(ccpp_model_constituents_t), intent(in) :: this + character(len=*), intent(in) :: standard_name + integer, optional, intent(out) :: errflg + character(len=*), optional, intent(out) :: errmsg + ! 
Local variables + type(ccpp_constituent_properties_t), pointer :: cprop + character(len=*), parameter :: subname = "ccp_model_field_index" + + if (this%locked(errflg=errflg, errmsg=errmsg, warn_func=subname)) then + cprop => this%find_const(standard_name, errflg=errflg, errmsg=errmsg) + if (associated(cprop)) then + ccp_model_const_field_index = cprop%field_index() + else + ccp_model_const_field_index = int_unassigned + end if + else + ccp_model_const_field_index = int_unassigned + end if + + end function ccp_model_const_field_index + + !######################################################################## + + subroutine ccp_model_const_metadata(this, standard_name, const_data, & + errflg, errmsg) + ! Return metadata matching standard name + ! <this> must be locked to execute this function + + ! Dummy arguments + class(ccpp_model_constituents_t), intent(in) :: this + character(len=*), intent(in) :: standard_name + type(ccpp_constituent_properties_t), intent(out) :: const_data + integer, optional, intent(out) :: errflg + character(len=*), optional, intent(out) :: errmsg + ! Local variables + type(ccpp_constituent_properties_t), pointer :: cprop + character(len=*), parameter :: subname = "ccp_model_const_metadata" + + if (this%locked(errflg=errflg, errmsg=errmsg, warn_func=subname)) then + cprop => this%find_const(standard_name, errflg=errflg, errmsg=errmsg) + if (associated(cprop)) then + const_data = cprop + end if + end if + + end subroutine ccp_model_const_metadata + +end module ccpp_constituent_prop_mod diff --git a/src/ccpp_hash_table.F90 b/src/ccpp_hash_table.F90 new file mode 100644 index 00000000..147ca5f0 --- /dev/null +++ b/src/ccpp_hash_table.F90 @@ -0,0 +1,520 @@ +!!XXgoldyXX: To do, statistics output +module ccpp_hash_table + + use ccpp_hashable, only: ccpp_hashable_t + + implicit none + private + + ! + ! Constants used in hashing function gen_hash_key. + ! + + integer, parameter :: gen_hash_key_offset = 21467 ! z'000053db' + + integer, parameter :: tbl_max_idx = 15 + integer, parameter, dimension(0:tbl_max_idx) :: tbl_gen_hash_key = & + (/ 61, 59, 53, 47, 43, 41, 37, 31, 29, 23, 17, 13, 11, 7, 3, 1 /) + + integer, parameter :: table_factor_size = 8 ! Table size / # entries + integer, parameter :: table_overflow_factor = 4 ! # entries / Overflow size + + type :: table_entry_t + ! Any table entry contains a key and a value + class(ccpp_hashable_t), pointer :: entry_value => NULL() + type(table_entry_t), pointer :: next => NULL() + contains + final :: finalize_table_entry + end type table_entry_t + + type, public :: ccpp_hash_table_t + ! ccpp_hash_table_t contains all information to build and use a hash table + ! It also keeps track of statistics such as collision frequency and size + integer, private :: table_size = -1 + integer, private :: key_offset = gen_hash_key_offset + type(table_entry_t), private, allocatable :: table(:) + ! Statistics + integer, private :: num_keys = 0 + integer, private :: num_key_collisions = 0 + integer, private :: max_collision = 0 + contains + procedure :: is_initialized => hash_table_is_initialized + procedure :: initialize => hash_table_initialize_table + procedure :: key_hash => hash_table_key_hash + procedure :: add_hash_key => hash_table_add_hash_key + procedure :: table_value => hash_table_table_value + procedure :: num_values => hash_table_num_values + procedure :: clear => hash_table_clear_table + end type ccpp_hash_table_t + + type, public :: ccpp_hash_iterator_t + ! 
ccpp_hash_iterator contains information allowing iteration through all + ! entries in a hash table + integer, private :: index = 0 + type(table_entry_t), private, pointer :: table_entry => NULL() + type(ccpp_hash_table_t), private, pointer :: hash_table => NULL() + contains + procedure :: initialize => hash_iterator_initialize + procedure :: key => hash_iterator_key + procedure :: next => hash_iterator_next_entry + procedure :: valid => hash_iterator_is_valid + procedure :: value => hash_iterator_value + end type ccpp_hash_iterator_t + + !! Private interfaces + private :: have_error ! Has a called routine detected an error? + private :: clear_optstring ! Clear a string, if present + +CONTAINS + + !####################################################################### + ! + ! Hash table methods + ! + !####################################################################### + + logical function have_error(errmsg) + ! Return .true. iff <errmsg> is present and contains text + + ! Dummy argument + character(len=*), optional, intent(in) :: errmsg + + have_error = present(errmsg) + if (have_error) then + have_error = len_trim(errmsg) > 0 + end if + end function have_error + + !####################################################################### + + subroutine clear_optstring(str) + ! clear <str> if it is present + + ! Dummy argument + character(len=*), optional, intent(inout) :: str + + if (present(str)) then + str = '' + end if + end subroutine clear_optstring + + !####################################################################### + + elemental subroutine finalize_table_entry(te) + + ! Dummy argument + type(table_entry_t), intent(inout) :: te + ! Local variable + type(table_entry_t), pointer :: temp + + if (associated(te%entry_value)) then + nullify(te%entry_value) ! We may not own the memory + temp => te%next + nullify(te%next) + if (associated(temp)) then + deallocate(temp) + nullify(temp) + end if + end if + + end subroutine finalize_table_entry + + !####################################################################### + + logical function hash_table_is_initialized(this) + ! Return .true. iff <this> is an initialized hash table + + ! Dummy argument + class(ccpp_hash_table_t) :: this + + hash_table_is_initialized = allocated(this%table) + + end function hash_table_is_initialized + + !####################################################################### + + subroutine hash_table_initialize_table(this, tbl_size, key_off) + ! Initialize this table. + + ! Dummy arguments + class(ccpp_hash_table_t) :: this + integer, intent(in) :: tbl_size ! new table size + integer, optional, intent(in) :: key_off ! key offset + + ! Clear this table so it can be initialized + if (allocated(this%table)) then + deallocate(this%table) + end if + this%num_keys = 0 + this%num_key_collisions = 0 + this%max_collision = 0 + ! Avoid too-large tables + this%table_size = ishft(1, MIN(tbl_size, bit_size(1) - 2)) + allocate(this%table(this%table_size)) + if (present(key_off)) then + this%key_offset = key_off + end if + end subroutine hash_table_initialize_table + + !####################################################################### + + integer function hash_table_key_hash(this, string, errmsg) result(hash_key) + ! + !----------------------------------------------------------------------- + ! + ! Purpose: Generate a hash key on the interval [0 .. tbl_hash_pri_sz-1] + ! given a character string. + ! + ! Algorithm is a variant of perl's internal hashing function. + ! 
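+ ! Sketch of the computation (descriptive only): starting from key_offset,
+ ! each character c of the trimmed string is folded in as ichar(c) * p via
+ ! ieor, where p cycles through the tbl_gen_hash_key factors; the accumulated
+ ! hash is then masked with (table_size - 1) and offset by 1, giving a
+ ! result in the 1-based interval [1 .. table_size].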
+ !----------------------------------------------------------------------- + ! + ! + ! Dummy Arguments: + ! + class(ccpp_hash_table_t) :: this + character(len=*), intent(in) :: string + character(len=*), optional, intent(out) :: errmsg + character(len=*), parameter :: subname = 'HASH_TABLE_KEY_HASH' + ! + ! Local. + ! + integer :: hash + integer :: index + integer :: ind_fact + integer :: hash_fact + + hash = this%key_offset + ind_fact = 0 + do index = 1, len_trim(string) + ind_fact = ind_fact + 1 + if (ind_fact > tbl_max_idx) then + ind_fact = 1 + end if + hash_fact = tbl_gen_hash_key(ind_fact) + hash = ieor(hash, (ichar(string(index:index)) * hash_fact)) + end do + + hash_key = iand(hash, this%table_size - 1) + 1 + if ((hash_key < 1) .or. (hash_key > this%table_size)) then + if (present(errmsg)) then + write(errmsg, '(2a,2(i0,a))') subname, ' ERROR: Key Hash, ', & + hash_key, ' out of bounds, [1, ', this%table_size, ']' + else + write(6, '(2a,2(i0,a))') subname, ' ERROR: Key Hash, ', & + hash_key, ' out of bounds, [1, ', this%table_size, ']' + STOP 1 + end if + end if + + end function hash_table_key_hash + + !####################################################################### + + function hash_table_table_value(this, key, errmsg) result(tbl_val) + ! + !----------------------------------------------------------------------- + ! + ! Purpose: Return the the key value of <key> + ! + ! If the object is not found, return NULL + ! + !----------------------------------------------------------------------- + ! + ! Dummy Arguments: + ! + class(ccpp_hash_table_t) :: this + character(len=*), intent(in) :: key + character(len=*), optional, intent(out) :: errmsg + class(ccpp_hashable_t), pointer :: tbl_val + ! + ! Local. + ! + integer :: hash_key + type(table_entry_t), pointer :: next_ptr + character(len=*), parameter :: subname = 'HASH_TABLE_TABLE_INDEX' + + call clear_optstring(errmsg) + nullify(tbl_val) + hash_key = this%key_hash(key, errmsg=errmsg) + if (have_error(errmsg)) then + errmsg = trim(errmsg)//', called from '//subname + else if (associated(this%table(hash_key)%entry_value)) then + if (this%table(hash_key)%entry_value%key() == trim(key)) then + tbl_val => this%table(hash_key)%entry_value + else + next_ptr => this%table(hash_key)%next + do + if (associated(next_ptr)) then + if (associated(next_ptr%entry_value)) then + if (next_ptr%entry_value%key() == trim(key)) then + tbl_val => next_ptr%entry_value + exit + end if + end if + next_ptr => next_ptr%next + else + exit + end if + end do + end if + end if + + if ((.not. associated(tbl_val)) .and. present(errmsg)) then + if (.not. have_error(errmsg)) then ! Still need to test for empty + write(errmsg, *) subname, ": No entry for '", trim(key), "'" + end if + end if + + end function hash_table_table_value + + !####################################################################### + + subroutine hash_table_add_hash_key(this, newval, errmsg) + ! + !----------------------------------------------------------------------- + ! + ! Purpose: Add <newval> to this hash table using its key + ! Its key must not be an empty string + ! It is an error to try to add a key more than once + ! + ! + !----------------------------------------------------------------------- + + ! Dummy arguments: + class(ccpp_hash_table_t) :: this + class(ccpp_hashable_t), target :: newval + character(len=*), optional, intent(out) :: errmsg + ! 
Local variables + integer :: hash_ind + integer :: ovflw_len + character(len=:), allocatable :: newkey + type(table_entry_t), pointer :: next_ptr + type(table_entry_t), pointer :: new_entry + character(len=*), parameter :: subname = 'HASH_TABLE_ADD_HASH_KEY' + + call clear_optstring(errmsg) + nullify(new_entry) + newkey = newval%key() + hash_ind = this%key_hash(newkey, errmsg=errmsg) + ! Check for this entry + if (have_error(errmsg)) then + errmsg = trim(errmsg)//', called from '//subname + else if (associated(this%table_value(newkey))) then + if (present(errmsg)) then + write(errmsg, *) subname, " ERROR: key, '", newkey, & + "' already in table" + end if + else + if (associated(this%table(hash_ind)%entry_value)) then + ! We have a collision, make a new entry + allocate(new_entry) + new_entry%entry_value => newval + ! Now, find a spot + if (associated(this%table(hash_ind)%next)) then + ovflw_len = 1 + next_ptr => this%table(hash_ind)%next + do + if (associated(next_ptr%next)) then + ovflw_len = ovflw_len + 1 + next_ptr => next_ptr%next + else + exit + end if + end do + ovflw_len = ovflw_len + 1 + next_ptr%next => new_entry + else + this%num_key_collisions = this%num_key_collisions + 1 + this%table(hash_ind)%next => new_entry + ovflw_len = 1 + end if + nullify(new_entry) + this%max_collision = MAX(this%max_collision, ovflw_len) + else + this%table(hash_ind)%entry_value => newval + end if + this%num_keys = this%num_keys + 1 + end if + + end subroutine hash_table_add_hash_key + + !####################################################################### + + integer function hash_table_num_values(this) result(numval) + ! + !----------------------------------------------------------------------- + ! + ! Purpose: Return the number of populated table values + ! + !----------------------------------------------------------------------- + + ! Dummy argument: + class(ccpp_hash_table_t) :: this + + numval = this%num_keys + + end function hash_table_num_values + + !####################################################################### + + subroutine hash_table_clear_table(this) + ! + !----------------------------------------------------------------------- + ! + ! Purpose: Deallocate the hash table and all of its entries + ! + !----------------------------------------------------------------------- + + ! Dummy argument: + class(ccpp_hash_table_t) :: this + + ! Clear all the table entries + if (this%is_initialized()) then + if (allocated(this%table)) then + ! This should deallocate the entire chain of entries + deallocate(this%table) + end if + end if + this%table_size = -1 + this%num_keys = 0 + this%num_key_collisions = 0 + this%max_collision = 0 + + end subroutine hash_table_clear_table + + !####################################################################### + ! + ! Hash iterator methods + ! + !####################################################################### + + subroutine hash_iterator_initialize(this, hash_table) + ! Initialize a hash_table iterator to the first value in the hash table + ! Note that the table_entry pointer is only used for the "next" field + ! in the hash table (entry itself is not a pointer). + + ! 
Dummy arguments + class(ccpp_hash_iterator_t) :: this + class(ccpp_hash_table_t), target :: hash_table + + this%hash_table => hash_table + this%index = 0 + nullify(this%table_entry) + do + this%index = this%index + 1 + if (associated(hash_table%table(this%index)%entry_value)) then + exit + else if (this%index > hash_table%table_size) then + this%index = 0 + end if + end do + end subroutine hash_iterator_initialize + + !####################################################################### + + function hash_iterator_key(this) result(key) + ! Return the key for this hash iterator entry + + ! Dummy arguments + class(ccpp_hash_iterator_t) :: this + character(len=:), allocatable :: key + + if (this%valid()) then + if (associated(this%table_entry)) then + key = this%table_entry%entry_value%key() + else + key = this%hash_table%table(this%index)%entry_value%key() + end if + else + key = '' + end if + + end function hash_iterator_key + + !####################################################################### + + subroutine hash_iterator_next_entry(this) + ! Set the iterator to the next valid hash table value + + ! Dummy argument + class(ccpp_hash_iterator_t) :: this + ! Local variable + logical :: has_table_entry + logical :: has_table_next + + if (this%index > 0) then + ! We have initialized this table, so look for next entry + has_table_entry = associated(this%table_entry) + if (has_table_entry) then + has_table_next = associated(this%table_entry%next) + else + has_table_next = .false. + end if + if (has_table_next) then + this%table_entry => this%table_entry%next + else if ((.not. has_table_entry) .and. & + associated(this%hash_table%table(this%index)%next)) then + this%table_entry => this%hash_table%table(this%index)%next + else + do + if (this%index >= this%hash_table%table_size) then + this%index = 0 + nullify(this%table_entry) + exit + else + this%index = this%index + 1 + nullify(this%table_entry) + ASSOCIATE(t_entry => this%hash_table%table(this%index)) + if (associated(t_entry%entry_value)) then + exit + end if + END ASSOCIATE + end if + end do + end if + else + ! This is an invalid iterator state + nullify(this%table_entry) + end if + + end subroutine hash_iterator_next_entry + + !####################################################################### + + logical function hash_iterator_is_valid(this) result(valid) + ! Return .true. iff this iterator is in a valid (active entry) state + + ! Dummy arguments + class(ccpp_hash_iterator_t) :: this + + valid = .false. + if ( (this%index > 0) .and. & + (this%index <= this%hash_table%table_size)) then + valid = .true. + end if + + end function hash_iterator_is_valid + + !####################################################################### + + function hash_iterator_value(this) result(val) + ! Return the value or this hash iterator entry + + ! Dummy arguments + class(ccpp_hash_iterator_t) :: this + class(ccpp_hashable_t), pointer :: val + + if (this%valid()) then + if (associated(this%table_entry)) then + val => this%table_entry%entry_value + else + val => this%hash_table%table(this%index)%entry_value + end if + else + nullify(val) + end if + + end function hash_iterator_value + +end module ccpp_hash_table diff --git a/src/ccpp_hashable.F90 b/src/ccpp_hashable.F90 new file mode 100644 index 00000000..fc2399b7 --- /dev/null +++ b/src/ccpp_hashable.F90 @@ -0,0 +1,98 @@ +module ccpp_hashable + + implicit none + private + + ! 
Public interfaces + public :: new_hashable_char + public :: new_hashable_int + + type, abstract, public :: ccpp_hashable_t + ! The hashable type is a base type that contains a hash key. + contains + procedure(ccpp_hashable_get_key), deferred :: key + end type ccpp_hashable_t + + type, public, extends(ccpp_hashable_t) :: ccpp_hashable_char_t + character(len=:), private, allocatable :: name + contains + procedure :: key => ccpp_hashable_char_get_key + end type ccpp_hashable_char_t + + type, public, extends(ccpp_hashable_t) :: ccpp_hashable_int_t + integer, private :: value + contains + procedure :: key => ccpp_hashable_int_get_key + procedure :: val => ccpp_hashable_int_get_val + end type ccpp_hashable_int_t + + ! Abstract interface for key procedure of ccpp_hashable_t class + abstract interface + function ccpp_hashable_get_key(hashable) + import :: ccpp_hashable_t + class(ccpp_hashable_t), intent(in) :: hashable + character(len=:), allocatable :: ccpp_hashable_get_key + end function ccpp_hashable_get_key + end interface + +CONTAINS + + !####################################################################### + + subroutine new_hashable_char(name_in, new_obj) + character(len=*), intent(in) :: name_in + type(ccpp_hashable_char_t), pointer :: new_obj + + if (associated(new_obj)) then + deallocate(new_obj) + end if + allocate(new_obj) + new_obj%name = name_in + end subroutine new_hashable_char + + !####################################################################### + + function ccpp_hashable_char_get_key(hashable) + ! Return the hashable char class key (name) + class(ccpp_hashable_char_t), intent(in) :: hashable + character(len=:), allocatable :: ccpp_hashable_char_get_key + + ccpp_hashable_char_get_key = hashable%name + end function ccpp_hashable_char_get_key + + !####################################################################### + + subroutine new_hashable_int(val_in, new_obj) + integer, intent(in) :: val_in + type(ccpp_hashable_int_t), pointer :: new_obj + + if (associated(new_obj)) then + deallocate(new_obj) + end if + allocate(new_obj) + new_obj%value = val_in + end subroutine new_hashable_int + + !####################################################################### + + function ccpp_hashable_int_get_key(hashable) + ! Return the hashable int class key (value ==> string) + class(ccpp_hashable_int_t), intent(in) :: hashable + character(len=:), allocatable :: ccpp_hashable_int_get_key + + character(len=32) :: key_str + + write(key_str, '(i0)') hashable%val() + ccpp_hashable_int_get_key = trim(key_str) + end function ccpp_hashable_int_get_key + + !####################################################################### + + integer function ccpp_hashable_int_get_val(hashable) + ! 
Return the hashable int class value + class(ccpp_hashable_int_t), intent(in) :: hashable + + ccpp_hashable_int_get_val = hashable%value + end function ccpp_hashable_int_get_val + +end module ccpp_hashable diff --git a/src/ccpp_types.F90 b/src/ccpp_types.F90 index 10bb3e84..992646bd 100644 --- a/src/ccpp_types.F90 +++ b/src/ccpp_types.F90 @@ -26,7 +26,10 @@ module ccpp_types implicit none private - public :: ccpp_t + public :: ccpp_t, one + + !> @var Definition of constant one + integer, parameter :: one = 1 !> @var The default loop counter indicating outside of a subcycle loop integer, parameter :: CCPP_DEFAULT_LOOP_CNT = -999 diff --git a/src/ccpp_types.meta b/src/ccpp_types.meta index fa337801..388777cc 100644 --- a/src/ccpp_types.meta +++ b/src/ccpp_types.meta @@ -1,18 +1,3 @@ -[ccpp-table-properties] - name = ccpp_types - type = module - dependencies = - -[ccpp-arg-table] - name = ccpp_types - type = module -[ccpp_t] - standard_name = ccpp_t - long_name = definition of type ccpp_t - units = DDT - dimensions = () - type = ccpp_t - ######################################################################## [ccpp-table-properties] name = ccpp_t @@ -59,3 +44,26 @@ units = index dimensions = () type = integer + +######################################################################## + +[ccpp-table-properties] + name = ccpp_types + type = module + dependencies = + +[ccpp-arg-table] + name = ccpp_types + type = module +[ccpp_t] + standard_name = ccpp_t + long_name = definition of type ccpp_t + units = DDT + dimensions = () + type = ccpp_t +[one] + standard_name = ccpp_constant_one + long_name = definition of constant one + units = none + dimensions = () + type = integer diff --git a/test/.pylintrc b/test/.pylintrc new file mode 100644 index 00000000..b380843f --- /dev/null +++ b/test/.pylintrc @@ -0,0 +1,466 @@ +[MASTER] + +# A comma-separated list of package or module names from where C extensions may +# be loaded. Extensions are loading into the active Python interpreter and may +# run arbitrary code +extension-pkg-whitelist= + +# Add files or directories to the blacklist. They should be base names, not +# paths. +ignore=CVS + +# Add files or directories matching the regex patterns to the blacklist. The +# regex matches against base names, not paths. +ignore-patterns= + +# Python code to execute, usually for sys.path manipulation such as +# pygtk.require(). +#init-hook= + +# Use multiple processes to speed up Pylint. +jobs=1 + +# List of plugins (as comma separated values of python modules names) to load, +# usually to register additional checkers. +load-plugins= + +# Pickle collected data for later comparisons. +persistent=yes + +# Specify a configuration file. +#rcfile= + +# When enabled, pylint would attempt to guess common misconfiguration and emit +# user-friendly hints instead of false-positive error messages +suggestion-mode=yes + +# Allow loading of arbitrary C extensions. Extensions are imported into the +# active Python interpreter and may run arbitrary code. +unsafe-load-any-extension=no + + +[MESSAGES CONTROL] + +# Only show warnings with the listed confidence levels. Leave empty to show +# all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED +confidence= + +# Disable the message, report, category or checker with the given id(s). 
You +# can either give multiple identifiers separated by comma (,) or put this +# option multiple times (only on the command line, not in the configuration +# file where it should appear only once).You can also use "--disable=all" to +# disable everything first and then reenable specific checks. For example, if +# you want to run only the similarities checker, you can use "--disable=all +# --enable=similarities". If you want to run only the classes checker, but have +# no Warning level messages displayed, use"--disable=all --enable=classes +# --disable=W" +disable=C0330,too-many-lines,too-many-public-methods,too-many-locals,too-many-arguments,too-many-instance-attributes,unnecessary-pass + +# Enable the message, report, category or checker with the given id(s). You can +# either give multiple identifier separated by comma (,) or put this option +# multiple time (only on the command line, not in the configuration file where +# it should appear only once). See also the "--disable" option for examples. +enable=c-extension-no-member + + +[REPORTS] + +# Python expression which should return a note less than 10 (10 is the highest +# note). You have access to the variables errors warning, statement which +# respectively contain the number of errors / warnings messages and the total +# number of statements analyzed. This is used by the global evaluation report +# (RP0004). +evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10) + +# Template used to display messages. This is a python new-style format string +# used to format the message information. See doc for all details +#msg-template= + +# Set the output format. Available formats are text, parseable, colorized, json +# and msvs (visual studio).You can also give a reporter class, eg +# mypackage.mymodule.MyReporterClass. +output-format=text + +# Tells whether to display a full report or only the messages +reports=no + +# Activate the evaluation score. +score=yes + + +[REFACTORING] + +# Maximum number of nested blocks for function / method body +max-nested-blocks=15 + +# Complete name of functions that never returns. When checking for +# inconsistent-return-statements if a never returning function is called then +# it will be considered as an explicit return statement and no message will be +# printed. +never-returning-functions=optparse.Values,sys.exit + + +[BASIC] + +# Naming style matching correct argument names +argument-naming-style=snake_case + +# Regular expression matching correct argument names. Overrides argument- +# naming-style +#argument-rgx= + +# Naming style matching correct attribute names +attr-naming-style=snake_case + +# Regular expression matching correct attribute names. Overrides attr-naming- +# style +#attr-rgx= + +# Bad variable names which should always be refused, separated by a comma +bad-names=foo, + bar, + baz, + qux, + toto, + tutu, + tata + +# Naming style matching correct class attribute names +class-attribute-naming-style=any + +# Regular expression matching correct class attribute names. Overrides class- +# attribute-naming-style +#class-attribute-rgx= + +# Naming style matching correct class names +class-naming-style=PascalCase + +# Regular expression matching correct class names. Overrides class-naming-style +#class-rgx= + +# Naming style matching correct constant names +const-naming-style=UPPER_CASE + +# Regular expression matching correct constant names. 
Overrides const-naming- +# style +#const-rgx= + +# Minimum line length for functions/classes that require docstrings, shorter +# ones are exempt. +docstring-min-length=-1 + +# Naming style matching correct function names +function-naming-style=snake_case + +# Regular expression matching correct function names. Overrides function- +# naming-style +#function-rgx= + +# Good variable names which should always be accepted, separated by a comma +good-names=_ + +# Include a hint for the correct naming format with invalid-name +include-naming-hint=no + +# Naming style matching correct inline iteration names +inlinevar-naming-style=any + +# Regular expression matching correct inline iteration names. Overrides +# inlinevar-naming-style +#inlinevar-rgx= + +# Naming style matching correct method names +method-naming-style=snake_case + +# Regular expression matching correct method names. Overrides method-naming- +# style +#method-rgx= + +# Naming style matching correct module names +module-naming-style=snake_case + +# Regular expression matching correct module names. Overrides module-naming- +# style +#module-rgx= + +# Colon-delimited sets of names that determine each other's naming style when +# the name regexes allow several styles. +name-group= + +# Regular expression which should only match function or class names that do +# not require a docstring. +no-docstring-rgx=^_ + +# List of decorators that produce properties, such as abc.abstractproperty. Add +# to this list to register other decorators that produce valid properties. +property-classes=abc.abstractproperty + +# Naming style matching correct variable names +variable-naming-style=snake_case + +# Regular expression matching correct variable names. Overrides variable- +# naming-style +#variable-rgx= + + +[FORMAT] + +# Expected format of line ending, e.g. empty (any line ending), LF or CRLF. +expected-line-ending-format= + +# Regexp for a line that is allowed to be longer than the limit. +ignore-long-lines=(^\s*(# )?<?https?://\S+>?$)|(^\s*>>> .*$)||(^\s*CCPPError:) + +# Number of spaces of indent required inside a hanging or continued line. +indent-after-paren=0 + +# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1 +# tab). +indent-string=' ' + +# Maximum number of characters on a single line. +max-line-length=100 + +# Maximum number of lines in a module +max-module-lines=2000 + +# List of optional constructs for which whitespace checking is disabled. `dict- +# separator` is used to allow tabulation in dicts, etc.: {1 : 1,\n222: 2}. +# `trailing-comma` allows a space between comma and closing bracket: (a, ). +# `empty-line` allows space-only lines. +no-space-check=trailing-comma, + dict-separator + +# Allow the body of a class to be on the same line as the declaration if body +# contains single statement. +single-line-class-stmt=no + +# Allow the body of an if to be on the same line as the test if there is no +# else. +single-line-if-stmt=no + + +[LOGGING] + +# Logging modules to check that the string format arguments are in logging +# function parameter format +logging-modules=logging + + +[MISCELLANEOUS] + +# List of note tags to take in consideration, separated by a comma. +notes=FIXME, + XXX, + TODO + + +[SIMILARITIES] + +# Ignore comments when computing similarities. +ignore-comments=yes + +# Ignore docstrings when computing similarities. +ignore-docstrings=yes + +# Ignore imports when computing similarities. +ignore-imports=no + +# Minimum lines number of a similarity. 
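+# (the duplicate-code checker reports blocks with at least this many matching lines)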
+min-similarity-lines=4 + + +[SPELLING] + +# Limits count of emitted suggestions for spelling mistakes +max-spelling-suggestions=4 + +# Spelling dictionary name. Available dictionaries: none. To make it working +# install python-enchant package. +spelling-dict= + +# List of comma separated words that should not be checked. +spelling-ignore-words= + +# A path to a file that contains private dictionary; one word per line. +spelling-private-dict-file= + +# Tells whether to store unknown words to indicated private dictionary in +# --spelling-private-dict-file option instead of raising a message. +spelling-store-unknown-words=no + + +[TYPECHECK] + +# List of decorators that produce context managers, such as +# contextlib.contextmanager. Add to this list to register other decorators that +# produce valid context managers. +contextmanager-decorators=contextlib.contextmanager + +# List of members which are set dynamically and missed by pylint inference +# system, and so shouldn't trigger E1101 when accessed. Python regular +# expressions are accepted. +generated-members= + +# Tells whether missing members accessed in mixin class should be ignored. A +# mixin class is detected if its name ends with "mixin" (case insensitive). +ignore-mixin-members=yes + +# This flag controls whether pylint should warn about no-member and similar +# checks whenever an opaque object is returned when inferring. The inference +# can return multiple potential results while evaluating a Python object, but +# some branches might not be evaluated, which results in partial inference. In +# that case, it might be useful to still emit no-member and other checks for +# the rest of the inferred objects. +ignore-on-opaque-inference=yes + +# List of class names for which member attributes should not be checked (useful +# for classes with dynamically set attributes). This supports the use of +# qualified names. +ignored-classes=thread._local,_thread._local + +# List of module names for which member attributes should not be checked +# (useful for modules/projects where namespaces are manipulated during runtime +# and thus existing member attributes cannot be deduced by static analysis. It +# supports qualified module names, as well as Unix pattern matching. +ignored-modules= + +# Show a hint with possible names when a member name was not found. The aspect +# of finding the hint is based on edit distance. +missing-member-hint=yes + +# The minimum edit distance a name should have in order to be considered a +# similar match for a missing member name. +missing-member-hint-distance=1 + +# The total number of similar names that should be taken in consideration when +# showing a hint for a missing member. +missing-member-max-choices=1 + + +[VARIABLES] + +# List of additional names supposed to be defined in builtins. Remember that +# you should avoid to define new builtins when possible. +additional-builtins= + +# Tells whether unused global variables should be treated as a violation. +allow-global-unused-variables=yes + +# List of strings which can identify a callback function by name. A callback +# name must start or end with one of those strings. +callbacks=cb_, + _cb + +# A regular expression matching the name of dummy variables (i.e. expectedly +# not used). +dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_ + +# Argument names that match this expression will be ignored. 
Default to name +# with leading underscore +ignored-argument-names=_.*|^ignored_|^unused_ + +# Tells whether we should check for unused import in __init__ files. +init-import=no + +# List of qualified module names which can have objects that can redefine +# builtins. +redefining-builtins-modules=six.moves,past.builtins,future.builtins,io,builtins + + +[CLASSES] + +# List of method names used to declare (i.e. assign) instance attributes. +defining-attr-methods=__init__, + __new__, + setUp + +# List of member names, which should be excluded from the protected access +# warning. +exclude-protected=_asdict, + _fields, + _replace, + _source, + _make + +# List of valid names for the first argument in a class method. +valid-classmethod-first-arg=cls + +# List of valid names for the first argument in a metaclass class method. +valid-metaclass-classmethod-first-arg=mcs + + +[DESIGN] + +# Maximum number of arguments for function / method +max-args=10 + +# Maximum number of attributes for a class (see R0902). +max-attributes=10 + +# Maximum number of boolean expressions in a if statement +max-bool-expr=5 + +# Maximum number of branch for function / method body +max-branches=35 + +# Maximum number of locals for function / method body +max-locals=25 + +# Maximum number of parents for a class (see R0901). +max-parents=7 + +# Maximum number of public methods for a class (see R0904). +max-public-methods=20 + +# Maximum number of return / yield for function / method body +max-returns=6 + +# Maximum number of statements in function / method body +max-statements=150 + +# Minimum number of public methods for a class (see R0903). +min-public-methods=2 + + +[IMPORTS] + +# Allow wildcard imports from modules that define __all__. +allow-wildcard-with-all=no + +# Analyse import fallback blocks. This can be used to support both Python 2 and +# 3 compatible code, which means that the block might have code that exists +# only in one or another interpreter, leading to false positives when analysed. +analyse-fallback-blocks=no + +# Deprecated modules which should not be used, separated by a comma +deprecated-modules=regsub, + TERMIOS, + Bastion, + rexec + +# Create a graph of external dependencies in the given file (report RP0402 must +# not be disabled) +ext-import-graph= + +# Create a graph of every (i.e. internal and external) dependencies in the +# given file (report RP0402 must not be disabled) +import-graph= + +# Create a graph of internal dependencies in the given file (report RP0402 must +# not be disabled) +int-import-graph= + +# Force import order to recognize a module as part of the standard +# compatibility libraries. +known-standard-library= + +# Force import order to recognize a module as part of a third party library. +known-third-party=enchant + + +[EXCEPTIONS] + +# Exceptions that will emit a warning when being caught. 
Defaults to +# "Exception" +overgeneral-exceptions=Exception diff --git a/test/advection_test/CMakeLists.txt b/test/advection_test/CMakeLists.txt new file mode 100644 index 00000000..10ada283 --- /dev/null +++ b/test/advection_test/CMakeLists.txt @@ -0,0 +1,187 @@ +CMAKE_MINIMUM_REQUIRED(VERSION 2.8) +PROJECT(test_host) +ENABLE_LANGUAGE(Fortran) + +include(CMakeForceCompiler) + +SET(CMAKE_MODULE_PATH ${CMAKE_MODULE_PATH} ${CMAKE_SOURCE_DIR}/cmake/modules) + +#------------------------------------------------------------------------------ +# +# Set where the CCPP Framework lives +# +#------------------------------------------------------------------------------ +get_filename_component(TEST_ROOT "${CMAKE_SOURCE_DIR}" DIRECTORY) +get_filename_component(CCPP_ROOT "${TEST_ROOT}" DIRECTORY) +#------------------------------------------------------------------------------ +# +# Create list of SCHEME_FILES, HOST_FILES, and SUITE_FILES +# Paths should be relative to CMAKE_SOURCE_DIR (this file's directory) +# +#------------------------------------------------------------------------------ +LIST(APPEND SCHEME_FILES "cld_suite_files.txt") +LIST(APPEND HOST_FILES "test_host_data" "test_host_mod") +LIST(APPEND SUITE_FILES "cld_suite.xml") +# HOST is the name of the executable we will build. +# We assume there are files ${HOST}.meta and ${HOST}.F90 in CMAKE_SOURCE_DIR +SET(HOST "${CMAKE_PROJECT_NAME}") + +#------------------------------------------------------------------------------ +# +# End of project-specific input +# +#------------------------------------------------------------------------------ + +# By default, no verbose output +SET(VERBOSITY 0 CACHE STRING "Verbosity level of output (default: 0)") +# By default, generated caps go in ccpp subdir +SET(CCPP_CAP_FILES "${CMAKE_BINARY_DIR}/ccpp" CACHE + STRING "Location of CCPP-generated cap files") + +SET(CCPP_FRAMEWORK ${CCPP_ROOT}/scripts) + +# Use rpaths on MacOSX +set(CMAKE_MACOSX_RPATH 1) + +#------------------------------------------------------------------------------ +# Set a default build type if none was specified +if(NOT CMAKE_BUILD_TYPE AND NOT CMAKE_CONFIGURATION_TYPES) + #message(STATUS "Setting build type to 'Debug' as none was specified.") + #set(CMAKE_BUILD_TYPE Debug CACHE STRING "Choose the type of build." FORCE) + message(STATUS "Setting build type to 'Release' as none was specified.") + set(CMAKE_BUILD_TYPE Release CACHE STRING "Choose the type of build." 
FORCE) + + # Set the possible values of build type for cmake-gui + set_property(CACHE CMAKE_BUILD_TYPE PROPERTY STRINGS "Debug" "Release" + "MinSizeRel" "RelWithDebInfo") +endif() + +ADD_COMPILE_OPTIONS(-O0) + +if (${CMAKE_Fortran_COMPILER_ID} MATCHES "GNU") +# gfortran +# MESSAGE("gfortran being used.") + ADD_COMPILE_OPTIONS(-fcheck=all) + ADD_COMPILE_OPTIONS(-fbacktrace) + ADD_COMPILE_OPTIONS(-ffpe-trap=zero) + ADD_COMPILE_OPTIONS(-finit-real=nan) + ADD_COMPILE_OPTIONS(-ggdb) + ADD_COMPILE_OPTIONS(-ffree-line-length-none) + ADD_COMPILE_OPTIONS(-cpp) +elseif (${CMAKE_Fortran_COMPILER_ID} MATCHES "Intel") +# ifort +# MESSAGE("ifort being used.") + #ADD_COMPILE_OPTIONS(-check all) + ADD_COMPILE_OPTIONS(-fpe0) + ADD_COMPILE_OPTIONS(-warn) + ADD_COMPILE_OPTIONS(-traceback) + ADD_COMPILE_OPTIONS(-debug extended) + ADD_COMPILE_OPTIONS(-fpp) +elseif (${CMAKE_Fortran_COMPILER_ID} MATCHES "PGI") +# pgf90 +# MESSAGE("pgf90 being used.") + ADD_COMPILE_OPTIONS(-g) + ADD_COMPILE_OPTIONS(-Mipa=noconst) + ADD_COMPILE_OPTIONS(-traceback) + ADD_COMPILE_OPTIONS(-Mfree) + ADD_COMPILE_OPTIONS(-Mfptrap) + ADD_COMPILE_OPTIONS(-Mpreprocess) +else (${CMAKE_Fortran_COMPILER_ID} MATCHES "GNU") + message (WARNING "This program has only been compiled with gfortran, pgf90 and ifort. If another compiler is needed, the appropriate flags SHOULD be added in ${CMAKE_SOURCE_DIR}/CMakeLists.txt") +endif (${CMAKE_Fortran_COMPILER_ID} MATCHES "GNU") + +#------------------------------------------------------------------------------ +# CMake Modules +# Set the CMake module path +list(APPEND CMAKE_MODULE_PATH "${CCPP_FRAMEWORK}/cmake") +#------------------------------------------------------------------------------ +# Set OpenMP flags for C/C++/Fortran +if (OPENMP) + include(detect_openmp) + detect_openmp() + set (CMAKE_C_FLAGS "${CMAKE_C_FLAGS} ${OpenMP_C_FLAGS}") + set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${OpenMP_CXX_FLAGS}") + set (CMAKE_Fortran_FLAGS "${CMAKE_Fortran_FLAGS} ${OpenMP_Fortran_FLAGS}") + message(STATUS "Enable OpenMP support for C/C++/Fortran compiler") +else(OPENMP) + message (STATUS "Disable OpenMP support for C/C++/Fortran compiler") +endif() + +# Create metadata and source file lists +FOREACH(FILE ${SCHEME_FILES}) + FILE(STRINGS ${FILE} FILENAMES) + LIST(APPEND SCHEME_FILENAMES ${FILENAMES}) +ENDFOREACH(FILE) +string(REPLACE ";" "," SCHEME_METADATA "${SCHEME_FILES}") + +FOREACH(FILE ${SCHEME_FILENAMES}) + # target_sources prefers absolute pathnames + string(REPLACE ".meta" ".F90" TEMP "${FILE}") + get_filename_component(ABS_PATH "${TEMP}" ABSOLUTE) + list(APPEND LIBRARY_LIST ${ABS_PATH}) +ENDFOREACH(FILE) + +FOREACH(FILE ${HOST_FILES}) + LIST(APPEND HOST_METADATA "${FILE}.meta") + # target_sources prefers absolute pathnames + get_filename_component(ABS_PATH "${FILE}.F90" ABSOLUTE) + LIST(APPEND HOST_SOURCE "${ABS_PATH}") +ENDFOREACH(FILE) +list(APPEND LIBRARY_LIST ${HOST_SOURCE}) +string(REPLACE ";" ".meta," HOST_METADATA "${HOST_FILES}") +set(HOST_METADATA "${HOST_METADATA}.meta,${HOST}.meta") + +string(REPLACE ";" "," SUITE_XML "${SUITE_FILES}") + +# Run ccpp_capgen +set(CAPGEN_CMD "${CCPP_FRAMEWORK}/ccpp_capgen.py") +list(APPEND CAPGEN_CMD "--host-files") +list(APPEND CAPGEN_CMD "${HOST_METADATA}") +list(APPEND CAPGEN_CMD "--scheme-files") +list(APPEND CAPGEN_CMD "${SCHEME_METADATA}") +list(APPEND CAPGEN_CMD "--suites") +list(APPEND CAPGEN_CMD "${SUITE_XML}") +list(APPEND CAPGEN_CMD "--host-name") +list(APPEND CAPGEN_CMD "test_host") +list(APPEND CAPGEN_CMD "--output-root") +list(APPEND CAPGEN_CMD 
"${CCPP_CAP_FILES}") +while (VERBOSITY GREATER 0) + list(APPEND CAPGEN_CMD "--verbose") + MATH(EXPR VERBOSITY "${VERBOSITY} - 1") +endwhile () +string(REPLACE ";" " " CAPGEN_STRING "${CAPGEN_CMD}") +MESSAGE(STATUS "Running: ${CAPGEN_STRING}") +EXECUTE_PROCESS(COMMAND ${CAPGEN_CMD} WORKING_DIRECTORY ${CMAKE_SOURCE_DIR} + OUTPUT_VARIABLE CAPGEN_OUT ERROR_VARIABLE CAPGEN_OUT RESULT_VARIABLE RES) +MESSAGE(STATUS "${CAPGEN_OUT}") +if (RES EQUAL 0) + MESSAGE(STATUS "CCPP cap generation completed") +else(RES EQUAL 0) + MESSAGE(FATAL_ERROR "CCPP cap generation FAILED: result = ${RES}") +endif(RES EQUAL 0) + +# Retrieve the list of files from datatable.xml and set to CCPP_CAPS +set(DTABLE_CMD "${CCPP_FRAMEWORK}/ccpp_datafile.py") +list(APPEND DTABLE_CMD "${CCPP_CAP_FILES}/datatable.xml") +list(APPEND DTABLE_CMD "--ccpp-files") +list(APPEND DTABLE_CMD "--separator=\\;") +string(REPLACE ";" " " DTABLE_STRING "${DTABLE_CMD}") +MESSAGE(STATUS "Running: ${DTABLE_STRING}") +EXECUTE_PROCESS(COMMAND ${DTABLE_CMD} OUTPUT_VARIABLE CCPP_CAPS + RESULT_VARIABLE RES + OUTPUT_STRIP_TRAILING_WHITESPACE ERROR_STRIP_TRAILING_WHITESPACE) +message(STATUS "CCPP_CAPS = ${CCPP_CAPS}") +if (RES EQUAL 0) + MESSAGE(STATUS "CCPP cap files retrieved") +else(RES EQUAL 0) + MESSAGE(FATAL_ERROR "CCPP cap file retrieval FAILED: result = ${RES}") +endif(RES EQUAL 0) +list(APPEND LIBRARY_LIST ${CCPP_CAPS}) +add_library(TESTLIB OBJECT ${LIBRARY_LIST}) +ADD_EXECUTABLE(${HOST} ${HOST}.F90 $<TARGET_OBJECTS:TESTLIB>) + +INCLUDE_DIRECTORIES(${CCPP_CAP_FILES}) + +set_target_properties(${HOST} PROPERTIES + COMPILE_FLAGS "${CMAKE_Fortran_FLAGS}" + LINK_FLAGS "${CMAKE_Fortran_FLAGS}") diff --git a/test/advection_test/cld_ice.F90 b/test/advection_test/cld_ice.F90 new file mode 100644 index 00000000..86881f7f --- /dev/null +++ b/test/advection_test/cld_ice.F90 @@ -0,0 +1,74 @@ +! Test parameterization with advected species +! + +MODULE cld_ice + + USE ccpp_kinds, ONLY: kind_phys + + IMPLICIT NONE + PRIVATE + + PUBLIC :: cld_ice_init + PUBLIC :: cld_ice_run + + real(kind_phys), private :: tcld = HUGE(1.0_kind_phys) + +CONTAINS + + !> \section arg_table_cld_ice_run Argument Table + !! \htmlinclude arg_table_cld_ice_run.html + !! + subroutine cld_ice_run(ncol, timestep, temp, qv, ps, cld_ice, & + errmsg, errflg) + + integer, intent(in) :: ncol + real(kind_phys), intent(in) :: timestep + real(kind_phys), intent(inout) :: temp(:,:) + real(kind_phys), intent(inout) :: qv(:,:) + real(kind_phys), intent(in) :: ps(:) + REAL(kind_phys), intent(inout) :: cld_ice(:,:) + character(len=512), intent(out) :: errmsg + integer, intent(out) :: errflg + !---------------------------------------------------------------- + + integer :: icol + integer :: ilev + real(kind_phys) :: frz + + errmsg = '' + errflg = 0 + + ! Apply state-of-the-art thermodynamics :) + do icol = 1, ncol + do ilev = 1, size(temp, 2) + if (temp(icol, ilev) < tcld) then + frz = MAX(qv(icol, ilev) - 0.5_kind_phys, 0.0_kind_phys) + cld_ice(icol, ilev) = cld_ice(icol, ilev) + frz + qv(icol, ilev) = qv(icol, ilev) - frz + if (frz > 0.0_kind_phys) then + temp(icol, ilev) = temp(icol, ilev) + 1.0_kind_phys + end if + end if + end do + end do + + END SUBROUTINE cld_ice_run + + !> \section arg_table_cld_ice_init Argument Table + !! \htmlinclude arg_table_cld_ice_init.html + !! 
+ subroutine cld_ice_init(tfreeze, cld_ice, errmsg, errflg) + + real(kind_phys), intent(in) :: tfreeze + real(kind_phys), intent(inout) :: cld_ice(:,:) + character(len=512), intent(out) :: errmsg + integer, intent(out) :: errflg + + errmsg = '' + errflg = 0 + cld_ice = 0.0_kind_phys + tcld = tfreeze - 20.0_kind_phys + + end subroutine cld_ice_init + +END MODULE cld_ice diff --git a/test/advection_test/cld_ice.meta b/test/advection_test/cld_ice.meta new file mode 100644 index 00000000..9dd77db2 --- /dev/null +++ b/test/advection_test/cld_ice.meta @@ -0,0 +1,98 @@ +# cld_ice is a scheme that produces a cloud ice amount +[ccpp-table-properties] + name = cld_ice + type = scheme +[ccpp-arg-table] + name = cld_ice_run + type = scheme +[ ncol ] + standard_name = horizontal_loop_extent + type = integer + units = count + dimensions = () + intent = in +[ timestep ] + standard_name = time_step_for_physics + long_name = time step + units = s + dimensions = () + type = real + kind = kind_phys + intent = in +[ temp ] + standard_name = temperature + units = K + dimensions = (horizontal_loop_extent, vertical_layer_dimension) + type = real + kind = kind_phys + intent = inout +[ qv ] + standard_name = water_vapor_specific_humidity + units = kg kg-1 + dimensions = (horizontal_loop_extent, vertical_layer_dimension) + type = real + kind = kind_phys + intent = inout +[ ps ] + standard_name = surface_air_pressure + state_variable = true + type = real + kind = kind_phys + units = Pa + dimensions = (horizontal_loop_extent) + intent = in +[ cld_ice ] + standard_name = cloud_ice_dry_mixing_ratio + advected = .true. + units = kg kg-1 + dimensions = (horizontal_loop_extent, vertical_layer_dimension) + type = real | kind = kind_phys + intent = inout +[ errmsg ] + standard_name = ccpp_error_message + long_name = Error message for error handling in CCPP + units = 1 + dimensions = () + type = character + kind = len=512 + intent = out +[ errflg ] + standard_name = ccpp_error_flag + long_name = Error flag for error handling in CCPP + units = flag + dimensions = () + type = integer + intent = out +[ccpp-arg-table] + name = cld_ice_init + type = scheme +[ tfreeze] + standard_name = water_temperature_at_freezing + long_name = Freezing temperature of water at sea level + units = K + dimensions = () + type = real | kind = kind_phys + intent = in +[ cld_ice ] + standard_name = cloud_ice_dry_mixing_ratio + advected = .true. + units = kg kg-1 + dimensions = (horizontal_dimension, vertical_layer_dimension) + type = real | kind = kind_phys + # Advected species that needs to be supplied by framework + intent = inout +[ errmsg ] + standard_name = ccpp_error_message + long_name = Error message for error handling in CCPP + units = 1 + dimensions = () + type = character + kind = len=512 + intent = out +[ errflg ] + standard_name = ccpp_error_flag + long_name = Error flag for error handling in CCPP + units = flag + dimensions = () + type = integer + intent = out diff --git a/test/advection_test/cld_liq.F90 b/test/advection_test/cld_liq.F90 new file mode 100644 index 00000000..2e1e5a57 --- /dev/null +++ b/test/advection_test/cld_liq.F90 @@ -0,0 +1,77 @@ +! Test parameterization with advected species +! + +MODULE cld_liq + + USE ccpp_kinds, ONLY: kind_phys + + IMPLICIT NONE + PRIVATE + + PUBLIC :: cld_liq_init + PUBLIC :: cld_liq_run + +CONTAINS + + !> \section arg_table_cld_liq_run Argument Table + !! \htmlinclude arg_table_cld_liq_run.html + !! 
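+  ! cld_liq_run condenses up to 0.1 kg kg-1 of water vapor per call into
+  ! cloud liquid wherever the layer temperature is at or below tcld,
+  ! removing the condensate from qv and warming the layer in proportion
+  ! to the amount condensed.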
+ subroutine cld_liq_run(ncol, timestep, tcld, temp, qv, ps, cld_liq, & + errmsg, errflg) + + integer, intent(in) :: ncol + real(kind_phys), intent(in) :: timestep + real(kind_phys), intent(in) :: tcld + real(kind_phys), intent(inout) :: temp(:,:) + real(kind_phys), intent(inout) :: qv(:,:) + real(kind_phys), intent(in) :: ps(:) + REAL(kind_phys), intent(inout) :: cld_liq(:,:) + character(len=512), intent(out) :: errmsg + integer, intent(out) :: errflg + !---------------------------------------------------------------- + + integer :: icol + integer :: ilev + real(kind_phys) :: cond + + errmsg = '' + errflg = 0 + + ! Apply state-of-the-art thermodynamics :) + do icol = 1, ncol + do ilev = 1, size(temp, 2) + if ( (qv(icol, ilev) > 0.0_kind_phys) .and. & + (temp(icol, ilev) <= tcld)) then + cond = MIN(qv(icol, ilev), 0.1_kind_phys) + cld_liq(icol, ilev) = cld_liq(icol, ilev) + cond + qv(icol, ilev) = qv(icol, ilev) - cond + if (cond > 0.0_kind_phys) then + temp(icol, ilev) = temp(icol, ilev) + (cond * 5.0_kind_phys) + end if + end if + end do + end do + + END SUBROUTINE cld_liq_run + + !> \section arg_table_cld_liq_init Argument Table + !! \htmlinclude arg_table_cld_liq_init.html + !! + subroutine cld_liq_init(tfreeze, cld_liq, tcld, errmsg, errflg) + + real(kind_phys), intent(in) :: tfreeze + real(kind_phys), intent(out) :: cld_liq(:,:) + real(kind_phys), intent(out) :: tcld + character(len=512), intent(out) :: errmsg + integer, intent(out) :: errflg + + ! This routine currently does nothing + + errmsg = '' + errflg = 0 + cld_liq = 0.0_kind_phys + tcld = tfreeze - 20.0_kind_phys + + end subroutine cld_liq_init + +END MODULE cld_liq diff --git a/test/advection_test/cld_liq.meta b/test/advection_test/cld_liq.meta new file mode 100644 index 00000000..4e071091 --- /dev/null +++ b/test/advection_test/cld_liq.meta @@ -0,0 +1,110 @@ +# cld_liq is a scheme that produces a cloud liquid amount +[ccpp-table-properties] + name = cld_liq + type = scheme +[ccpp-arg-table] + name = cld_liq_run + type = scheme +[ ncol ] + standard_name = horizontal_loop_extent + type = integer + units = count + dimensions = () + intent = in +[ timestep ] + standard_name = time_step_for_physics + long_name = time step + units = s + dimensions = () + type = real + kind = kind_phys + intent = in +[ tcld] + standard_name = minimum_temperature_for_cloud_liquid + units = K + dimensions = () + type = real | kind = kind_phys + intent = in +[ temp ] + standard_name = temperature + units = K + dimensions = (horizontal_loop_extent, vertical_layer_dimension) + type = real + kind = kind_phys + intent = inout +[ qv ] + standard_name = water_vapor_specific_humidity + units = kg kg-1 + dimensions = (horizontal_loop_extent, vertical_layer_dimension) + type = real + kind = kind_phys + intent = inout +[ ps ] + standard_name = surface_air_pressure + state_variable = true + type = real + kind = kind_phys + units = Pa + dimensions = (horizontal_loop_extent) + intent = in +[ cld_liq ] + standard_name = cloud_liquid_dry_mixing_ratio + advected = .true. 
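+  # Advected species; registered as a CCPP constituent by the framework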
+ units = kg kg-1 + dimensions = (horizontal_loop_extent, vertical_layer_dimension) + type = real | kind = kind_phys + intent = inout +[ errmsg ] + standard_name = ccpp_error_message + long_name = Error message for error handling in CCPP + units = 1 + dimensions = () + type = character + kind = len=512 + intent = out +[ errflg ] + standard_name = ccpp_error_flag + long_name = Error flag for error handling in CCPP + units = flag + dimensions = () + type = integer + intent = out +[ccpp-arg-table] + name = cld_liq_init + type = scheme +[ tfreeze] + standard_name = water_temperature_at_freezing + long_name = Freezing temperature of water at sea level + units = K + dimensions = () + type = real | kind = kind_phys + intent = in +[ cld_liq ] + standard_name = cloud_liquid_dry_mixing_ratio + advected = .true. + units = kg kg-1 + dimensions = (horizontal_dimension, vertical_layer_dimension) + type = real | kind = kind_phys + # Advected species that needs to be promoted from suite. + intent = out +[ tcld] + standard_name = minimum_temperature_for_cloud_liquid + units = K + dimensions = () + type = real | kind = kind_phys + intent = out +[ errmsg ] + standard_name = ccpp_error_message + long_name = Error message for error handling in CCPP + units = 1 + dimensions = () + type = character + kind = len=512 + intent = out +[ errflg ] + standard_name = ccpp_error_flag + long_name = Error flag for error handling in CCPP + units = flag + dimensions = () + type = integer + intent = out diff --git a/test/advection_test/cld_suite.xml b/test/advection_test/cld_suite.xml new file mode 100644 index 00000000..f3fe1531 --- /dev/null +++ b/test/advection_test/cld_suite.xml @@ -0,0 +1,8 @@ +<?xml version="1.0" encoding="UTF-8"?> + +<suite name="cld_suite" version="1.0"> + <group name="physics"> + <scheme>cld_liq</scheme> + <scheme>cld_ice</scheme> + </group> +</suite> diff --git a/test/advection_test/cld_suite_files.txt b/test/advection_test/cld_suite_files.txt new file mode 100644 index 00000000..a40306ed --- /dev/null +++ b/test/advection_test/cld_suite_files.txt @@ -0,0 +1,2 @@ +cld_liq.meta +cld_ice.meta diff --git a/test/advection_test/run_test b/test/advection_test/run_test new file mode 100755 index 00000000..905cf1e4 --- /dev/null +++ b/test/advection_test/run_test @@ -0,0 +1,245 @@ +#! 
/bin/bash + +currdir="`pwd -P`" +scriptdir="$( cd $( dirname $0 ); pwd -P )" + +## +## Option default values +## +defdir="at_build" +build_dir="${currdir}/${defdir}" +cleanup="PASS" # Other supported options are ALWAYS and NEVER +verbosity=0 + +## +## General syntax help function +## Usage: help <exit status> +## +help () { + local hname="Usage: `basename ${0}`" + local hprefix="`echo ${hname} | tr '[!-~]' ' '`" + echo "${hname} [ --build-dir <dir name> ] [ --cleanup <opt> ]" + echo "${hprefix} [ --verbosity <#> ]" + hprefix=" " + echo "" + echo "${hprefix} <dir name>: Directory for building and running the test" + echo "${hprefix} default is <current directory>/${defdir}" + echo "${hprefix} <opt>: Cleanup option is ALWAYS, NEVER, or PASS" + echo "${hprefix} default is PASS" + echo "${hprefix} verbosity: 0, 1, or 2" + echo "${hprefix} default is 0" + exit $1 +} + +## +## Error output function (should be handed a string) +## +perr() { + >&2 echo -e "\nERROR: ${@}\n" + exit 1 +} + +## +## Cleanup the build and test directory +## +docleanup() { + # We start off in the build directory + if [ "${build_dir}" == "${currdir}" ]; then + echo "WARNING: Cannot clean ${build_dir}" + else + cd ${currdir} + rm -rf ${build_dir} + fi +} + +## Process our input arguments +while [ $# -gt 0 ]; do + case $1 in + --h | -h | --help | -help) + help 0 + ;; + --build-dir) + if [ $# -lt 2 ]; then + perr "${1} requires a build directory" + fi + build_dir="${2}" + shift + ;; + --cleanup) + if [ $# -lt 2 ]; then + perr "${1} requies a cleanup option (ALWAYS, NEVER, PASS)" + fi + if [ "${2}" == "ALWAYS" -o "${2}" == "NEVER" -o "${2}" == "PASS" ]; then + cleanup="${2}" + else + perr "Allowed cleanup options: ALWAYS, NEVER, PASS" + fi + shift + ;; + --verbosity) + if [ $# -lt 2 ]; then + perr "${1} requires a verbosity value (0, 1, or 2)" + fi + if [ "${2}" == "0" -o "${2}" == "1" -o "${2}" == "2" ]; then + verbosity=$2 + else + perr "allowed verbosity levels are 0, 1, 2" + fi + shift + ;; + *) + perr "Unrecognized option, \"${1}\"" + ;; + esac + shift +done + +# Create the build directory, if necessary +if [ -d "${build_dir}" ]; then + # Always make sure build_dir is not in the test dir + if [ "$( cd ${build_dir}; pwd -P )" == "${currdir}" ]; then + build_dir="${build_dir}/${defdir}" + fi +else + mkdir -p ${build_dir} + res=$? 
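+  # Stop here if the build directory could not be created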
+ if [ $res -ne 0 ]; then + perr "Unable to create build directory, '${build_dir}'" + fi +fi +build_dir="$( cd ${build_dir}; pwd -P )" + +## framework is the CCPP Framework root dir +framework="$( cd $( dirname $( dirname ${scriptdir} ) ); pwd -P )" +fsrc="${framework}/src" + +## +## check strings for datafile command-list test +## NB: This has to be after build_dir is finalized +## +host_files="${build_dir}/ccpp/test_host_ccpp_cap.F90" +hash_files="${fsrc}/ccpp_hashable.F90,${fsrc}/ccpp_hash_table.F90" +suite_files="${build_dir}/ccpp/ccpp_cld_suite_cap.F90" +utility_files="${build_dir}/ccpp/ccpp_kinds.F90" +utility_files="${utility_files},${fsrc}/ccpp_constituent_prop_mod.F90" +utility_files="${utility_files},${hash_files}" +ccpp_files="${utility_files},${host_files},${suite_files}" +process_list="" +module_list="cld_ice,cld_liq" +dependencies="" +suite_list="cld_suite" +required_vars="ccpp_error_flag,ccpp_error_message" +required_vars="${required_vars},cloud_ice_dry_mixing_ratio" +required_vars="${required_vars},cloud_liquid_dry_mixing_ratio" +required_vars="${required_vars},horizontal_loop_begin" +required_vars="${required_vars},horizontal_loop_end" +required_vars="${required_vars},surface_air_pressure" +required_vars="${required_vars},temperature" +required_vars="${required_vars},time_step_for_physics" +required_vars="${required_vars},water_temperature_at_freezing" +required_vars="${required_vars},water_vapor_specific_humidity" +input_vars="cloud_ice_dry_mixing_ratio,cloud_liquid_dry_mixing_ratio" +input_vars="${input_vars},horizontal_loop_begin" +input_vars="${input_vars},horizontal_loop_end" +input_vars="${input_vars},surface_air_pressure,temperature" +input_vars="${input_vars},time_step_for_physics,water_temperature_at_freezing" +input_vars="${input_vars},water_vapor_specific_humidity" +output_vars="ccpp_error_flag,ccpp_error_message" +output_vars="${output_vars},cloud_ice_dry_mixing_ratio" +output_vars="${output_vars},cloud_liquid_dry_mixing_ratio" +output_vars="${output_vars},temperature" +output_vars="${output_vars},water_vapor_specific_humidity" + +## +## Run a database report and check the return string +## $1 is the report program file +## $2 is the database file +## $3 is the report string +## $4 is the check string +## $5+ are any optional arguments +## +check_datatable() { + local checkstr=${4} + local teststr + local prog=${1} + local database=${2} + local report=${3} + shift 4 + echo "Checking ${report} report" + teststr="`${prog} ${database} ${report} $@`" + if [ "${teststr}" != "${checkstr}" ]; then + perr "datatable check:\nExpected: '${checkstr}'\nGot: '${teststr}'" + fi +} + +# cd to the build directory +cd ${build_dir} +res=$? +if [ $res -ne 0 ]; then + perr "Unable to cd to build directory, '${build_dir}'" +fi +# Clean build directory +rm -rf * +res=$? +if [ $res -ne 0 ]; then + perr "Unable to clean build directory, '${build_dir}'" +fi +# Run CMake +opts="" +if [ $verbosity -gt 0 ]; then + opts="${opts} -DVERBOSITY=${verbosity}" +fi +# Run cmake +cmake ${scriptdir} ${opts} +res=$? +if [ $res -ne 0 ]; then + perr "CMake failed with exit code, ${res}" +fi +# Test the datafile user interface +report_prog="${framework}/scripts/ccpp_datafile.py" +datafile="${build_dir}/ccpp/datatable.xml" +echo "Running python interface tests" +python ${scriptdir}/test_reports.py ${build_dir} ${datafile} +res=$? 
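+# Fail immediately if the python datafile report tests did not pass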
+if [ $res -ne 0 ]; then + perr "python interface tests failed" +fi +echo "Running command line tests" +echo "Checking required files from command line:" +check_datatable ${report_prog} ${datafile} "--host-files" ${host_files} +check_datatable ${report_prog} ${datafile} "--suite-files" ${suite_files} +check_datatable ${report_prog} ${datafile} "--utility-files" ${utility_files} +check_datatable ${report_prog} ${datafile} "--ccpp-files" ${ccpp_files} +echo -e "\nChecking lists from command line" +check_datatable ${report_prog} ${datafile} "--process-list" "${process_list}" +check_datatable ${report_prog} ${datafile} "--module-list" ${module_list} +check_datatable ${report_prog} ${datafile} "--dependencies" "${dependencies}" +check_datatable ${report_prog} ${datafile} "--suite-list" ${suite_list} \ + --sep ";" +echo -e "\nChecking variables from command line" +check_datatable ${report_prog} ${datafile} "--required-variables" \ + ${required_vars} "cld_suite" +check_datatable ${report_prog} ${datafile} "--input-variables" \ + ${input_vars} "cld_suite" +check_datatable ${report_prog} ${datafile} "--output-variables" \ + ${output_vars} "cld_suite" +# Run make +make +res=$? +if [ $res -ne 0 ]; then + perr "make failed with exit code, ${res}" +fi +# Run test +./test_host +res=$? +if [ $res -ne 0 ]; then + perr "test_host failed with exit code, ${res}" +fi + +if [ "${cleanup}" == "ALWAYS" ]; then + docleanup +elif [ $res -eq 0 -a "${cleanup}" == "PASS" ]; then + docleanup +fi + +exit $res diff --git a/test/advection_test/test_host.F90 b/test/advection_test/test_host.F90 new file mode 100644 index 00000000..3e9add58 --- /dev/null +++ b/test/advection_test/test_host.F90 @@ -0,0 +1,463 @@ +module test_prog + + use ccpp_kinds, only: kind_phys + + implicit none + private + + public test_host + + ! Public data and interfaces + integer, public, parameter :: cs = 16 + integer, public, parameter :: cm = 36 + + type, public :: suite_info + character(len=cs) :: suite_name = '' + character(len=cs), pointer :: suite_parts(:) => NULL() + character(len=cm), pointer :: suite_input_vars(:) => NULL() + character(len=cm), pointer :: suite_output_vars(:) => NULL() + character(len=cm), pointer :: suite_required_vars(:) => NULL() + end type suite_info + + private :: check_list + private :: check_suite + private :: constituents_in ! Data from suites to dycore array + private :: constituents_out ! Data from dycore array to suires + private :: advect_constituents ! Move data around + +CONTAINS + + logical function check_list(test_list, chk_list, list_desc, suite_name) + ! Check a list (<test_list>) against its expected value (<chk_list>) + + ! Dummy arguments + character(len=*), intent(in) :: test_list(:) + character(len=*), intent(in) :: chk_list(:) + character(len=*), intent(in) :: list_desc + character(len=*), optional, intent(in) :: suite_name + + ! Local variables + logical :: found + integer :: num_items + integer :: lindex, tindex + integer, allocatable :: check_unique(:) + character(len=2) :: sep + character(len=256) :: errmsg + + check_list = .true. + errmsg = '' + + ! Check the list size + num_items = size(chk_list) + if (size(test_list) /= num_items) then + write(errmsg, '(a,i0,2a)') 'ERROR: Found ', size(test_list), & + ' ', trim(list_desc) + if (present(suite_name)) then + write(errmsg(len_trim(errmsg)+1:), '(2a)') ' for suite, ', & + trim(suite_name) + end if + write(errmsg(len_trim(errmsg)+1:), '(a,i0)') ', should be ', num_items + write(6, *) trim(errmsg) + errmsg = '' + check_list = .false. + end if + + ! 
Now, check the list contents for 1-1 correspondence + if (check_list) then + allocate(check_unique(num_items)) + check_unique = -1 + do lindex = 1, num_items + found = .false. + do tindex = 1, num_items + if (trim(test_list(lindex)) == trim(chk_list(tindex))) then + check_unique(tindex) = lindex + found = .true. + exit + end if + end do + if (.not. found) then + check_list = .false. + write(errmsg, '(5a)') 'ERROR: ', trim(list_desc), ' item, ', & + trim(test_list(lindex)), ', was not found' + if (present(suite_name)) then + write(errmsg(len_trim(errmsg)+1:), '(2a)') ' in suite, ', & + trim(suite_name) + end if + write(6, *) trim(errmsg) + errmsg = '' + end if + end do + if (check_list .and. ANY(check_unique < 0)) then + check_list = .false. + write(errmsg, '(3a)') 'ERROR: The following ', trim(list_desc), & + ' items were not found' + if (present(suite_name)) then + write(errmsg(len_trim(errmsg)+1:), '(2a)') ' in suite, ', & + trim(suite_name) + end if + sep = '; ' + do lindex = 1, num_items + if (check_unique(lindex) < 0) then + write(errmsg(len_trim(errmsg)+1:), '(2a)') sep, & + trim(chk_list(lindex)) + sep = ', ' + end if + end do + write(6, *) trim(errmsg) + errmsg = '' + end if + end if + + end function check_list + + logical function check_suite(test_suite) + use test_host_ccpp_cap, only: ccpp_physics_suite_part_list + use test_host_ccpp_cap, only: ccpp_physics_suite_variables + + ! Dummy argument + type(suite_info), intent(in) :: test_suite + ! Local variables + integer :: sind + logical :: check + integer :: errflg + character(len=512) :: errmsg + character(len=128), allocatable :: test_list(:) + + check_suite = .true. + ! First, check the suite parts + call ccpp_physics_suite_part_list(test_suite%suite_name, test_list, & + errmsg, errflg) + if (errflg == 0) then + check = check_list(test_list, test_suite%suite_parts, 'part names', & + suite_name=test_suite%suite_name) + else + check = .false. + write(6, '(a,i0,2a)') 'ERROR ', errflg, ': ', trim(errmsg) + end if + check_suite = check_suite .and. check + if (allocated(test_list)) then + deallocate(test_list) + end if + ! Check the input variables + call ccpp_physics_suite_variables(test_suite%suite_name, test_list, & + errmsg, errflg, input_vars=.true., output_vars=.false.) + if (errflg == 0) then + check = check_list(test_list, test_suite%suite_input_vars, & + 'input variable names', suite_name=test_suite%suite_name) + else + check = .false. + write(6, '(a,i0,2a)') 'ERROR ', errflg, ': ', trim(errmsg) + end if + check_suite = check_suite .and. check + if (allocated(test_list)) then + deallocate(test_list) + end if + ! Check the output variables + call ccpp_physics_suite_variables(test_suite%suite_name, test_list, & + errmsg, errflg, input_vars=.false., output_vars=.true.) + if (errflg == 0) then + check = check_list(test_list, test_suite%suite_output_vars, & + 'output variable names', suite_name=test_suite%suite_name) + else + check = .false. + write(6, '(a,i0,2a)') 'ERROR ', errflg, ': ', trim(errmsg) + end if + check_suite = check_suite .and. check + if (allocated(test_list)) then + deallocate(test_list) + end if + ! Check all required variables + call ccpp_physics_suite_variables(test_suite%suite_name, test_list, & + errmsg, errflg) + if (errflg == 0) then + check = check_list(test_list, test_suite%suite_required_vars, & + 'required variable names', suite_name=test_suite%suite_name) + else + check = .false. + write(6, '(a,i0,2a)') 'ERROR ', errflg, ': ', trim(errmsg) + end if + check_suite = check_suite .and. 
check + if (allocated(test_list)) then + deallocate(test_list) + end if + end function check_suite + + logical function constituents_in(num_host_fields) result(okay) + ! Copy advected species from physics to 'dynamics' array + use test_host_mod, only: phys_state, ncnst, index_qv + use test_host_ccpp_cap, only: test_host_ccpp_gather_constituents + + ! Dummy argument + integer, intent(in) :: num_host_fields ! Packed at beginning of Q + ! Local variables + integer :: q_off + integer :: errflg + character(len=512) :: errmsg + + okay = .true. + q_off = num_host_fields + 1 + call test_host_ccpp_gather_constituents(phys_state%q(:,:,q_off:), & + errflg=errflg, errmsg=errmsg) + if (errflg /= 0) then + write(6, *) "ERROR: gather_constituents failed, '", trim(errmsg), "'" + okay = .false. + end if + + end function constituents_in + + logical function constituents_out(num_host_fields) result(okay) + ! Copy advected constituents back to physics + use test_host_mod, only: phys_state, ncnst, index_qv + use test_host_ccpp_cap, only: test_host_ccpp_update_constituents + + ! Dummy argument + integer, intent(in) :: num_host_fields ! Packed at beginning of Q + ! Local variables + integer :: q_off + integer :: errflg + character(len=512) :: errmsg + + okay = .true. + q_off = num_host_fields + 1 + call test_host_ccpp_update_constituents(phys_state%q(:,:,q_off:), & + errflg=errflg, errmsg=errmsg) + if (errflg /= 0) then + write(6, *) "ERROR: update_constituents failed, '", trim(errmsg), "'" + okay = .false. + end if + + end function constituents_out + + subroutine advect_constituents() + use test_host_mod, only: phys_state, ncnst, index_qv, ncols, pver + use test_host_mod, only: twist_array + + ! Local variables + integer :: q_ind ! Constituent index + + do q_ind = 1, ncnst ! Skip checks, they were done in constituents_in + call twist_array(phys_state%q(:,:,q_ind)) + end do + end subroutine advect_constituents + + !> \section arg_table_test_host Argument Table + !! \htmlinclude arg_table_test_host.html + !! + subroutine test_host(retval, test_suites) + + use test_host_mod, only: num_time_steps, num_host_advected + use test_host_mod, only: init_data, compare_data + use test_host_mod, only: ncols, pver, pverp + use test_host_ccpp_cap, only: test_host_ccpp_register_constituents + use test_host_ccpp_cap, only: test_host_ccpp_number_constituents + use test_host_ccpp_cap, only: test_host_ccpp_physics_initialize + use test_host_ccpp_cap, only: test_host_ccpp_physics_timestep_initial + use test_host_ccpp_cap, only: test_host_ccpp_physics_run + use test_host_ccpp_cap, only: test_host_ccpp_physics_timestep_final + use test_host_ccpp_cap, only: test_host_ccpp_physics_finalize + use test_host_ccpp_cap, only: ccpp_physics_suite_list + + type(suite_info), intent(in) :: test_suites(:) + logical, intent(out) :: retval + + logical :: check + integer :: col_start, col_end + integer :: index, sind + integer :: time_step + integer :: num_suites + integer :: num_advected ! Num advected species + character(len=128), allocatable :: suite_names(:) + character(len=512) :: errmsg + integer :: errflg + + ! 
Gather and test the inspection routines + num_suites = size(test_suites) + call ccpp_physics_suite_list(suite_names) + retval = check_list(suite_names, test_suites(:)%suite_name, & + 'suite names') + write(6, *) 'Available suites are:' + do index = 1, size(suite_names) + do sind = 1, num_suites + if (trim(test_suites(sind)%suite_name) == & + trim(suite_names(index))) then + exit + end if + end do + write(6, '(i0,3a,i0,a)') index, ') ', trim(suite_names(index)), & + ' = test_suites(', sind, ')' + end do + if (retval) then + do sind = 1, num_suites + check = check_suite(test_suites(sind)) + retval = retval .and. check + end do + end if + !!! Return here if any check failed + if (.not. retval) then + return + end if + + ! Register the constituents to find out what needs advecting + call test_host_ccpp_register_constituents(suite_names(:), & + ncols, pver, pverp, errmsg=errmsg, errflg=errflg) + if (errflg /= 0) then + write(6, '(2a)') 'ERROR register_constituents: ', trim(errmsg) + end if + num_advected = test_host_ccpp_number_constituents(errmsg=errmsg, & + errflg=errflg) + if (num_advected /= 2) then + write(6, '(a,i0)') "ERROR: num advected constituents = ", num_advected + STOP 2 + end if + + ! Initialize our 'data' + call init_data(num_advected) + + ! Use the suite information to setup the run + do sind = 1, num_suites + call test_host_ccpp_physics_initialize(test_suites(sind)%suite_name,& + errmsg, errflg) + if (errflg /= 0) then + write(6, '(4a)') 'ERROR in initialize of ', & + trim(test_suites(sind)%suite_name), ': ', trim(errmsg) + exit + end if + end do + ! Loop over time steps + do time_step = 1, num_time_steps + ! Initialize the timestep + do sind = 1, num_suites + if (retval) then + call test_host_ccpp_physics_timestep_initial( & + test_suites(sind)%suite_name, errmsg, errflg) + if (errflg /= 0) then + write(6, '(3a)') trim(test_suites(sind)%suite_name), ': ', & + trim(errmsg) + end if + end if + end do + + do col_start = 1, ncols, 5 + if (errflg /= 0) then + continue + end if + col_end = MIN(col_start + 4, ncols) + + do sind = 1, num_suites + do index = 1, size(test_suites(sind)%suite_parts) + call test_host_ccpp_physics_run( & + test_suites(sind)%suite_name, & + test_suites(sind)%suite_parts(index), & + col_start, col_end, errmsg, errflg) + if (errflg /= 0) then + write(6, '(5a)') trim(test_suites(sind)%suite_name), & + '/', trim(test_suites(sind)%suite_parts(index)), & + ': ', trim(errmsg) + exit + end if + end do + end do + end do + + do sind = 1, num_suites + if (errflg == 0) then + call test_host_ccpp_physics_timestep_final( & + test_suites(sind)%suite_name, errmsg, errflg) + end if + if (errflg /= 0) then + write(6, '(3a)') trim(test_suites(sind)%suite_name), ': ', & + trim(errmsg) + end if + end do + + ! Run "dycore" + if (errflg == 0) then + check = constituents_in(num_host_advected) + end if + if (check) then + call advect_constituents() + check = constituents_out(num_host_advected) + end if + end do ! End time step loop + + do sind = 1, num_suites + if (errflg == 0) then + call test_host_ccpp_physics_finalize( & + test_suites(sind)%suite_name, errmsg, errflg) + if (errflg /= 0) then + write(6, '(3a)') test_suites(sind)%suite_parts(index), ': ', & + trim(errmsg) + write(6,'(2a)') 'An error occurred in ccpp_timestep_final, ', & + 'Exiting...' + end if + end if + end do + + if (errflg == 0) then + ! Run finished without error, check answers + if (compare_data(num_advected + num_host_advected)) then + write(6, *) 'Answers are correct!' 
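+         ! leave errflg at zero so the final status check (retval) passes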
+ errflg = 0 + else + write(6, *) 'Answers are not correct!' + errflg = -1 + end if + end if + + retval = errflg == 0 + + end subroutine test_host + + end module test_prog + + program test + use test_prog, only: test_host, suite_info, cm, cs + + implicit none + + character(len=cs), target :: test_parts1(1) = (/ 'physics ' /) + character(len=cm), target :: test_invars1(7) = (/ & + 'cloud_ice_dry_mixing_ratio ', & + 'cloud_liquid_dry_mixing_ratio ', & + 'surface_air_pressure ', & + 'temperature ', & + 'time_step_for_physics ', & + 'water_temperature_at_freezing ', & + 'water_vapor_specific_humidity ' /) + character(len=cm), target :: test_outvars1(6) = (/ & + 'ccpp_error_message ', & + 'ccpp_error_flag ', & + 'temperature ', & + 'water_vapor_specific_humidity ', & + 'cloud_liquid_dry_mixing_ratio ', & + 'cloud_ice_dry_mixing_ratio ' /) + character(len=cm), target :: test_reqvars1(9) = (/ & + 'surface_air_pressure ', & + 'temperature ', & + 'time_step_for_physics ', & + 'cloud_liquid_dry_mixing_ratio ', & + 'cloud_ice_dry_mixing_ratio ', & + 'water_temperature_at_freezing ', & + 'water_vapor_specific_humidity ', & + 'ccpp_error_message ', & + 'ccpp_error_flag ' /) + + type(suite_info) :: test_suites(1) + logical :: run_okay + + ! Setup expected test suite info + test_suites(1)%suite_name = 'cld_suite' + test_suites(1)%suite_parts => test_parts1 + test_suites(1)%suite_input_vars => test_invars1 + test_suites(1)%suite_output_vars => test_outvars1 + test_suites(1)%suite_required_vars => test_reqvars1 + + call test_host(run_okay, test_suites) + + if (run_okay) then + STOP 0 + else + STOP -1 + end if + +end program test diff --git a/test/advection_test/test_host.meta b/test/advection_test/test_host.meta new file mode 100644 index 00000000..d648baf7 --- /dev/null +++ b/test/advection_test/test_host.meta @@ -0,0 +1,31 @@ +[ccpp-table-properties] + name = test_host + type = host +[ccpp-arg-table] + name = test_host + type = host +[ col_start ] + standard_name = horizontal_loop_begin + type = integer + units = count + dimensions = () + protected = True +[ col_end ] + standard_name = horizontal_loop_end + type = integer + units = count + dimensions = () + protected = True +[ errmsg ] + standard_name = ccpp_error_message + long_name = Error message for error handling in CCPP + units = 1 + dimensions = () + type = character + kind = len=512 +[ errflg ] + standard_name = ccpp_error_flag + long_name = Error flag for error handling in CCPP + units = flag + dimensions = () + type = integer diff --git a/test/advection_test/test_host_data.F90 b/test/advection_test/test_host_data.F90 new file mode 100644 index 00000000..10183cd6 --- /dev/null +++ b/test/advection_test/test_host_data.F90 @@ -0,0 +1,41 @@ +module test_host_data + + use ccpp_kinds, only: kind_phys + + !> \section arg_table_physics_state Argument Table + !! \htmlinclude arg_table_physics_state.html + type physics_state + real(kind_phys), dimension(:), allocatable :: & + ps ! surface pressure + real(kind_phys), dimension(:,:), allocatable :: & + temp ! temperature + real(kind_phys), dimension(:,:,:),allocatable :: & + q ! 
constituent mixing ratio (kg/kg moist or dry air depending on type) + end type physics_state + + public allocate_physics_state + +contains + + subroutine allocate_physics_state(cols, levels, constituents, state) + integer, intent(in) :: cols + integer, intent(in) :: levels + integer, intent(in) :: constituents + type(physics_state), intent(out) :: state + + if (allocated(state%ps)) then + deallocate(state%ps) + end if + allocate(state%ps(cols)) + if (allocated(state%temp)) then + deallocate(state%temp) + end if + allocate(state%temp(cols, levels)) + if (allocated(state%q)) then + deallocate(state%q) + end if + allocate(state%q(cols, levels, constituents)) + + end subroutine allocate_physics_state + +end module test_host_data diff --git a/test/advection_test/test_host_data.meta b/test/advection_test/test_host_data.meta new file mode 100644 index 00000000..9e03d268 --- /dev/null +++ b/test/advection_test/test_host_data.meta @@ -0,0 +1,32 @@ +[ccpp-table-properties] + name = physics_state + type = ddt +[ccpp-arg-table] + name = physics_state + type = ddt +[ ps ] + standard_name = surface_air_pressure + state_variable = true + type = real + kind = kind_phys + units = Pa + dimensions = (horizontal_dimension) +[ Temp ] + standard_name = temperature + units = K + dimensions = (horizontal_dimension, vertical_layer_dimension) + type = real | kind = kind_phys +[ q ] + standard_name = constituent_mixing_ratio + state_variable = true + type = real + kind = kind_phys + units = kg/kg moist or dry air depending on type + dimensions = (horizontal_dimension, vertical_layer_dimension, number_of_tracers) +[ q(:,:,index_of_water_vapor_specific_humidity) ] + standard_name = water_vapor_specific_humidity + state_variable = true + type = real + kind = kind_phys + units = kg kg-1 + dimensions = (horizontal_dimension, vertical_layer_dimension) diff --git a/test/advection_test/test_host_mod.F90 b/test/advection_test/test_host_mod.F90 new file mode 100644 index 00000000..3e4f60a5 --- /dev/null +++ b/test/advection_test/test_host_mod.F90 @@ -0,0 +1,165 @@ +module test_host_mod + + use ccpp_kinds, only: kind_phys + use test_host_data, only: physics_state, allocate_physics_state + + implicit none + public + + integer, parameter :: num_time_steps = 2 + real(kind_phys), parameter :: tolerance = 1.0e-13_kind_phys + + !> \section arg_table_test_host_mod Argument Table + !! \htmlinclude arg_table_test_host_host.html + !! + integer, parameter :: ncols = 10 + integer, parameter :: pver = 5 + integer, parameter :: pverP = pver + 1 + integer, parameter :: num_host_advected = 1 + integer, protected :: ncnst = -1 + integer, parameter :: index_qv = 1 + real(kind_phys) :: dt + real(kind_phys), parameter :: tfreeze = 273.15_kind_phys + type(physics_state) :: phys_state + integer :: num_model_times = -1 + integer, allocatable :: model_times(:) + + public :: init_data + public :: compare_data + public :: twist_array + + real(kind_phys), private, allocatable :: check_vals(:,:,:) + real(kind_phys), private :: check_temp(ncols, pver) + +contains + + subroutine init_data(num_advected) + + integer, intent(in) :: num_advected ! From suites + + integer :: col + integer :: lev + integer :: cind + integer :: itime + real(kind_phys) :: qmax + + ! Allocate and initialize state + ! Temperature starts above freezing and decreases to -30C + ! 
water vapor is initialized in odd columns to different amounts + ncnst = num_advected + num_host_advected + call allocate_physics_state(ncols, pver, ncnst, phys_state) + allocate(check_vals(ncols, pver, ncnst)) + check_vals(:,:,:) = 0.0_kind_phys + do lev = 1, pver + phys_state%temp(:, lev) = tfreeze + (10.0_kind_phys * (lev - 3)) + qmax = real(lev, kind_phys) + do col = 1, ncols + if (mod(col, 2) == 1) then + phys_state%q(col, lev, index_qv) = qmax + else + phys_state%q(col, lev, index_qv) = 0.0_kind_phys + end if + end do + end do + check_vals(:,:,index_qv) = phys_state%q(:,:,index_qv) + check_temp(:,:) = phys_state%temp(:,:) + ! Do timestep 1 + do col = 1, ncols, 2 + check_temp(col, 1) = check_temp(col, 1) + 0.5_kind_phys + check_vals(col, 1, 1) = check_vals(col, 1, 1) - 0.1_kind_phys + check_vals(col, 1, 3) = check_vals(col, 1, 3) + 0.1_kind_phys + end do + do itime = 1, num_time_steps + do cind = 1, ncnst + call twist_array(check_vals(:,:,cind)) + end do + end do + + end subroutine init_data + + subroutine twist_array(array) + ! Dummy argument + real(kind_phys), intent(inout) :: array(:,:) + + ! Local variables + integer :: q_ind ! Constituent index + integer :: icol, ilev ! Field coordinates + integer :: idir ! 'w' sign + integer :: levb, leve ! Starting and ending level indices + real(kind_phys) :: last_val, next_val + + idir = 1 + leve = (pver * mod(ncols, 2)) + mod(ncols-1, 2) + last_val = array(ncols, leve) + do icol = 1, ncols + levb = ((pver * (1 - idir)) + (1 + idir)) / 2 + leve = ((pver * (1 + idir)) + (1 - idir)) / 2 + do ilev = levb, leve, idir + next_val = array(icol, ilev) + array(icol, ilev) = last_val + last_val = next_val + end do + idir = -1 * idir + end do + + end subroutine twist_array + + logical function compare_data(ncnst) + + integer, intent(in) :: ncnst + + integer :: col + integer :: lev + integer :: cind + integer :: nind + logical :: need_header + real(kind_phys) :: check + real(kind_phys) :: denom + + compare_data = .true. + + need_header = .true. + do lev = 1, pver + do col = 1, ncols + check = check_temp(col, lev) + if (abs((phys_state%temp(col, lev) - check) / check) > & + tolerance) then + if (need_header) then + write(6, '(" COL LEV T MIDPOINTS EXPECTED")') + need_header = .false. + end if + write(6, '(2i5,2(3x,es15.7))') col, lev, & + phys_state%temp(col, lev), check + compare_data = .false. + end if + end do + end do + ! Check constituents + need_header = .true. + do cind = 1, ncnst + do lev = 1, pver + do col = 1, ncols + check = check_vals(col, lev, cind) + if (check < tolerance) then + denom = 1.0_kind_phys + else + denom = check + end if + if (abs((phys_state%q(col, lev, cind) - check) / denom) > & + tolerance) then + if (need_header) then + write(6, '(2(2x,a),3x,a,10x,a,14x,a)') & + 'COL', 'LEV', 'C#', 'Q', 'EXPECTED' + need_header = .false. + end if + write(6, '(3i5,2(3x,es15.7))') col, lev, cind, & + phys_state%q(col, lev, cind), check + compare_data = .false. 
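The mock advection in twist_array above walks the (column, level) array along a boustrophedon path (down the levels of the first column, up the second, and so on) and shifts every value one position along that path, so repeated calls cycle each field through the domain. A small Python rendering of the same rotation, using plain nested lists, is sketched here for illustration only.

    def twist(array):
        """One-step cyclic shift along a boustrophedon (serpentine) path,
        mirroring test_host_mod::twist_array; 'array' is a list of columns,
        each a list of level values (0-based indices)."""
        ncols, pver = len(array), len(array[0])
        # Seed with the value at the position the serpentine walk visits last
        last_val = array[-1][pver - 1 if ncols % 2 == 1 else 0]
        idir = 1
        for icol in range(ncols):
            levels = range(pver) if idir == 1 else range(pver - 1, -1, -1)
            for ilev in levels:
                array[icol][ilev], last_val = last_val, array[icol][ilev]
            idir = -idir
        return array

    print(twist([[1, 2, 3], [4, 5, 6]]))   # -> [[4, 1, 2], [5, 6, 3]]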
+ end if + end do + end do + end do + + end function compare_data + +end module test_host_mod diff --git a/test/advection_test/test_host_mod.meta b/test/advection_test/test_host_mod.meta new file mode 100644 index 00000000..9f04a6fc --- /dev/null +++ b/test/advection_test/test_host_mod.meta @@ -0,0 +1,64 @@ +[ccpp-table-properties] + name = test_host_mod + type = module +[ccpp-arg-table] + name = test_host_mod + type = module +[ ncols] + standard_name = horizontal_dimension + units = count + type = integer + protected = True + dimensions = () +[ pver ] + standard_name = vertical_layer_dimension + units = count + type = integer + protected = True + dimensions = () +[ pverP ] + standard_name = vertical_interface_dimension + type = integer + units = count + protected = True + dimensions = () +[ ncnst ] + standard_name = number_of_tracers + type = integer + units = count + protected = True + dimensions = () +[ index_qv ] + standard_name = index_of_water_vapor_specific_humidity + units = index + type = integer + protected = True + dimensions = () +[ dt ] + standard_name = time_step_for_physics + long_name = time step + units = s + dimensions = () + type = real | kind = kind_phys +[ tfreeze ] + standard_name = water_temperature_at_freezing + long_name = Freezing temperature of water at sea level + units = K + dimensions = () + type = real | kind = kind_phys +[ phys_state ] + standard_name = physics_state_derived_type + long_name = Physics State DDT + type = physics_state + dimensions = () +[ num_model_times ] + standard_name = number_of_model_times + type = integer + units = count + dimensions = () +[ model_times ] + standard_name = model_times + units = seconds + dimensions = (number_of_model_times) + type = integer + allocatable = True diff --git a/test/advection_test/test_reports.py b/test/advection_test/test_reports.py new file mode 100644 index 00000000..a5a706cb --- /dev/null +++ b/test/advection_test/test_reports.py @@ -0,0 +1,157 @@ +#! 
/usr/bin/env python +""" +----------------------------------------------------------------------- + Description: Test advection database report python interface + + Assumptions: + + Command line arguments: build_dir database_filepath + + Usage: python test_reports <build_dir> <database_filepath> +----------------------------------------------------------------------- +""" +import sys +import os + +_TEST_DIR = os.path.dirname(os.path.abspath(__file__)) +_FRAMEWORK_DIR = os.path.abspath(os.path.join(_TEST_DIR, os.pardir, os.pardir)) +_SCRIPTS_DIR = os.path.abspath(os.path.join(_FRAMEWORK_DIR, "scripts")) + +if not os.path.exists(_SCRIPTS_DIR): + raise ImportError("Cannot find scripts directory") +# end if + +sys.path.append(_SCRIPTS_DIR) +# pylint: disable=wrong-import-position +from ccpp_datafile import datatable_report, DatatableReport +# pylint: enable=wrong-import-position + +def usage(errmsg=None): + """Raise an exception with optional error message and usage message""" + emsg = "usage: {} <build_dir> <database_filepath>" + if errmsg: + emsg = errmsg + '\n' + emsg + # end if + raise ValueError(emsg.format(sys.argv[0])) + +if len(sys.argv) != 3: + usage() +# end if + +_BUILD_DIR = os.path.abspath(sys.argv[1]) +_DATABASE = os.path.abspath(sys.argv[2]) +if not os.path.isdir(_BUILD_DIR): + _EMSG = "<build_dir> must be an existing build directory" + usage(_EMSG) +# end if +if (not os.path.exists(_DATABASE)) or (not os.path.isfile(_DATABASE)): + _EMSG = "<database_filepath> must be an existing CCPP database file" + usage(_EMSG) +# end if + +# Check data +_HOST_FILES = [os.path.join(_BUILD_DIR, "ccpp", "test_host_ccpp_cap.F90")] +_SUITE_FILES = [os.path.join(_BUILD_DIR, "ccpp", "ccpp_cld_suite_cap.F90")] +_UTILITY_FILES = [os.path.join(_BUILD_DIR, "ccpp", "ccpp_kinds.F90"), + os.path.join(_FRAMEWORK_DIR, "src", + "ccpp_constituent_prop_mod.F90"), + os.path.join(_FRAMEWORK_DIR, "src", "ccpp_hashable.F90"), + os.path.join(_FRAMEWORK_DIR, "src", "ccpp_hash_table.F90")] +_CCPP_FILES = _UTILITY_FILES + _HOST_FILES + _SUITE_FILES +_PROCESS_LIST = list() +_MODULE_LIST = ["cld_ice", "cld_liq"] +_SUITE_LIST = ["cld_suite"] +_REQUIRED_VARS_CLD = ["ccpp_error_flag", "ccpp_error_message", + "horizontal_loop_begin", "horizontal_loop_end", + "surface_air_pressure", "temperature", + "time_step_for_physics", "water_temperature_at_freezing", + "water_vapor_specific_humidity", + "cloud_ice_dry_mixing_ratio", + "cloud_liquid_dry_mixing_ratio"] +_INPUT_VARS_CLD = ["surface_air_pressure", "temperature", + "horizontal_loop_begin", "horizontal_loop_end", + "time_step_for_physics", "water_temperature_at_freezing", + "water_vapor_specific_humidity", + "cloud_ice_dry_mixing_ratio", + "cloud_liquid_dry_mixing_ratio"] +_OUTPUT_VARS_CLD = ["ccpp_error_flag", "ccpp_error_message", + "water_vapor_specific_humidity", "temperature", + "cloud_ice_dry_mixing_ratio", + "cloud_liquid_dry_mixing_ratio"] + +def fields_string(field_type, field_list, sep): + """Create an error string for <field_type> field(s), <field_list>. + <sep> is used to separate items in <field_list>""" + indent = ' '*11 + if field_list: + if len(field_list) > 1: + field_str = "{} Fields: ".format(field_type) + else: + field_str = "{} Field: ".format(field_type) + # end if + fmsg = "\n{}{}{}".format(indent, field_str, sep.join(field_list)) + else: + fmsg = "" + # end if + return fmsg + +def check_datatable(database, report_type, check_list, sep=','): + """Run a database report and check the return string. + If an error is found, print an error message. 
+ Return the number of errors""" + if sep is None: + sep = ',' + # end if + test_str = datatable_report(database, report_type, sep) + test_list = [x for x in test_str.split(sep) if x] + missing = list() + unexpected = list() + for item in check_list: + if item not in test_list: + missing.append(item) + # end if + # end for + for item in test_list: + if item not in check_list: + unexpected.append(item) + # end if + # end for + if missing or unexpected: + vmsg = "ERROR in {} datafile check:".format(report_type.action) + vmsg += fields_string("Missing", missing, sep) + vmsg += fields_string("Unexpected", unexpected, sep) + print(vmsg) + else: + print("{} report okay".format(report_type.action)) + # end if + return len(missing) + len(unexpected) + +NUM_ERRORS = 0 +print("Checking required files from python:") +NUM_ERRORS += check_datatable(_DATABASE, DatatableReport("host_files"), + _HOST_FILES) +NUM_ERRORS += check_datatable(_DATABASE, DatatableReport("suite_files"), + _SUITE_FILES) +NUM_ERRORS += check_datatable(_DATABASE, DatatableReport("utility_files"), + _UTILITY_FILES) +NUM_ERRORS += check_datatable(_DATABASE, DatatableReport("ccpp_files"), + _CCPP_FILES) +print("\nChecking lists from python") +NUM_ERRORS += check_datatable(_DATABASE, DatatableReport("process_list"), + _PROCESS_LIST) +NUM_ERRORS += check_datatable(_DATABASE, DatatableReport("module_list"), + _MODULE_LIST) +NUM_ERRORS += check_datatable(_DATABASE, DatatableReport("suite_list"), + _SUITE_LIST) +print("\nChecking variables for CLD suite from python") +NUM_ERRORS += check_datatable(_DATABASE, DatatableReport("required_variables", + value="cld_suite"), + _REQUIRED_VARS_CLD) +NUM_ERRORS += check_datatable(_DATABASE, DatatableReport("input_variables", + value="cld_suite"), + _INPUT_VARS_CLD) +NUM_ERRORS += check_datatable(_DATABASE, DatatableReport("output_variables", + value="cld_suite"), + _OUTPUT_VARS_CLD) + +sys.exit(NUM_ERRORS) diff --git a/test/capgen_test/.gitignore b/test/capgen_test/.gitignore new file mode 100644 index 00000000..378eac25 --- /dev/null +++ b/test/capgen_test/.gitignore @@ -0,0 +1 @@ +build diff --git a/test/capgen_test/CMakeLists.txt b/test/capgen_test/CMakeLists.txt new file mode 100644 index 00000000..f02213a0 --- /dev/null +++ b/test/capgen_test/CMakeLists.txt @@ -0,0 +1,187 @@ +CMAKE_MINIMUM_REQUIRED(VERSION 2.8) +PROJECT(test_host) +ENABLE_LANGUAGE(Fortran) + +include(CMakeForceCompiler) + +SET(CMAKE_MODULE_PATH ${CMAKE_MODULE_PATH} ${CMAKE_SOURCE_DIR}/cmake/modules) + +#------------------------------------------------------------------------------ +# +# Set where the CCPP Framework lives +# +#------------------------------------------------------------------------------ +get_filename_component(TEST_ROOT "${CMAKE_SOURCE_DIR}" DIRECTORY) +get_filename_component(CCPP_ROOT "${TEST_ROOT}" DIRECTORY) +#------------------------------------------------------------------------------ +# +# Create list of SCHEME_FILES, HOST_FILES, and SUITE_FILES +# Paths should be relative to CMAKE_SOURCE_DIR (this file's directory) +# +#------------------------------------------------------------------------------ +LIST(APPEND SCHEME_FILES "temp_scheme_files.txt" "ddt_suite_files.txt") +LIST(APPEND HOST_FILES "test_host_data" "test_host_mod") +LIST(APPEND SUITE_FILES "ddt_suite.xml" "temp_suite.xml") +# HOST is the name of the executable we will build. 
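The CAPGEN_CMD assembled further down in this CMakeLists.txt boils down to a single ccpp_capgen.py invocation run at configure time. A hedged Python sketch of an equivalent standalone call is shown below; the relative paths are illustrative and assume the working directory is test/capgen_test with a ct_build build directory.

    # Rough standalone equivalent of the CAPGEN_CMD that this CMakeLists.txt
    # assembles and runs at configure time (paths are illustrative).
    import subprocess

    capgen = "../../scripts/ccpp_capgen.py"   # ${CCPP_FRAMEWORK}/ccpp_capgen.py
    subprocess.run(
        [capgen,
         "--host-files", "test_host_data.meta,test_host_mod.meta,test_host.meta",
         "--scheme-files", "temp_scheme_files.txt,ddt_suite_files.txt",
         "--suites", "ddt_suite.xml,temp_suite.xml",
         "--host-name", "test_host",
         "--output-root", "ct_build/ccpp",
         "--verbose"],
        check=True)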
+# We assume there are files ${HOST}.meta and ${HOST}.F90 in CMAKE_SOURCE_DIR +SET(HOST "${CMAKE_PROJECT_NAME}") + +#------------------------------------------------------------------------------ +# +# End of project-specific input +# +#------------------------------------------------------------------------------ + +# By default, no verbose output +SET(VERBOSITY 0 CACHE STRING "Verbosity level of output (default: 0)") +# By default, generated caps go in ccpp subdir +SET(CCPP_CAP_FILES "${CMAKE_BINARY_DIR}/ccpp" CACHE + STRING "Location of CCPP-generated cap files") + +SET(CCPP_FRAMEWORK ${CCPP_ROOT}/scripts) + +# Use rpaths on MacOSX +set(CMAKE_MACOSX_RPATH 1) + +#------------------------------------------------------------------------------ +# Set a default build type if none was specified +if(NOT CMAKE_BUILD_TYPE AND NOT CMAKE_CONFIGURATION_TYPES) + #message(STATUS "Setting build type to 'Debug' as none was specified.") + #set(CMAKE_BUILD_TYPE Debug CACHE STRING "Choose the type of build." FORCE) + message(STATUS "Setting build type to 'Release' as none was specified.") + set(CMAKE_BUILD_TYPE Release CACHE STRING "Choose the type of build." FORCE) + + # Set the possible values of build type for cmake-gui + set_property(CACHE CMAKE_BUILD_TYPE PROPERTY STRINGS "Debug" "Release" + "MinSizeRel" "RelWithDebInfo") +endif() + +ADD_COMPILE_OPTIONS(-O0) + +if (${CMAKE_Fortran_COMPILER_ID} MATCHES "GNU") +# gfortran +# MESSAGE("gfortran being used.") + ADD_COMPILE_OPTIONS(-fcheck=all) + ADD_COMPILE_OPTIONS(-fbacktrace) + ADD_COMPILE_OPTIONS(-ffpe-trap=zero) + ADD_COMPILE_OPTIONS(-finit-real=nan) + ADD_COMPILE_OPTIONS(-ggdb) + ADD_COMPILE_OPTIONS(-ffree-line-length-none) + ADD_COMPILE_OPTIONS(-cpp) +elseif (${CMAKE_Fortran_COMPILER_ID} MATCHES "Intel") +# ifort +# MESSAGE("ifort being used.") + #ADD_COMPILE_OPTIONS(-check all) + ADD_COMPILE_OPTIONS(-fpe0) + ADD_COMPILE_OPTIONS(-warn) + ADD_COMPILE_OPTIONS(-traceback) + ADD_COMPILE_OPTIONS(-debug extended) + ADD_COMPILE_OPTIONS(-fpp) +elseif (${CMAKE_Fortran_COMPILER_ID} MATCHES "PGI") +# pgf90 +# MESSAGE("pgf90 being used.") + ADD_COMPILE_OPTIONS(-g) + ADD_COMPILE_OPTIONS(-Mipa=noconst) + ADD_COMPILE_OPTIONS(-traceback) + ADD_COMPILE_OPTIONS(-Mfree) + ADD_COMPILE_OPTIONS(-Mfptrap) + ADD_COMPILE_OPTIONS(-Mpreprocess) +else (${CMAKE_Fortran_COMPILER_ID} MATCHES "GNU") + message (WARNING "This program has only been compiled with gfortran, pgf90 and ifort. 
If another compiler is needed, the appropriate flags SHOULD be added in ${CMAKE_SOURCE_DIR}/CMakeLists.txt") +endif (${CMAKE_Fortran_COMPILER_ID} MATCHES "GNU") + +#------------------------------------------------------------------------------ +# CMake Modules +# Set the CMake module path +list(APPEND CMAKE_MODULE_PATH "${CCPP_FRAMEWORK}/cmake") +#------------------------------------------------------------------------------ +# Set OpenMP flags for C/C++/Fortran +if (OPENMP) + include(detect_openmp) + detect_openmp() + set (CMAKE_C_FLAGS "${CMAKE_C_FLAGS} ${OpenMP_C_FLAGS}") + set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${OpenMP_CXX_FLAGS}") + set (CMAKE_Fortran_FLAGS "${CMAKE_Fortran_FLAGS} ${OpenMP_Fortran_FLAGS}") + message(STATUS "Enable OpenMP support for C/C++/Fortran compiler") +else(OPENMP) + message (STATUS "Disable OpenMP support for C/C++/Fortran compiler") +endif() + +# Create metadata and source file lists +FOREACH(FILE ${SCHEME_FILES}) + FILE(STRINGS ${FILE} FILENAMES) + LIST(APPEND SCHEME_FILENAMES ${FILENAMES}) +ENDFOREACH(FILE) +string(REPLACE ";" "," SCHEME_METADATA "${SCHEME_FILES}") + +FOREACH(FILE ${SCHEME_FILENAMES}) + # target_sources prefers absolute pathnames + string(REPLACE ".meta" ".F90" TEMP "${FILE}") + get_filename_component(ABS_PATH "${TEMP}" ABSOLUTE) + list(APPEND LIBRARY_LIST ${ABS_PATH}) +ENDFOREACH(FILE) + +FOREACH(FILE ${HOST_FILES}) + LIST(APPEND HOST_METADATA "${FILE}.meta") + # target_sources prefers absolute pathnames + get_filename_component(ABS_PATH "${FILE}.F90" ABSOLUTE) + LIST(APPEND HOST_SOURCE "${ABS_PATH}") +ENDFOREACH(FILE) +list(APPEND LIBRARY_LIST ${HOST_SOURCE}) +string(REPLACE ";" ".meta," HOST_METADATA "${HOST_FILES}") +set(HOST_METADATA "${HOST_METADATA}.meta,${HOST}.meta") + +string(REPLACE ";" "," SUITE_XML "${SUITE_FILES}") + +# Run ccpp_capgen +set(CAPGEN_CMD "${CCPP_FRAMEWORK}/ccpp_capgen.py") +list(APPEND CAPGEN_CMD "--host-files") +list(APPEND CAPGEN_CMD "${HOST_METADATA}") +list(APPEND CAPGEN_CMD "--scheme-files") +list(APPEND CAPGEN_CMD "${SCHEME_METADATA}") +list(APPEND CAPGEN_CMD "--suites") +list(APPEND CAPGEN_CMD "${SUITE_XML}") +list(APPEND CAPGEN_CMD "--host-name") +list(APPEND CAPGEN_CMD "test_host") +list(APPEND CAPGEN_CMD "--output-root") +list(APPEND CAPGEN_CMD "${CCPP_CAP_FILES}") +while (VERBOSITY GREATER 0) + list(APPEND CAPGEN_CMD "--verbose") + MATH(EXPR VERBOSITY "${VERBOSITY} - 1") +endwhile () +string(REPLACE ";" " " CAPGEN_STRING "${CAPGEN_CMD}") +MESSAGE(STATUS "Running: ${CAPGEN_STRING}") +EXECUTE_PROCESS(COMMAND ${CAPGEN_CMD} WORKING_DIRECTORY ${CMAKE_SOURCE_DIR} + OUTPUT_VARIABLE CAPGEN_OUT ERROR_VARIABLE CAPGEN_OUT RESULT_VARIABLE RES) +MESSAGE(STATUS "${CAPGEN_OUT}") +if (RES EQUAL 0) + MESSAGE(STATUS "CCPP cap generation completed") +else(RES EQUAL 0) + MESSAGE(FATAL_ERROR "CCPP cap generation FAILED: result = ${RES}") +endif(RES EQUAL 0) + +# Retrieve the list of files from datatable.xml and set to CCPP_CAPS +set(DTABLE_CMD "${CCPP_FRAMEWORK}/ccpp_datafile.py") +list(APPEND DTABLE_CMD "${CCPP_CAP_FILES}/datatable.xml") +list(APPEND DTABLE_CMD "--ccpp-files") +list(APPEND DTABLE_CMD "--separator=\\;") +string(REPLACE ";" " " DTABLE_STRING "${DTABLE_CMD}") +MESSAGE(STATUS "Running: ${DTABLE_STRING}") +EXECUTE_PROCESS(COMMAND ${DTABLE_CMD} OUTPUT_VARIABLE CCPP_CAPS + RESULT_VARIABLE RES + OUTPUT_STRIP_TRAILING_WHITESPACE ERROR_STRIP_TRAILING_WHITESPACE) +message(STATUS "CCPP_CAPS = ${CCPP_CAPS}") +if (RES EQUAL 0) + MESSAGE(STATUS "CCPP cap files retrieved") +else(RES EQUAL 0) + MESSAGE(FATAL_ERROR "CCPP 
cap file retrieval FAILED: result = ${RES}") +endif(RES EQUAL 0) +list(APPEND LIBRARY_LIST ${CCPP_CAPS}) +add_library(TESTLIB OBJECT ${LIBRARY_LIST}) +ADD_EXECUTABLE(${HOST} ${HOST}.F90 $<TARGET_OBJECTS:TESTLIB>) + +INCLUDE_DIRECTORIES(${CCPP_CAP_FILES}) + +set_target_properties(${HOST} PROPERTIES + COMPILE_FLAGS "${CMAKE_Fortran_FLAGS}" + LINK_FLAGS "${CMAKE_Fortran_FLAGS}") diff --git a/test/capgen_test/README.md b/test/capgen_test/README.md new file mode 100644 index 00000000..127544e0 --- /dev/null +++ b/test/capgen_test/README.md @@ -0,0 +1,6 @@ +ccpp_capgen test +=========== + +To build and run the ccpp_capgen test, run ./run_test +This script will build and run the test. +The exit code is zero (0) on PASS and non-zero on FAIL. diff --git a/test/capgen_test/ddt_suite.xml b/test/capgen_test/ddt_suite.xml new file mode 100644 index 00000000..749bb3bc --- /dev/null +++ b/test/capgen_test/ddt_suite.xml @@ -0,0 +1,8 @@ +<?xml version="1.0" encoding="UTF-8"?> + +<suite name="ddt_suite" version="1.0"> + <group name="data_prep"> + <scheme>make_ddt</scheme> + <scheme>environ_conditions</scheme> + </group> +</suite> diff --git a/test/capgen_test/ddt_suite_files.txt b/test/capgen_test/ddt_suite_files.txt new file mode 100644 index 00000000..7f96a84c --- /dev/null +++ b/test/capgen_test/ddt_suite_files.txt @@ -0,0 +1,2 @@ +make_ddt.meta +environ_conditions.meta diff --git a/test/capgen_test/environ_conditions.F90 b/test/capgen_test/environ_conditions.F90 new file mode 100644 index 00000000..62183012 --- /dev/null +++ b/test/capgen_test/environ_conditions.F90 @@ -0,0 +1,96 @@ +MODULE environ_conditions + + USE ccpp_kinds, ONLY: kind_phys + + IMPLICIT NONE + PRIVATE + + PUBLIC :: environ_conditions_init + PUBLIC :: environ_conditions_run + PUBLIC :: environ_conditions_finalize + + integer, parameter :: input_model_times = 3 + integer, parameter :: input_model_values(input_model_times) = (/ 31, 37, 41 /) + +CONTAINS + +!> \section arg_table_environ_conditions_run Argument Table +!! \htmlinclude arg_table_environ_conditions_run.html +!! + subroutine environ_conditions_run(psurf, errmsg, errflg) + + ! This routine currently does nothing -- should update values + + real(kind_phys), intent(in) :: psurf(:) + character(len=512), intent(out) :: errmsg + integer, intent(out) :: errflg + + errmsg = '' + errflg = 0 + + END SUBROUTINE environ_conditions_run + +!> \section arg_table_environ_conditions_init Argument Table +!! \htmlinclude arg_table_environ_conditions_init.html +!! + subroutine environ_conditions_init (nbox, O3, HNO3, ntimes, model_times, & + errmsg, errflg) + + integer, intent(in) :: nbox + real(kind_phys), intent(out) :: O3(:) + real(kind_phys), intent(out) :: HNO3(:) + integer, intent(out) :: ntimes + integer, allocatable, intent(out) :: model_times(:) + character(len=512), intent(out) :: errmsg + integer, intent(out) :: errflg +!---------------------------------------------------------------- + + integer :: i, j + + errmsg = '' + errflg = 0 + + ! This may be replaced with MusicBox json environmental conditions reader??? + + do i = 1, nbox + O3(i) = real(i, kind_phys) * 1.e-6_kind_phys + HNO3(i) = real(i, kind_phys) * 1.e-9_kind_phys + end do + + ntimes = input_model_times + allocate(model_times(ntimes)) + model_times = input_model_values + + end subroutine environ_conditions_init + +!> \section arg_table_environ_conditions_finalize Argument Table +!! \htmlinclude arg_table_environ_conditions_finalize.html +!! 
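The suite definition files used by this test (ddt_suite.xml above, plus temp_suite.xml listed in the CMakeLists.txt) are small XML documents naming an ordered set of schemes per group. A short Python look at that structure, using the ddt_suite.xml contents shown above, is sketched below purely for illustration.

    # A quick look at what a suite definition file contains, using the
    # ddt_suite.xml shown above (xml.etree is just for illustration).
    import xml.etree.ElementTree as ET

    suite = ET.fromstring("""
    <suite name="ddt_suite" version="1.0">
      <group name="data_prep">
        <scheme>make_ddt</scheme>
        <scheme>environ_conditions</scheme>
      </group>
    </suite>""")
    for group in suite.findall("group"):
        print(suite.get("name"), group.get("name"),
              [scheme.text for scheme in group.findall("scheme")])
    # -> ddt_suite data_prep ['make_ddt', 'environ_conditions']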
+ subroutine environ_conditions_finalize (ntimes, model_times, errmsg, errflg) + + integer, intent(in) :: ntimes + integer, intent(in) :: model_times(:) + character(len=512), intent(out) :: errmsg + integer, intent(out) :: errflg + + ! This routine checks the size and values of model_times + if (ntimes /= input_model_times) then + errflg = 1 + write(errmsg, '(2(a,i0))') 'ntimes mismatch, ', ntimes, ' should be ', & + input_model_times + else if (size(model_times) /= input_model_times) then + errflg = 1 + write(errmsg, '(2(a,i0))') 'model_times size mismatch, ', & + size(model_times), ' should be ', input_model_times + else if (ANY(model_times /= input_model_values)) then + errflg = 1 + write(errmsg, *) 'model_times mismatch, ', & + model_times, ' should be ', input_model_values + else + errmsg = '' + errflg = 0 + end if + + end subroutine environ_conditions_finalize + +END MODULE environ_conditions diff --git a/test/capgen_test/environ_conditions.meta b/test/capgen_test/environ_conditions.meta new file mode 100644 index 00000000..114f151f --- /dev/null +++ b/test/capgen_test/environ_conditions.meta @@ -0,0 +1,110 @@ +[ccpp-table-properties] + name = environ_conditions + type = scheme +[ccpp-arg-table] + name = environ_conditions_run + type = scheme +[ psurf ] + standard_name = surface_air_pressure + state_variable = true + type = real + kind = kind_phys + units = Pa + dimensions = (horizontal_loop_extent) + intent = in +[ errmsg ] + standard_name = ccpp_error_message + long_name = Error message for error handling in CCPP + units = 1 + dimensions = () + type = character + kind = len=512 + intent = out +[ errflg ] + standard_name = ccpp_error_flag + long_name = Error flag for error handling in CCPP + units = flag + dimensions = () + type = integer + intent = out +[ccpp-arg-table] + name = environ_conditions_init + type = scheme +[ nbox ] + standard_name = horizontal_dimension + type = integer + units = count + dimensions = () + intent = in +[ o3 ] + standard_name = ozone + units = ppmv + dimensions = (horizontal_dimension) + type = real + kind = kind_phys + intent = out +[ hno3 ] + standard_name = nitric_acid + units = ppmv + dimensions = (horizontal_dimension) + type = real + kind = kind_phys + intent = out +[ ntimes ] + standard_name = number_of_model_times + type = integer + units = count + dimensions = () + intent = out +[ model_times ] + standard_name = model_times + units = seconds + dimensions = (number_of_model_times) + type = integer + intent = out + allocatable = True +[ errmsg ] + standard_name = ccpp_error_message + long_name = Error message for error handling in CCPP + units = 1 + dimensions = () + type = character + kind = len=512 + intent = out +[ errflg ] + standard_name = ccpp_error_flag + long_name = Error flag for error handling in CCPP + units = flag + dimensions = () + type = integer + intent = out +[ccpp-arg-table] + name = environ_conditions_finalize + type = scheme +[ ntimes ] + standard_name = number_of_model_times + type = integer + units = count + dimensions = () + intent = in +[ model_times ] + standard_name = model_times + units = seconds + dimensions = (number_of_model_times) + type = integer + intent = in +[ errmsg ] + standard_name = ccpp_error_message + long_name = Error message for error handling in CCPP + units = 1 + dimensions = () + type = character + kind = len=512 + intent = out +[ errflg ] + standard_name = ccpp_error_flag + long_name = Error flag for error handling in CCPP + units = flag + dimensions = () + type = integer + intent = out diff --git 
a/test/capgen_test/make_ddt.F90 b/test/capgen_test/make_ddt.F90 new file mode 100644 index 00000000..483cc570 --- /dev/null +++ b/test/capgen_test/make_ddt.F90 @@ -0,0 +1,132 @@ +!Hello demonstration parameterization +! + +MODULE make_ddt + + USE ccpp_kinds, ONLY: kind_phys + + IMPLICIT NONE + PRIVATE + + PUBLIC :: make_ddt_init + PUBLIC :: make_ddt_run + PUBLIC :: make_ddt_timestep_final + PUBLIC :: vmr_type + + !> \section arg_table_vmr_type Argument Table + !! \htmlinclude arg_table_vmr_type.html + !! + type vmr_type + integer :: nvmr + real(kind_phys), allocatable :: vmr_array(:,:) + end type vmr_type + + +CONTAINS + + !> \section arg_table_make_ddt_run Argument Table + !! \htmlinclude arg_table_make_ddt_run.html + !! + SUBROUTINE make_ddt_run(cols, cole, O3, HNO3, vmr, errmsg, errflg) + !---------------------------------------------------------------- + IMPLICIT NONE + !---------------------------------------------------------------- + + ! Dummy arguments + integer, intent(in) :: cols + integer, intent(in) :: cole + REAL(kind_phys), intent(in) :: O3(:) + REAL(kind_phys), intent(in) :: HNO3(:) + type(vmr_type), intent(inout) :: vmr + character(len=512), intent(out) :: errmsg + integer, intent(out) :: errflg + ! Local variable + integer :: nbox + !---------------------------------------------------------------- + + errmsg = '' + errflg = 0 + + ! Check for correct threading behavior + nbox = cole - cols + 1 + if (SIZE(O3) /= nbox) then + errflg = 1 + write(errmsg, '(2(a,i0))') 'SIZE(O3) = ', SIZE(O3), ', should be ', nbox + else if (SIZE(HNO3) /= nbox) then + errflg = 1 + write(errmsg, '(2(a,i0))') 'SIZE(HNO3) = ', SIZE(HNO3), & + ', should be ', nbox + else + ! NOTE -- This is prototyping one approach to passing a large number of + ! chemical VMR values and is the predecssor for adding in methods and + ! maybe nesting DDTs (especially for aerosols) + vmr%vmr_array(cols:cole, 1) = O3(:) + vmr%vmr_array(cols:cole, 2) = HNO3(:) + end if + + END SUBROUTINE make_ddt_run + + !> \section arg_table_make_ddt_init Argument Table + !! \htmlinclude arg_table_make_ddt_init.html + !! + subroutine make_ddt_init(nbox, vmr, errmsg, errflg) + + ! Dummy arguments + integer, intent(in) :: nbox + type(vmr_type), intent(out) :: vmr + character(len=512), intent(out) :: errmsg + integer, intent(out) :: errflg + + ! This routine initializes the vmr array + vmr%nvmr = 2 + allocate(vmr%vmr_array(nbox, vmr%nvmr)) + + errmsg = '' + errflg = 0 + + end subroutine make_ddt_init + + !> \section arg_table_make_ddt_timestep_final Argument Table + !! \htmlinclude arg_table_make_ddt_timestep_final.html + !! + subroutine make_ddt_timestep_final (ncols, vmr, errmsg, errflg) + + ! Dummy arguments + integer, intent(in) :: ncols + type(vmr_type), intent(in) :: vmr + character(len=512), intent(out) :: errmsg + integer, intent(out) :: errflg + ! Local variables + integer :: index + real(kind_phys) :: rind + + errmsg = '' + errflg = 0 + + ! 
This routine checks the array values in vmr + if (SIZE(vmr%vmr_array, 1) /= ncols) then + errflg = 1 + write(errmsg, '(2(a,i0))') 'VMR%VMR_ARRAY first dimension size is, ', & + SIZE(vmr%vmr_array, 1), ', should be, ', ncols + else + do index = 1, ncols + rind = real(index, kind_phys) + if (vmr%vmr_array(index, 1) /= rind * 1.e-6_kind_phys) then + errflg = 1 + write(errmsg, '(a,i0,2(a,e12.4))') 'O3(', index, ') = ', & + vmr%vmr_array(index, 1), ', should be, ', & + rind * 1.e-6_kind_phys + exit + else if (vmr%vmr_array(index, 2) /= rind * 1.e-9_kind_phys) then + errflg = 1 + write(errmsg, '(a,i0,2(a,e12.4))') 'HNO3(', index, ') = ', & + vmr%vmr_array(index, 2), ', should be, ', & + rind * 1.e-9_kind_phys + exit + end if + end do + end if + + end subroutine make_ddt_timestep_final + +END MODULE make_ddt diff --git a/test/capgen_test/make_ddt.meta b/test/capgen_test/make_ddt.meta new file mode 100644 index 00000000..2d1f766d --- /dev/null +++ b/test/capgen_test/make_ddt.meta @@ -0,0 +1,127 @@ +[ccpp-table-properties] + name = vmr_type + type = ddt +[ccpp-arg-table] + name = vmr_type + type = ddt +[ nvmr ] + standard_name = number_of_chemical_species + units = count + dimensions = () + type = integer +[ vmr_array ] + standard_name = array_of_volume_mixing_ratios + units = ppmv + dimensions = (horizontal_loop_extent, number_of_chemical_species) + type = real + kind = kind_phys +[ccpp-table-properties] + name = make_ddt + type = scheme +[ccpp-arg-table] + name = make_ddt_run + type = scheme +[ cols ] + standard_name = horizontal_loop_begin + type = integer + units = count + dimensions = () + intent = in +[ cole ] + standard_name = horizontal_loop_end + type = integer + units = count + dimensions = () + intent = in +[ O3 ] + standard_name = ozone + units = ppmv + dimensions = (horizontal_loop_extent) + type = real + kind = kind_phys + intent = in +[ HNO3 ] + standard_name = nitric_acid + units = ppmv + dimensions = (horizontal_loop_extent) + type = real + kind = kind_phys + intent = in +[ vmr ] + standard_name = volume_mixing_ratio_ddt + dimensions = () + type = vmr_type + intent = inout +[ errmsg ] + standard_name = ccpp_error_message + long_name = Error message for error handling in CCPP + units = 1 + dimensions = () + type = character + kind = len=512 + intent = out +[ errflg ] + standard_name = ccpp_error_flag + long_name = Error flag for error handling in CCPP + units = flag + dimensions = () + type = integer + intent = out +[ccpp-arg-table] + name = make_ddt_init + type = scheme +[ nbox ] + standard_name = horizontal_dimension + type = integer + units = count + dimensions = () + intent = in +[ vmr ] + standard_name = volume_mixing_ratio_ddt + dimensions = () + type = vmr_type + intent = out +[ errmsg ] + standard_name = ccpp_error_message + long_name = Error message for error handling in CCPP + units = 1 + dimensions = () + type = character + kind = len=512 + intent = out +[ errflg ] + standard_name = ccpp_error_flag + long_name = Error flag for error handling in CCPP + units = flag + dimensions = () + type = integer + intent = out +[ccpp-arg-table] + name = make_ddt_timestep_final + type = scheme +[ ncols ] + standard_name = horizontal_dimension + type = integer + units = count + dimensions = () + intent = in +[ vmr ] + standard_name = volume_mixing_ratio_ddt + dimensions = () + type = vmr_type + intent = in +[ errmsg ] + standard_name = ccpp_error_message + long_name = Error message for error handling in CCPP + units = 1 + dimensions = () + type = character + kind = len=512 + intent = out 
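Taken together with environ_conditions_init earlier in this diff, the checks in make_ddt_timestep_final above close a simple data-flow loop: the init phase fills O3 and HNO3 with i*1.e-6 and i*1.e-9, make_ddt_run packs them into the vmr DDT over its column chunk, and the timestep_final phase verifies the packed values. Below is a plain-Python sketch of that round trip with no CCPP machinery; the helper names are illustrative.

    # Sketch of the value round trip checked by the ddt_suite test:
    # environ_conditions_init -> make_ddt_run -> make_ddt_timestep_final.
    NBOX = 10

    def environ_init(nbox):
        o3 = [(i + 1) * 1.0e-6 for i in range(nbox)]
        hno3 = [(i + 1) * 1.0e-9 for i in range(nbox)]
        return o3, hno3

    def pack_vmr(cols, cole, o3, hno3, vmr):
        # cols/cole are 1-based loop bounds, as in make_ddt_run
        vmr[0][cols - 1:cole] = o3
        vmr[1][cols - 1:cole] = hno3

    def timestep_final_check(vmr):
        return all(vmr[0][i] == (i + 1) * 1.0e-6 and
                   vmr[1][i] == (i + 1) * 1.0e-9 for i in range(NBOX))

    vmr = [[0.0] * NBOX, [0.0] * NBOX]      # two constituents: O3, HNO3
    o3, hno3 = environ_init(NBOX)
    pack_vmr(1, NBOX, o3, hno3, vmr)        # one chunk covering all boxes
    print("vmr values check out:", timestep_final_check(vmr))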
+[ errflg ] + standard_name = ccpp_error_flag + long_name = Error flag for error handling in CCPP + units = flag + dimensions = () + type = integer + intent = out diff --git a/test/capgen_test/run_test b/test/capgen_test/run_test new file mode 100755 index 00000000..c585092a --- /dev/null +++ b/test/capgen_test/run_test @@ -0,0 +1,271 @@ +#! /bin/bash + +currdir="`pwd -P`" +scriptdir="$( cd $( dirname $0 ); pwd -P )" + +## +## Option default values +## +defdir="ct_build" +build_dir="${currdir}/${defdir}" +cleanup="PASS" # Other supported options are ALWAYS and NEVER +verbosity=0 + +## +## General syntax help function +## Usage: help <exit status> +## +help () { + local hname="Usage: `basename ${0}`" + local hprefix="`echo ${hname} | tr '[!-~]' ' '`" + echo "${hname} [ --build-dir <dir name> ] [ --cleanup <opt> ]" + echo "${hprefix} [ --verbosity <#> ]" + hprefix=" " + echo "" + echo "${hprefix} <dir name>: Directory for building and running the test" + echo "${hprefix} default is <current directory>/${defdir}" + echo "${hprefix} <opt>: Cleanup option is ALWAYS, NEVER, or PASS" + echo "${hprefix} default is PASS" + echo "${hprefix} verbosity: 0, 1, or 2" + echo "${hprefix} default is 0" + exit $1 +} + +## +## Error output function (should be handed a string) +## +perr() { + >&2 echo -e "\nERROR: ${@}\n" + exit 1 +} + +## +## Cleanup the build and test directory +## +docleanup() { + # We start off in the build directory + if [ "${build_dir}" == "${currdir}" ]; then + echo "WARNING: Cannot clean ${build_dir}" + else + cd ${currdir} + rm -rf ${build_dir} + fi +} + +## Process our input arguments +while [ $# -gt 0 ]; do + case $1 in + --h | -h | --help | -help) + help 0 + ;; + --build-dir) + if [ $# -lt 2 ]; then + perr "${1} requires a build directory" + fi + build_dir="${2}" + shift + ;; + --cleanup) + if [ $# -lt 2 ]; then + perr "${1} requies a cleanup option (ALWAYS, NEVER, PASS)" + fi + if [ "${2}" == "ALWAYS" -o "${2}" == "NEVER" -o "${2}" == "PASS" ]; then + cleanup="${2}" + else + perr "Allowed cleanup options: ALWAYS, NEVER, PASS" + fi + shift + ;; + --verbosity) + if [ $# -lt 2 ]; then + perr "${1} requires a verbosity value (0, 1, or 2)" + fi + if [ "${2}" == "0" -o "${2}" == "1" -o "${2}" == "2" ]; then + verbosity=$2 + else + perr "allowed verbosity levels are 0, 1, 2" + fi + shift + ;; + *) + perr "Unrecognized option, \"${1}\"" + ;; + esac + shift +done + +# Create the build directory, if necessary +if [ -d "${build_dir}" ]; then + # Always make sure build_dir is not in the test dir + if [ "$( cd ${build_dir}; pwd -P )" == "${currdir}" ]; then + build_dir="${build_dir}/${defdir}" + fi +else + mkdir -p ${build_dir} + res=$? 
+ if [ $res -ne 0 ]; then + perr "Unable to create build directory, '${build_dir}'" + fi +fi +build_dir="$( cd ${build_dir}; pwd -P )" + +## framework is the CCPP Framework root dir +framework="$( cd $( dirname $( dirname ${scriptdir} ) ); pwd -P )" +frame_src="${framework}/src" + +## +## check strings for datafile command-list test +## NB: This has to be after build_dir is finalized +## +host_files="${build_dir}/ccpp/test_host_ccpp_cap.F90" +suite_files="${build_dir}/ccpp/ccpp_ddt_suite_cap.F90" +suite_files="${suite_files},${build_dir}/ccpp/ccpp_temp_suite_cap.F90" +utility_files="${build_dir}/ccpp/ccpp_kinds.F90" +utility_files="${utility_files},${frame_src}/ccpp_constituent_prop_mod.F90" +utility_files="${utility_files},${frame_src}/ccpp_hashable.F90" +utility_files="${utility_files},${frame_src}/ccpp_hash_table.F90" +ccpp_files="${utility_files}" +ccpp_files="${ccpp_files},${build_dir}/ccpp/test_host_ccpp_cap.F90" +ccpp_files="${ccpp_files},${build_dir}/ccpp/ccpp_ddt_suite_cap.F90" +ccpp_files="${ccpp_files},${build_dir}/ccpp/ccpp_temp_suite_cap.F90" +process_list="setter=temp_set,adjusting=temp_calc_adjust" +module_list="environ_conditions,make_ddt,temp_adjust,temp_calc_adjust,temp_set" +dependencies="bar.F90,foo.F90,qux.F90" +suite_list="ddt_suite;temp_suite" +required_vars_ddt="ccpp_error_flag,ccpp_error_message,horizontal_dimension" +required_vars_ddt="${required_vars_ddt},horizontal_loop_begin" +required_vars_ddt="${required_vars_ddt},horizontal_loop_end" +required_vars_ddt="${required_vars_ddt},model_times" +required_vars_ddt="${required_vars_ddt},number_of_model_times" +required_vars_ddt="${required_vars_ddt},surface_air_pressure" +input_vars_ddt="horizontal_dimension" +input_vars_ddt="${input_vars_ddt},horizontal_loop_begin" +input_vars_ddt="${input_vars_ddt},horizontal_loop_end" +input_vars_ddt="${input_vars_ddt},model_times,number_of_model_times" +input_vars_ddt="${input_vars_ddt},surface_air_pressure" +output_vars_ddt="ccpp_error_flag,ccpp_error_message" +output_vars_ddt="${output_vars_ddt},model_times,number_of_model_times" +required_vars_temp="ccpp_error_flag,ccpp_error_message,horizontal_dimension" +required_vars_temp="${required_vars_temp},horizontal_loop_begin" +required_vars_temp="${required_vars_temp},horizontal_loop_end" +required_vars_temp="${required_vars_temp},potential_temperature" +required_vars_temp="${required_vars_temp},potential_temperature_at_interface" +required_vars_temp="${required_vars_temp},potential_temperature_increment" +required_vars_temp="${required_vars_temp},surface_air_pressure" +required_vars_temp="${required_vars_temp},time_step_for_physics" +required_vars_temp="${required_vars_temp},vertical_layer_dimension" +required_vars_temp="${required_vars_temp},water_vapor_specific_humidity" +input_vars_temp="horizontal_dimension" +input_vars_temp="${input_vars_temp},horizontal_loop_begin" +input_vars_temp="${input_vars_temp},horizontal_loop_end" +input_vars_temp="${input_vars_temp},potential_temperature" +input_vars_temp="${input_vars_temp},potential_temperature_at_interface" +input_vars_temp="${input_vars_temp},potential_temperature_increment" +input_vars_temp="${input_vars_temp},surface_air_pressure,time_step_for_physics" +input_vars_temp="${input_vars_temp},vertical_layer_dimension" +input_vars_temp="${input_vars_temp},water_vapor_specific_humidity" +output_vars_temp="ccpp_error_flag,ccpp_error_message,potential_temperature" +output_vars_temp="${output_vars_temp},potential_temperature_at_interface" 
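The expected-value strings assembled in this script feed the check_datatable helper defined further down, which compares the literal report output of ccpp_datafile.py against the expected comma-separated list (the Python counterpart in test_reports.py does an order-insensitive comparison instead). A small Python sketch of the same command-line check follows; the script path and datatable location are illustrative.

    # Python sketch of the command-line datatable check performed by the
    # shell helper check_datatable below (paths are illustrative).
    import subprocess

    def check_report(datafile, report_flag, expected, *extra_args):
        cmd = ["../../scripts/ccpp_datafile.py", datafile, report_flag,
               *extra_args]
        out = subprocess.run(cmd, capture_output=True, text=True,
                             check=True).stdout.strip()
        if out != expected:
            print("Mismatch for", report_flag,
                  "\n  expected:", expected, "\n  got:     ", out)
        return out == expected

    # e.g. check_report("ct_build/ccpp/datatable.xml", "--suite-list",
    #                   "ddt_suite;temp_suite", "--sep", ";")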
+output_vars_temp="${output_vars_temp},surface_air_pressure" +output_vars_temp="${output_vars_temp},water_vapor_specific_humidity" + +## +## Run a database report and check the return string +## $1 is the report program file +## $2 is the database file +## $3 is the report string +## $4 is the check string +## $5+ are any optional arguments +## +check_datatable() { + local checkstr=${4} + local teststr + local prog=${1} + local database=${2} + local report=${3} + shift 4 + echo "Checking ${report} report" + teststr="`${prog} ${database} ${report} $@`" + if [ "${teststr}" != "${checkstr}" ]; then + perr "datatable check:\nExpected: '${checkstr}'\nGot: '${teststr}'" + fi +} + +# cd to the build directory +cd ${build_dir} +res=$? +if [ $res -ne 0 ]; then + perr "Unable to cd to build directory, '${build_dir}'" +fi +# Clean build directory +rm -rf * +res=$? +if [ $res -ne 0 ]; then + perr "Unable to clean build directory, '${build_dir}'" +fi +# Run CMake +opts="" +if [ $verbosity -gt 0 ]; then + opts="${opts} -DVERBOSITY=${verbosity}" +fi +# Run cmake +cmake ${scriptdir} ${opts} +res=$? +if [ $res -ne 0 ]; then + perr "CMake failed with exit code, ${res}" +fi +# Test the datafile user interface +report_prog="${framework}/scripts/ccpp_datafile.py" +datafile="${build_dir}/ccpp/datatable.xml" +echo "Running python interface tests" +python ${scriptdir}/test_reports.py ${build_dir} ${datafile} +res=$? +if [ $res -ne 0 ]; then + perr "python interface tests failed" +fi +echo "Running command line tests" +echo "Checking required files from command line:" +check_datatable ${report_prog} ${datafile} "--host-files" ${host_files} +check_datatable ${report_prog} ${datafile} "--suite-files" ${suite_files} +check_datatable ${report_prog} ${datafile} "--utility-files" ${utility_files} +check_datatable ${report_prog} ${datafile} "--ccpp-files" ${ccpp_files} +echo -e "\nChecking lists from command line" +check_datatable ${report_prog} ${datafile} "--process-list" ${process_list} +check_datatable ${report_prog} ${datafile} "--module-list" ${module_list} +check_datatable ${report_prog} ${datafile} "--dependencies" ${dependencies} +check_datatable ${report_prog} ${datafile} "--suite-list" ${suite_list} \ + --sep ";" +echo -e "\nChecking variables for DDT suite from command line" +check_datatable ${report_prog} ${datafile} "--required-variables" \ + ${required_vars_ddt} "ddt_suite" +check_datatable ${report_prog} ${datafile} "--input-variables" \ + ${input_vars_ddt} "ddt_suite" +check_datatable ${report_prog} ${datafile} "--output-variables" \ + ${output_vars_ddt} "ddt_suite" +echo -e "\nChecking variables for temp suite from command line" +check_datatable ${report_prog} ${datafile} "--required-variables" \ + ${required_vars_temp} "temp_suite" +check_datatable ${report_prog} ${datafile} "--input-variables" \ + ${input_vars_temp} "temp_suite" +check_datatable ${report_prog} ${datafile} "--output-variables" \ + ${output_vars_temp} "temp_suite" +# Run make +make +res=$? +if [ $res -ne 0 ]; then + perr "make failed with exit code, ${res}" +fi +# Run test +./test_host +res=$? +if [ $res -ne 0 ]; then + perr "test_host failed with exit code, ${res}" +fi + +if [ "${cleanup}" == "ALWAYS" ]; then + docleanup +elif [ $res -eq 0 -a "${cleanup}" == "PASS" ]; then + docleanup +fi + +exit $res diff --git a/test/capgen_test/temp_adjust.F90 b/test/capgen_test/temp_adjust.F90 new file mode 100644 index 00000000..52379e22 --- /dev/null +++ b/test/capgen_test/temp_adjust.F90 @@ -0,0 +1,82 @@ +! 
Test parameterization with no vertical level +! + +MODULE temp_adjust + + USE ccpp_kinds, ONLY: kind_phys + + IMPLICIT NONE + PRIVATE + + PUBLIC :: temp_adjust_init + PUBLIC :: temp_adjust_run + PUBLIC :: temp_adjust_finalize + +CONTAINS + + !> \section arg_table_temp_adjust_run Argument Table + !! \htmlinclude arg_table_temp_adjust_run.html + !! + subroutine temp_adjust_run(foo, timestep, temp_prev, temp_layer, qv, ps, & + errmsg, errflg, innie, outie, optsie) + + integer, intent(in) :: foo + real(kind_phys), intent(in) :: timestep + real(kind_phys), intent(inout) :: qv(:) + real(kind_phys), intent(inout) :: ps(:) + REAL(kind_phys), intent(in) :: temp_prev(:) + REAL(kind_phys), intent(inout) :: temp_layer(foo) + character(len=512), intent(out) :: errmsg + integer, intent(out) :: errflg + real(kind_phys), optional, intent(in) :: innie + real(kind_phys), optional, intent(out) :: outie + real(kind_phys), optional, intent(inout) :: optsie + !---------------------------------------------------------------- + + integer :: col_index + + errmsg = '' + errflg = 0 + + do col_index = 1, foo + temp_layer(col_index) = temp_layer(col_index) + temp_prev(col_index) + qv(col_index) = qv(col_index) + 1.0_kind_phys + end do + if (present(innie) .and. present(outie) .and. present(optsie)) then + outie = innie * optsie + optsie = optsie + 1.0_kind_phys + end if + + END SUBROUTINE temp_adjust_run + + !> \section arg_table_temp_adjust_init Argument Table + !! \htmlinclude arg_table_temp_adjust_init.html + !! + subroutine temp_adjust_init (errmsg, errflg) + + character(len=512), intent(out) :: errmsg + integer, intent(out) :: errflg + + ! This routine currently does nothing + + errmsg = '' + errflg = 0 + + end subroutine temp_adjust_init + + !> \section arg_table_temp_adjust_finalize Argument Table + !! \htmlinclude arg_table_temp_adjust_finalize.html + !! + subroutine temp_adjust_finalize (errmsg, errflg) + + character(len=512), intent(out) :: errmsg + integer, intent(out) :: errflg + + ! 
This routine currently does nothing + + errmsg = '' + errflg = 0 + + end subroutine temp_adjust_finalize + +END MODULE temp_adjust diff --git a/test/capgen_test/temp_adjust.meta b/test/capgen_test/temp_adjust.meta new file mode 100644 index 00000000..14ad0051 --- /dev/null +++ b/test/capgen_test/temp_adjust.meta @@ -0,0 +1,102 @@ +[ccpp-table-properties] + name = temp_adjust + type = scheme +[ccpp-arg-table] + name = temp_adjust_run + type = scheme +[ foo ] + standard_name = horizontal_loop_extent + type = integer + units = count + dimensions = () + intent = in +[ timestep ] + standard_name = time_step_for_physics + long_name = time step + units = s + dimensions = () + type = real + kind = kind_phys + intent = in +[ temp_prev ] + standard_name = potential_temperature_at_previous_timestep + units = K + dimensions = (horizontal_loop_extent) + type = real + kind = kind_phys + intent = in +[ temp_layer ] + standard_name = potential_temperature + units = K + dimensions = (horizontal_loop_extent) + type = real + kind = kind_phys + intent = inout + diagnostic_name = temperature +[ qv ] + standard_name = water_vapor_specific_humidity + units = kg kg-1 + dimensions = (horizontal_loop_extent) + type = real + kind = kind_phys + intent = inout + diagnostic_name_fixed = Q +[ ps ] + standard_name = surface_air_pressure + state_variable = true + type = real + kind = kind_phys + units = Pa + dimensions = (horizontal_loop_extent) + intent = inout +[ errmsg ] + standard_name = ccpp_error_message + long_name = Error message for error handling in CCPP + units = 1 + dimensions = () + type = character + kind = len=512 + intent = out +[ errflg ] + standard_name = ccpp_error_flag + long_name = Error flag for error handling in CCPP + units = flag + dimensions = () + type = integer + intent = out +[ccpp-arg-table] + name = temp_adjust_init + type = scheme +[ errmsg ] + standard_name = ccpp_error_message + long_name = Error message for error handling in CCPP + units = 1 + dimensions = () + type = character + kind = len=512 + intent = out +[ errflg ] + standard_name = ccpp_error_flag + long_name = Error flag for error handling in CCPP + units = flag + dimensions = () + type = integer + intent = out +[ccpp-arg-table] + name = temp_adjust_finalize + type = scheme +[ errmsg ] + standard_name = ccpp_error_message + long_name = Error message for error handling in CCPP + units = 1 + dimensions = () + type = character + kind = len=512 + intent = out +[ errflg ] + standard_name = ccpp_error_flag + long_name = Error flag for error handling in CCPP + units = flag + dimensions = () + type = integer + intent = out diff --git a/test/capgen_test/temp_calc_adjust.F90 b/test/capgen_test/temp_calc_adjust.F90 new file mode 100644 index 00000000..2094ecec --- /dev/null +++ b/test/capgen_test/temp_calc_adjust.F90 @@ -0,0 +1,70 @@ +!Test parameterization with no vertical level and hanging intent(out) variable +! + +MODULE temp_calc_adjust + + USE ccpp_kinds, ONLY: kind_phys + + IMPLICIT NONE + PRIVATE + + PUBLIC :: temp_calc_adjust_init + PUBLIC :: temp_calc_adjust_run + PUBLIC :: temp_calc_adjust_finalize + +CONTAINS + + !> \section arg_table_temp_calc_adjust_run Argument Table + !! \htmlinclude arg_table_temp_calc_adjust_run.html + !! 
+ SUBROUTINE temp_calc_adjust_run(nbox, timestep, temp_level, temp_calc, & + errmsg, errflg) + + integer, intent(in) :: nbox + real(kind_phys), intent(in) :: timestep + real(kind_phys), intent(in) :: temp_level(:,:) + REAL(kind_phys), intent(out) :: temp_calc(:) + character(len=512), intent(out) :: errmsg + integer, intent(out) :: errflg + !---------------------------------------------------------------- + + integer :: col_index + + errmsg = '' + errflg = 0 + + temp_calc = 1.0_kind_phys + + END SUBROUTINE temp_calc_adjust_run + + !> \section arg_table_temp_calc_adjust_init Argument Table + !! \htmlinclude arg_table_temp_calc_adjust_init.html + !! + subroutine temp_calc_adjust_init (errmsg, errflg) + + character(len=512), intent(out) :: errmsg + integer, intent(out) :: errflg + + ! This routine currently does nothing + + errmsg = '' + errflg = 0 + + end subroutine temp_calc_adjust_init + + !> \section arg_table_temp_calc_adjust_finalize Argument Table + !! \htmlinclude arg_table_temp_calc_adjust_finalize.html + !! + subroutine temp_calc_adjust_finalize (errmsg, errflg) + + character(len=512), intent(out) :: errmsg + integer, intent(out) :: errflg + + ! This routine currently does nothing + + errmsg = '' + errflg = 0 + + end subroutine temp_calc_adjust_finalize + +END MODULE temp_calc_adjust diff --git a/test/capgen_test/temp_calc_adjust.meta b/test/capgen_test/temp_calc_adjust.meta new file mode 100644 index 00000000..2ea400c5 --- /dev/null +++ b/test/capgen_test/temp_calc_adjust.meta @@ -0,0 +1,88 @@ +[ccpp-table-properties] + name = temp_calc_adjust + type = scheme + dependencies = foo.F90, bar.F90 + dependencies = qux.F90 +[ccpp-arg-table] + name = temp_calc_adjust_run + type = scheme + process = adjusting +[ nbox ] + standard_name = horizontal_loop_extent + type = integer + units = count + dimensions = () + intent = in +[ timestep ] + standard_name = time_step_for_physics + long_name = time step + units = s + dimensions = () + type = real + kind = kind_phys + intent = in +[ temp_level ] + standard_name = potential_temperature_at_interface + units = K + dimensions = (ccpp_constant_one:horizontal_loop_extent, vertical_interface_dimension) + type = real + kind = kind_phys + intent = in +[ temp_calc ] + standard_name = potential_temperature_at_previous_timestep + units = K + dimensions = (horizontal_loop_extent) + type = real + kind = kind_phys + intent = out +[ errmsg ] + standard_name = ccpp_error_message + long_name = Error message for error handling in CCPP + units = 1 + dimensions = () + type = character + kind = len=512 + intent = out +[ errflg ] + standard_name = ccpp_error_flag + long_name = Error flag for error handling in CCPP + units = flag + dimensions = () + type = integer + intent = out +[ccpp-arg-table] + name = temp_calc_adjust_init + type = scheme +[ errmsg ] + standard_name = ccpp_error_message + long_name = Error message for error handling in CCPP + units = 1 + dimensions = () + type = character + kind = len=512 + intent = out +[ errflg ] + standard_name = ccpp_error_flag + long_name = Error flag for error handling in CCPP + units = flag + dimensions = () + type = integer + intent = out +[ccpp-arg-table] + name = temp_calc_adjust_finalize + type = scheme +[ errmsg ] + standard_name = ccpp_error_message + long_name = Error message for error handling in CCPP + units = 1 + dimensions = () + type = character + kind = len=512 + intent = out +[ errflg ] + standard_name = ccpp_error_flag + long_name = Error flag for error handling in CCPP + units = flag + dimensions = () + type 
= integer + intent = out diff --git a/test/capgen_test/temp_scheme_files.txt b/test/capgen_test/temp_scheme_files.txt new file mode 100644 index 00000000..c2b56c0f --- /dev/null +++ b/test/capgen_test/temp_scheme_files.txt @@ -0,0 +1,3 @@ +temp_set.meta +temp_adjust.meta +temp_calc_adjust.meta diff --git a/test/capgen_test/temp_set.F90 b/test/capgen_test/temp_set.F90 new file mode 100644 index 00000000..fc2a9f39 --- /dev/null +++ b/test/capgen_test/temp_set.F90 @@ -0,0 +1,111 @@ +!Test 3D parameterization +! + +MODULE temp_set + + USE ccpp_kinds, ONLY: kind_phys + + IMPLICIT NONE + PRIVATE + + PUBLIC :: temp_set_init + PUBLIC :: temp_set_timestep_initialize + PUBLIC :: temp_set_run + PUBLIC :: temp_set_finalize + +CONTAINS + +!> \section arg_table_temp_set_run Argument Table +!! \htmlinclude arg_table_temp_set_run.html +!! + SUBROUTINE temp_set_run(ncol, lev, timestep, temp_level, temp, ps, & + errmsg, errflg) +!---------------------------------------------------------------- + IMPLICIT NONE +!---------------------------------------------------------------- + + integer, intent(in) :: ncol, lev + REAL(kind_phys), intent(out) :: temp(:,:) + real(kind_phys), intent(in) :: timestep + real(kind_phys), intent(in) :: ps(:) + REAL(kind_phys), INTENT(inout) :: temp_level(:, :) + character(len=512), intent(out) :: errmsg + integer, intent(out) :: errflg +!---------------------------------------------------------------- + integer :: ilev + + integer :: col_index + integer :: lev_index + + errmsg = '' + errflg = 0 + + ilev = size(temp_level, 2) + if (ilev /= (lev + 1)) then + errflg = 1 + errmsg = 'Invalid value for ilev, must be lev+1' + return + end if + + do col_index = 1, ncol + do lev_index = 1, lev + temp(col_index, lev_index) = (temp_level(col_index, lev_index) & + + temp_level(col_index, lev_index + 1)) / 2.0_kind_phys + end do + end do + + END SUBROUTINE temp_set_run + +!> \section arg_table_temp_set_init Argument Table +!! \htmlinclude arg_table_temp_set_init.html +!! + subroutine temp_set_init(temp_inc_in, fudge, temp_inc_set, errmsg, errflg) + + real(kind_phys), intent(in) :: temp_inc_in + real(kind_phys), intent(in) :: fudge + real(kind_phys), intent(out) :: temp_inc_set + character(len=512), intent(out) :: errmsg + integer, intent(out) :: errflg + + temp_inc_set = temp_inc_in + + errmsg = '' + errflg = 0 + + end subroutine temp_set_init + +!> \section arg_table_temp_set_timestep_initialize Argument Table +!! \htmlinclude arg_table_temp_set_timestep_initialize.html +!! + subroutine temp_set_timestep_initialize(ncol, temp_inc, temp_level, & + errmsg, errflg) + + integer, intent(in) :: ncol + real(kind_phys), intent(in) :: temp_inc + real(kind_phys), intent(inout) :: temp_level(:,:) + character(len=512), intent(out) :: errmsg + integer, intent(out) :: errflg + + errmsg = '' + errflg = 0 + + temp_level = temp_level + temp_inc + + end subroutine temp_set_timestep_initialize + +!> \section arg_table_temp_set_finalize Argument Table +!! \htmlinclude arg_table_temp_set_finalize.html +!! + subroutine temp_set_finalize(errmsg, errflg) + + character(len=512), intent(out) :: errmsg + integer, intent(out) :: errflg + + ! 
This routine currently does nothing + + errmsg = '' + errflg = 0 + + end subroutine temp_set_finalize + +END MODULE temp_set diff --git a/test/capgen_test/temp_set.meta b/test/capgen_test/temp_set.meta new file mode 100644 index 00000000..2b8d8827 --- /dev/null +++ b/test/capgen_test/temp_set.meta @@ -0,0 +1,167 @@ +[ccpp-table-properties] + name = temp_set + type = scheme +[ccpp-arg-table] + name = temp_set_run + type = scheme + process = setter +[ ncol ] + standard_name = horizontal_loop_extent + type = integer + units = count + dimensions = () + intent = in +[ lev ] + standard_name = vertical_layer_dimension + type = integer + units = count + dimensions = () + intent = in +[ timestep ] + standard_name = time_step_for_physics + long_name = time step + units = s + dimensions = () + type = real + kind = kind_phys + intent = in +[ temp_level ] + standard_name = potential_temperature_at_interface + units = K + dimensions = (ccpp_constant_one:horizontal_loop_extent, vertical_interface_dimension) + type = real + kind = kind_phys + intent = inout +[ temp ] + standard_name = potential_temperature + units = K + dimensions = (horizontal_loop_extent, vertical_layer_dimension) + type = real + kind = kind_phys + intent = out +[ ps ] + standard_name = surface_air_pressure + state_variable = true + type = real + kind = kind_phys + units = Pa + dimensions = (horizontal_loop_extent) + intent = in +[ errmsg ] + standard_name = ccpp_error_message + long_name = Error message for error handling in CCPP + units = 1 + dimensions = () + type = character + kind = len=512 + intent = out +[ errflg ] + standard_name = ccpp_error_flag + long_name = Error flag for error handling in CCPP + units = flag + dimensions = () + type = integer + intent = out +# Init +[ccpp-arg-table] + name = temp_set_init + type = scheme +[ temp_inc_in ] + standard_name = potential_temperature_increment + long_name = Per time step potential temperature increment + units = K + dimensions = () + type = real + kind = kind_phys + intent = in +[ fudge ] + standard_name = random_fudge_factor + long_name = Ignore this + units = 1 + dimensions = () + type = real + kind = kind_phys + intent = in + default_value = 1.0_kind_phys +[ temp_inc_set ] + standard_name = test_potential_temperature_increment + long_name = Per time step potential temperature increment + units = K + dimensions = () + type = real + kind = kind_phys + intent = out +[ errmsg ] + standard_name = ccpp_error_message + long_name = Error message for error handling in CCPP + units = 1 + dimensions = () + type = character + kind = len=512 + intent = out +[ errflg ] + standard_name = ccpp_error_flag + long_name = Error flag for error handling in CCPP + units = flag + dimensions = () + type = integer + intent = out +# Timestep Initialization +[ccpp-arg-table] + name = temp_set_timestep_initialize + type = scheme +[ ncol ] + standard_name = horizontal_dimension + type = integer + units = count + dimensions = () + intent = in +[ temp_inc ] + standard_name = test_potential_temperature_increment + long_name = Per time step potential temperature increment + units = K + dimensions = () + type = real + kind = kind_phys + intent = in +[ temp_level ] + standard_name = potential_temperature_at_interface + units = K + dimensions = (horizontal_dimension, vertical_interface_dimension) + type = real + kind = kind_phys + intent = inout +[ errmsg ] + standard_name = ccpp_error_message + long_name = Error message for error handling in CCPP + units = 1 + dimensions = () + type = character + kind = len=512 + 
intent = out +[ errflg ] + standard_name = ccpp_error_flag + long_name = Error flag for error handling in CCPP + units = flag + dimensions = () + type = integer + intent = out +# Finalize +[ccpp-arg-table] + name = temp_set_finalize + type = scheme +[ errmsg ] + standard_name = ccpp_error_message + long_name = Error message for error handling in CCPP + units = 1 + dimensions = () + type = character + kind = len=512 + intent = out +[ errflg ] + standard_name = ccpp_error_flag + long_name = Error flag for error handling in CCPP + units = flag + dimensions = () + type = integer + intent = out diff --git a/test/capgen_test/temp_suite.xml b/test/capgen_test/temp_suite.xml new file mode 100644 index 00000000..9974b02a --- /dev/null +++ b/test/capgen_test/temp_suite.xml @@ -0,0 +1,9 @@ +<?xml version="1.0" encoding="UTF-8"?> + +<suite name="temp_suite" version="1.0"> + <group name="physics"> + <scheme>temp_set</scheme> + <scheme>temp_calc_adjust</scheme> + <scheme>temp_adjust</scheme> + </group> +</suite> diff --git a/test/capgen_test/test_host.F90 b/test/capgen_test/test_host.F90 new file mode 100644 index 00000000..3681ce8b --- /dev/null +++ b/test/capgen_test/test_host.F90 @@ -0,0 +1,421 @@ +module test_prog + + use ccpp_kinds, only: kind_phys + + implicit none + private + + public test_host + + ! Public data and interfaces + integer, public, parameter :: cs = 16 + integer, public, parameter :: cm = 36 + + type, public :: suite_info + character(len=cs) :: suite_name = '' + character(len=cs), pointer :: suite_parts(:) => NULL() + character(len=cm), pointer :: suite_input_vars(:) => NULL() + character(len=cm), pointer :: suite_output_vars(:) => NULL() + character(len=cm), pointer :: suite_required_vars(:) => NULL() + end type suite_info + +CONTAINS + + logical function check_list(test_list, chk_list, list_desc, suite_name) + ! Check a list (<test_list>) against its expected value (<chk_list>) + + ! Dummy arguments + character(len=*), intent(in) :: test_list(:) + character(len=*), intent(in) :: chk_list(:) + character(len=*), intent(in) :: list_desc + character(len=*), optional, intent(in) :: suite_name + + ! Local variables + logical :: found + integer :: num_items + integer :: lindex, tindex + integer, allocatable :: check_unique(:) + character(len=2) :: sep + character(len=256) :: errmsg + + check_list = .true. + errmsg = '' + + ! Check the list size + num_items = size(chk_list) + if (size(test_list) /= num_items) then + write(errmsg, '(a,i0,2a)') 'ERROR: Found ', size(test_list), & + ' ', trim(list_desc) + if (present(suite_name)) then + write(errmsg(len_trim(errmsg)+1:), '(2a)') ' for suite, ', & + trim(suite_name) + end if + write(errmsg(len_trim(errmsg)+1:), '(a,i0)') ', should be ', num_items + write(6, *) trim(errmsg) + errmsg = '' + check_list = .false. + end if + + ! Now, check the list contents for 1-1 correspondence + if (check_list) then + allocate(check_unique(num_items)) + check_unique = -1 + do lindex = 1, num_items + found = .false. + do tindex = 1, num_items + if (trim(test_list(lindex)) == trim(chk_list(tindex))) then + check_unique(tindex) = lindex + found = .true. + exit + end if + end do + if (.not. found) then + check_list = .false. + write(errmsg, '(5a)') 'ERROR: ', trim(list_desc), ' item, ', & + trim(test_list(lindex)), ', was not found' + if (present(suite_name)) then + write(errmsg(len_trim(errmsg)+1:), '(2a)') ' in suite, ', & + trim(suite_name) + end if + write(6, *) trim(errmsg) + errmsg = '' + end if + end do + if (check_list .and. 
ANY(check_unique < 0)) then + check_list = .false. + write(errmsg, '(3a)') 'ERROR: The following ', trim(list_desc), & + ' items were not found' + if (present(suite_name)) then + write(errmsg(len_trim(errmsg)+1:), '(2a)') ' in suite, ', & + trim(suite_name) + end if + sep = '; ' + do lindex = 1, num_items + if (check_unique(lindex) < 0) then + write(errmsg(len_trim(errmsg)+1:), '(2a)') sep, & + trim(chk_list(lindex)) + sep = ', ' + end if + end do + write(6, *) trim(errmsg) + errmsg = '' + end if + end if + + end function check_list + + logical function check_suite(test_suite) + use test_host_ccpp_cap, only: ccpp_physics_suite_part_list + use test_host_ccpp_cap, only: ccpp_physics_suite_variables + + ! Dummy argument + type(suite_info), intent(in) :: test_suite + ! Local variables + integer :: sind + logical :: check + integer :: errflg + character(len=512) :: errmsg + character(len=128), allocatable :: test_list(:) + + check_suite = .true. + write(6, *) "Checking suite ", trim(test_suite%suite_name) + ! First, check the suite parts + call ccpp_physics_suite_part_list(test_suite%suite_name, test_list, & + errmsg, errflg) + if (errflg == 0) then + check = check_list(test_list, test_suite%suite_parts, 'part names', & + suite_name=test_suite%suite_name) + else + check = .false. + write(6, '(a,i0,2a)') 'ERROR ', errflg, ': ', trim(errmsg) + end if + check_suite = check_suite .and. check + if (allocated(test_list)) then + deallocate(test_list) + end if + ! Check the input variables + call ccpp_physics_suite_variables(test_suite%suite_name, test_list, & + errmsg, errflg, input_vars=.true., output_vars=.false.) + if (errflg == 0) then + check = check_list(test_list, test_suite%suite_input_vars, & + 'input variable names', suite_name=test_suite%suite_name) + else + check = .false. + write(6, '(a,i0,2a)') 'ERROR ', errflg, ': ', trim(errmsg) + end if + check_suite = check_suite .and. check + if (allocated(test_list)) then + deallocate(test_list) + end if + ! Check the output variables + call ccpp_physics_suite_variables(test_suite%suite_name, test_list, & + errmsg, errflg, input_vars=.false., output_vars=.true.) + if (errflg == 0) then + check = check_list(test_list, test_suite%suite_output_vars, & + 'output variable names', suite_name=test_suite%suite_name) + else + check = .false. + write(6, '(a,i0,2a)') 'ERROR ', errflg, ': ', trim(errmsg) + end if + check_suite = check_suite .and. check + if (allocated(test_list)) then + deallocate(test_list) + end if + ! Check all required variables + call ccpp_physics_suite_variables(test_suite%suite_name, test_list, & + errmsg, errflg) + if (errflg == 0) then + check = check_list(test_list, test_suite%suite_required_vars, & + 'required variable names', suite_name=test_suite%suite_name) + else + check = .false. + write(6, '(a,i0,2a)') 'ERROR ', errflg, ': ', trim(errmsg) + end if + check_suite = check_suite .and. check + if (allocated(test_list)) then + deallocate(test_list) + end if + end function check_suite + + + !> \section arg_table_test_host Argument Table + !! \htmlinclude arg_table_test_host.html + !! 
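+  ! Driver notes: test_host below initializes the test data, queries the
+  ! generated cap for the available suites (ccpp_physics_suite_list),
+  ! validates each suite with check_suite, then exercises the initialize,
+  ! timestep_initial, run, timestep_final, and finalize phases over
+  ! num_time_steps steps before checking results with check_model_times
+  ! and compare_data.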
+ subroutine test_host(retval, test_suites) + + use test_host_mod, only: ncols, num_time_steps + use test_host_ccpp_cap, only: test_host_ccpp_physics_initialize + use test_host_ccpp_cap, only: test_host_ccpp_physics_timestep_initial + use test_host_ccpp_cap, only: test_host_ccpp_physics_run + use test_host_ccpp_cap, only: test_host_ccpp_physics_timestep_final + use test_host_ccpp_cap, only: test_host_ccpp_physics_finalize + use test_host_ccpp_cap, only: ccpp_physics_suite_list + use test_host_mod, only: init_data, compare_data, check_model_times + + type(suite_info), intent(in) :: test_suites(:) + logical, intent(out) :: retval + + logical :: check + integer :: col_start, col_end + integer :: index, sind + integer :: time_step + integer :: num_suites + character(len=128), allocatable :: suite_names(:) + character(len=512) :: errmsg + integer :: errflg + + ! Initialize our 'data' + call init_data() + + ! Gather and test the inspection routines + num_suites = size(test_suites) + call ccpp_physics_suite_list(suite_names) + retval = check_list(suite_names, test_suites(:)%suite_name, & + 'suite names') + write(6, *) 'Available suites are:' + do index = 1, size(suite_names) + do sind = 1, num_suites + if (trim(test_suites(sind)%suite_name) == & + trim(suite_names(index))) then + exit + end if + end do + write(6, '(i0,3a,i0,a)') index, ') ', trim(suite_names(index)), & + ' = test_suites(', sind, ')' + end do + if (retval) then + do sind = 1, num_suites + check = check_suite(test_suites(sind)) + retval = retval .and. check + end do + end if + !!! Return here if any check failed + if (.not. retval) then + return + end if + + ! Use the suite information to setup the run + do sind = 1, num_suites + call test_host_ccpp_physics_initialize(test_suites(sind)%suite_name, & + errmsg, errflg) + if (errflg /= 0) then + write(6, '(4a)') 'ERROR in initialize of ', & + trim(test_suites(sind)%suite_name), ': ', trim(errmsg) + end if + end do + ! Loop over time steps + do time_step = 1, num_time_steps + ! Initialize the timestep + do sind = 1, num_suites + if (errflg /= 0) then + exit + end if + if (errflg == 0) then + call test_host_ccpp_physics_timestep_initial( & + test_suites(sind)%suite_name, errmsg, errflg) + end if + if (errflg /= 0) then + write(6, '(3a)') trim(test_suites(sind)%suite_name), ': ', & + trim(errmsg) + exit + end if + if (errflg /= 0) then + exit + end if + end do + + do col_start = 1, ncols, 5 + if (errflg /= 0) then + exit + end if + col_end = MIN(col_start + 4, ncols) + + do sind = 1, num_suites + if (errflg /= 0) then + exit + end if + do index = 1, size(test_suites(sind)%suite_parts) + if (errflg /= 0) then + exit + end if + if (errflg == 0) then + call test_host_ccpp_physics_run( & + test_suites(sind)%suite_name, & + test_suites(sind)%suite_parts(index), & + col_start, col_end, errmsg, errflg) + end if + if (errflg /= 0) then + write(6, '(5a)') trim(test_suites(sind)%suite_name), & + '/', trim(test_suites(sind)%suite_parts(index)), & + ': ', trim(errmsg) + exit + end if + end do + end do + end do + + do sind = 1, num_suites + if (errflg /= 0) then + exit + end if + if (errflg == 0) then + call test_host_ccpp_physics_timestep_final( & + test_suites(sind)%suite_name, errmsg, errflg) + end if + if (errflg /= 0) then + write(6, '(3a)') trim(test_suites(sind)%suite_name), ': ', & + trim(errmsg) + exit + end if + end do + end do ! 
End time step loop + + do sind = 1, num_suites + if (errflg /= 0) then + exit + end if + if (errflg == 0) then + call test_host_ccpp_physics_finalize( & + test_suites(sind)%suite_name, errmsg, errflg) + end if + if (errflg /= 0) then + write(6, '(3a)') test_suites(sind)%suite_parts(index), ': ', & + trim(errmsg) + write(6,'(2a)') 'An error occurred in ccpp_timestep_final, ', & + 'Exiting...' + exit + end if + end do + + if (errflg == 0) then + ! Run finished without error, check answers + if (.not. check_model_times()) then + write(6, *) 'Model times error!' + errflg = -1 + else if (compare_data()) then + write(6, *) 'Answers are correct!' + errflg = 0 + else + write(6, *) 'Answers are not correct!' + errflg = -1 + end if + end if + + retval = errflg == 0 + + end subroutine test_host + + end module test_prog + + program test + use test_prog, only: test_host, suite_info, cm, cs + + implicit none + + character(len=cs), target :: test_parts1(1) = (/ 'physics ' /) + character(len=cs), target :: test_parts2(1) = (/ 'data_prep ' /) + character(len=cm), target :: test_invars1(6) = (/ & + 'potential_temperature ', & + 'potential_temperature_at_interface ', & + 'surface_air_pressure ', & + 'water_vapor_specific_humidity ', & + 'potential_temperature_increment ', & + 'time_step_for_physics ' /) + character(len=cm), target :: test_outvars1(6) = (/ & + 'potential_temperature ', & + 'potential_temperature_at_interface ', & + 'surface_air_pressure ', & + 'water_vapor_specific_humidity ', & + 'ccpp_error_flag ', & + 'ccpp_error_message ' /) + character(len=cm), target :: test_reqvars1(8) = (/ & + 'potential_temperature ', & + 'potential_temperature_at_interface ', & + 'surface_air_pressure ', & + 'water_vapor_specific_humidity ', & + 'potential_temperature_increment ', & + 'time_step_for_physics ', & + 'ccpp_error_flag ', & + 'ccpp_error_message ' /) + + character(len=cm), target :: test_invars2(3) = (/ & + 'model_times ', & + 'number_of_model_times ', & + 'surface_air_pressure ' /) + + character(len=cm), target :: test_outvars2(4) = (/ & + 'ccpp_error_flag ', & + 'ccpp_error_message ', & + 'model_times ', & + 'number_of_model_times ' /) + + character(len=cm), target :: test_reqvars2(5) = (/ & + 'model_times ', & + 'number_of_model_times ', & + 'surface_air_pressure ', & + 'ccpp_error_flag ', & + 'ccpp_error_message ' /) + type(suite_info) :: test_suites(2) + logical :: run_okay + + ! 
Setup expected test suite info + test_suites(1)%suite_name = 'temp_suite' + test_suites(1)%suite_parts => test_parts1 + test_suites(1)%suite_input_vars => test_invars1 + test_suites(1)%suite_output_vars => test_outvars1 + test_suites(1)%suite_required_vars => test_reqvars1 + test_suites(2)%suite_name = 'ddt_suite' + test_suites(2)%suite_parts => test_parts2 + test_suites(2)%suite_input_vars => test_invars2 + test_suites(2)%suite_output_vars => test_outvars2 + test_suites(2)%suite_required_vars => test_reqvars2 + + call test_host(run_okay, test_suites) + + if (run_okay) then + STOP 0 + else + STOP -1 + end if + +end program test diff --git a/test/capgen_test/test_host.meta b/test/capgen_test/test_host.meta new file mode 100644 index 00000000..d648baf7 --- /dev/null +++ b/test/capgen_test/test_host.meta @@ -0,0 +1,31 @@ +[ccpp-table-properties] + name = test_host + type = host +[ccpp-arg-table] + name = test_host + type = host +[ col_start ] + standard_name = horizontal_loop_begin + type = integer + units = count + dimensions = () + protected = True +[ col_end ] + standard_name = horizontal_loop_end + type = integer + units = count + dimensions = () + protected = True +[ errmsg ] + standard_name = ccpp_error_message + long_name = Error message for error handling in CCPP + units = 1 + dimensions = () + type = character + kind = len=512 +[ errflg ] + standard_name = ccpp_error_flag + long_name = Error flag for error handling in CCPP + units = flag + dimensions = () + type = integer diff --git a/test/capgen_test/test_host_data.F90 b/test/capgen_test/test_host_data.F90 new file mode 100644 index 00000000..7a651fca --- /dev/null +++ b/test/capgen_test/test_host_data.F90 @@ -0,0 +1,51 @@ +module test_host_data + + use ccpp_kinds, only: kind_phys + + !> \section arg_table_physics_state Argument Table + !! \htmlinclude arg_table_physics_state.html + type physics_state + real(kind_phys), dimension(:), allocatable :: & + ps ! surface pressure + real(kind_phys), dimension(:,:), allocatable :: & + u, & ! zonal wind (m/s) + v, & ! meridional wind (m/s) + pmid ! midpoint pressure (Pa) + + real(kind_phys), dimension(:,:,:),allocatable :: & + q ! 
constituent mixing ratio (kg/kg moist or dry air depending on type) + end type physics_state + + public allocate_physics_state + +contains + + subroutine allocate_physics_state(cols, levels, constituents, state) + integer, intent(in) :: cols + integer, intent(in) :: levels + integer, intent(in) :: constituents + type(physics_state), intent(out) :: state + + if (allocated(state%ps)) then + deallocate(state%ps) + end if + allocate(state%ps(cols)) + if (allocated(state%u)) then + deallocate(state%u) + end if + allocate(state%u(cols, levels)) + if (allocated(state%v)) then + deallocate(state%v) + end if + allocate(state%v(cols, levels)) + if (allocated(state%pmid)) then + deallocate(state%pmid) + end if + allocate(state%pmid(cols, levels)) + if (allocated(state%q)) then + deallocate(state%q) + end if + allocate(state%q(cols, levels, constituents)) + + end subroutine allocate_physics_state +end module test_host_data diff --git a/test/capgen_test/test_host_data.meta b/test/capgen_test/test_host_data.meta new file mode 100644 index 00000000..7185aed8 --- /dev/null +++ b/test/capgen_test/test_host_data.meta @@ -0,0 +1,51 @@ +[ccpp-table-properties] + name = physics_state + type = ddt +[ccpp-arg-table] + name = physics_state + type = ddt +[ ps ] + standard_name = surface_air_pressure + state_variable = true + type = real + kind = kind_phys + units = Pa + dimensions = (horizontal_dimension) +[ u ] + standard_name = eastward_wind + long_name = Zonal wind + state_variable = true + type = real + kind = kind_phys + units = m s-1 + dimensions = (horizontal_dimension, vertical_layer_dimension) +[ v ] + standard_name = northward_wind + long_name = Meridional wind + state_variable = true + type = real + kind = kind_phys + units = m s-1 + dimensions = (horizontal_dimension, vertical_layer_dimension) +[ pmid ] + standard_name = air_pressure + long_name = Midpoint air pressure + state_variable = true + type = real + kind = kind_phys + units = Pa + dimensions = (horizontal_dimension, vertical_layer_dimension) +[ q ] + standard_name = constituent_mixing_ratio + state_variable = true + type = real + kind = kind_phys + units = kg/kg moist or dry air depending on type + dimensions = (horizontal_dimension, vertical_layer_dimension, number_of_tracers) +[ q(:,:,index_of_water_vapor_specific_humidity) ] + standard_name = water_vapor_specific_humidity + state_variable = true + type = real + kind = kind_phys + units = kg kg-1 + dimensions = (horizontal_dimension, vertical_layer_dimension) diff --git a/test/capgen_test/test_host_mod.F90 b/test/capgen_test/test_host_mod.F90 new file mode 100644 index 00000000..d2f91136 --- /dev/null +++ b/test/capgen_test/test_host_mod.F90 @@ -0,0 +1,139 @@ +module test_host_mod + + use ccpp_kinds, only: kind_phys + use test_host_data, only: physics_state, allocate_physics_state + + implicit none + public + + !> \section arg_table_test_host_mod Argument Table + !! \htmlinclude arg_table_test_host_host.html + !! 
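+  ! Fixed test dimensions and host "model" state for the capgen test; the
+  ! variables below are exposed to the framework through the entries in
+  ! test_host_mod.meta (e.g. ncols -> horizontal_dimension,
+  ! pver -> vertical_layer_dimension, pverP -> vertical_interface_dimension,
+  ! pcnst -> number_of_tracers, phys_state -> the physics_state DDT).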
+ integer, parameter :: ncols = 10 + integer, parameter :: pver = 5 + integer, parameter :: pverP = 6 + integer, parameter :: pcnst = 2 + integer, parameter :: DiagDimStart = 2 + integer, parameter :: index_qv = 1 + real(kind_phys), allocatable :: temp_midpoints(:,:) + real(kind_phys) :: temp_interfaces(ncols, pverP) + real(kind_phys), dimension(DiagDimStart:ncols, DiagDimStart:pver) :: & + diag1, & + diag2 + real(kind_phys) :: dt + real(kind_phys), parameter :: temp_inc = 0.05_kind_phys + type(physics_state) :: phys_state + integer :: num_model_times = -1 + integer, allocatable :: model_times(:) + + integer, parameter :: num_time_steps = 2 + real(kind_phys), parameter :: tolerance = 1.0e-13_kind_phys + real(kind_phys) :: tint_save(ncols, pverP) + + public :: init_data + public :: compare_data + public :: check_model_times + +contains + + subroutine init_data() + + integer :: col + integer :: lev + integer :: cind + integer :: offsize + + ! Allocate and initialize temperature + allocate(temp_midpoints(ncols, pver)) + temp_midpoints = 0.0_kind_phys + do lev = 1, pverP + offsize = ((cind - 1) * (ncols * pver)) + ((lev - 1) * ncols) + do col = 1, ncols + temp_interfaces(col, lev) = real(offsize + col, kind=kind_phys) + tint_save(col, lev) = temp_interfaces(col, lev) + end do + end do + ! Allocate and initialize state + call allocate_physics_state(ncols, pver, pcnst, phys_state) + do cind = 1, pcnst + do lev = 1, pver + offsize = ((cind - 1) * (ncols * pver)) + ((lev - 1) * ncols) + do col = 1, ncols + phys_state%q(col, lev, cind) = real(offsize + col, kind=kind_phys) + end do + end do + end do + + end subroutine init_data + + logical function check_model_times() + + check_model_times = (num_model_times > 0) + if (check_model_times) then + check_model_times = (size(model_times) == num_model_times) + if (.not. check_model_times) then + write(6, '(2(a,i0))') 'model_times size mismatch, ', & + size(model_times), ' should be ', num_model_times + end if + else + write(6, '(a,i0,a)') 'num_model_times mismatch, ',num_model_times, & + ' should be greater than zero' + end if + + end function check_model_times + + logical function compare_data() + + integer :: col + integer :: lev + integer :: cind + integer :: offsize + logical :: need_header + real(kind_phys) :: avg + integer, parameter :: cincrements(pcnst) = (/ 1, 0 /) + + compare_data = .true. + + need_header = .true. + do lev = 1, pver + do col = 1, ncols + avg = (tint_save(col,lev) + tint_save(col,lev+1)) + avg = 1.0_kind_phys + (avg / 2.0_kind_phys) + avg = avg + (temp_inc * num_time_steps) + if (abs((temp_midpoints(col, lev) - avg) / avg) > tolerance) then + if (need_header) then + write(6, '(" COL LEV T MIDPOINTS EXPECTED")') + need_header = .false. + end if + write(6, '(2i5,2(3x,es15.7))') col, lev, & + temp_midpoints(col, lev), avg + compare_data = .false. + end if + end do + end do + ! Check constituents + need_header = .true. + do cind = 1, pcnst + do lev = 1, pver + offsize = ((cind - 1) * (ncols * pver)) + ((lev - 1) * ncols) + do col = 1, ncols + avg = real(offsize + col + (cincrements(cind) * num_time_steps), & + kind=kind_phys) + if (abs((phys_state%q(col, lev, cind) - avg) / avg) > & + tolerance) then + if (need_header) then + write(6, '(2(2x,a),3x,a,10x,a,14x,a)') & + 'COL', 'LEV', 'C#', 'Q', 'EXPECTED' + need_header = .false. + end if + write(6, '(3i5,2(3x,es15.7))') col, lev, cind, & + phys_state%q(col, lev, cind), avg + compare_data = .false. 
+ end if + end do + end do + end do + + end function compare_data + +end module test_host_mod diff --git a/test/capgen_test/test_host_mod.meta b/test/capgen_test/test_host_mod.meta new file mode 100644 index 00000000..0d53f320 --- /dev/null +++ b/test/capgen_test/test_host_mod.meta @@ -0,0 +1,92 @@ +[ccpp-table-properties] + name = test_host_mod + type = module +[ccpp-arg-table] + name = test_host_mod + type = module +[ index_qv ] + standard_name = index_of_water_vapor_specific_humidity + units = index + type = integer + protected = True + dimensions = () +[ ncols] + standard_name = horizontal_dimension + units = count + type = integer + protected = True + dimensions = () +[ pver ] + standard_name = vertical_layer_dimension + units = count + type = integer + protected = True + dimensions = () +[ pverP ] + standard_name = vertical_interface_dimension + type = integer + units = count + protected = True + dimensions = () +[ pcnst ] + standard_name = number_of_tracers + type = integer + units = count + protected = True + dimensions = () +[ DiagDimStart ] + standard_name = first_index_of_diag_fields + type = integer + units = count + protected = True + dimensions = () +[ temp_midpoints ] + standard_name = potential_temperature + units = K + dimensions = (horizontal_dimension, vertical_layer_dimension) + type = real | kind = kind_phys +[ temp_interfaces ] + standard_name = potential_temperature_at_interface + units = K + dimensions = (horizontal_dimension, vertical_interface_dimension) + type = real | kind = kind_phys +[ diag1 ] + standard_name = diagnostic_stuff_type_1 + long_name = This is just a test field + units = K + dimensions = (first_index_of_diag_fields:horizontal_dimension, first_index_of_diag_fields:vertical_layer_dimension) + type = real | kind = kind_phys +[ diag2 ] + standard_name = diagnostic_stuff_type_2 + long_name = This is just a test field + units = K + dimensions = (first_index_of_diag_fields: horizontal_dimension, first_index_of_diag_fields :vertical_layer_dimension) + type = real | kind = kind_phys +[ dt ] + standard_name = time_step_for_physics + long_name = time step + units = s + dimensions = () + type = real | kind = kind_phys +[ temp_inc ] + standard_name = potential_temperature_increment + long_name = Per time step potential temperature increment + units = K + dimensions = () + type = real | kind = kind_phys +[ phys_state ] + standard_name = physics_state_derived_type + long_name = Physics State DDT + type = physics_state + dimensions = () +[ num_model_times ] + standard_name = number_of_model_times + type = integer + units = count + dimensions = () +[ model_times ] + standard_name = model_times + units = seconds + dimensions = (number_of_model_times) + type = integer + allocatable = True diff --git a/test/capgen_test/test_reports.py b/test/capgen_test/test_reports.py new file mode 100644 index 00000000..43adbe38 --- /dev/null +++ b/test/capgen_test/test_reports.py @@ -0,0 +1,185 @@ +#! 
/usr/bin/env python +""" +----------------------------------------------------------------------- + Description: Test capgen database report python interface + + Assumptions: + + Command line arguments: build_dir database_filepath + + Usage: python test_reports <build_dir> <database_filepath> +----------------------------------------------------------------------- +""" +import sys +import os + +_TEST_DIR = os.path.dirname(os.path.abspath(__file__)) +_FRAMEWORK_DIR = os.path.abspath(os.path.join(_TEST_DIR, os.pardir, os.pardir)) +_SCRIPTS_DIR = os.path.join(_FRAMEWORK_DIR, "scripts") +_SRC_DIR = os.path.join(_FRAMEWORK_DIR, "src") + +if not os.path.exists(_SCRIPTS_DIR): + raise ImportError("Cannot find scripts directory") +# end if + +sys.path.append(_SCRIPTS_DIR) +# pylint: disable=wrong-import-position +from ccpp_datafile import datatable_report, DatatableReport +# pylint: enable=wrong-import-position + +def usage(errmsg=None): + """Raise an exception with optional error message and usage message""" + emsg = "usage: {} <build_dir> <database_filepath>" + if errmsg: + emsg = errmsg + '\n' + emsg + # end if + raise ValueError(emsg.format(sys.argv[0])) + +if len(sys.argv) != 3: + usage() +# end if + +_BUILD_DIR = os.path.abspath(sys.argv[1]) +_DATABASE = os.path.abspath(sys.argv[2]) +if not os.path.isdir(_BUILD_DIR): + _EMSG = "<build_dir> must be an existing build directory" + usage(_EMSG) +# end if +if (not os.path.exists(_DATABASE)) or (not os.path.isfile(_DATABASE)): + _EMSG = "<database_filepath> must be an existing CCPP database file" + usage(_EMSG) +# end if + +# Check data +_HOST_FILES = [os.path.join(_BUILD_DIR, "ccpp", "test_host_ccpp_cap.F90")] +_SUITE_FILES = [os.path.join(_BUILD_DIR, "ccpp", "ccpp_ddt_suite_cap.F90"), + os.path.join(_BUILD_DIR, "ccpp", "ccpp_temp_suite_cap.F90")] +_UTILITY_FILES = [os.path.join(_BUILD_DIR, "ccpp", "ccpp_kinds.F90"), + os.path.join(_SRC_DIR, "ccpp_constituent_prop_mod.F90"), + os.path.join(_SRC_DIR, "ccpp_hashable.F90"), + os.path.join(_SRC_DIR, "ccpp_hash_table.F90")] +_CCPP_FILES = _UTILITY_FILES + \ + [os.path.join(_BUILD_DIR, "ccpp", "test_host_ccpp_cap.F90"), + os.path.join(_BUILD_DIR, "ccpp", "ccpp_ddt_suite_cap.F90"), + os.path.join(_BUILD_DIR, "ccpp", "ccpp_temp_suite_cap.F90")] +_PROCESS_LIST = ["setter=temp_set", "adjusting=temp_calc_adjust"] +_MODULE_LIST = ["environ_conditions", "make_ddt", "temp_adjust", + "temp_calc_adjust", "temp_set"] +_SUITE_LIST = ["ddt_suite", "temp_suite"] +_INPUT_VARS_DDT = ["model_times", "number_of_model_times", + "horizontal_loop_begin", "horizontal_loop_end", + "surface_air_pressure", "horizontal_dimension"] +_OUTPUT_VARS_DDT = ["ccpp_error_flag", "ccpp_error_message", "model_times", + "number_of_model_times"] +_REQUIRED_VARS_DDT = _INPUT_VARS_DDT + _OUTPUT_VARS_DDT +_PROT_VARS_TEMP = ["horizontal_loop_begin", "horizontal_loop_end", + "horizontal_dimension", "vertical_layer_dimension"] +_REQUIRED_VARS_TEMP = ["ccpp_error_flag", "ccpp_error_message", + "potential_temperature", + "potential_temperature_at_interface", + "potential_temperature_increment", + "surface_air_pressure", "time_step_for_physics", + "water_vapor_specific_humidity"] +_INPUT_VARS_TEMP = ["potential_temperature", + "potential_temperature_at_interface", + "potential_temperature_increment", + "surface_air_pressure", "time_step_for_physics", + "water_vapor_specific_humidity"] +_OUTPUT_VARS_TEMP = ["ccpp_error_flag", "ccpp_error_message", + "potential_temperature", + "potential_temperature_at_interface", + "surface_air_pressure", 
"water_vapor_specific_humidity"] + +def fields_string(field_type, field_list, sep): + """Create an error string for <field_type> field(s), <field_list>. + <sep> is used to separate items in <field_list>""" + indent = ' '*11 + if field_list: + if len(field_list) > 1: + field_str = "{} Fields: ".format(field_type) + else: + field_str = "{} Field: ".format(field_type) + # end if + fmsg = "\n{}{}{}".format(indent, field_str, sep.join(field_list)) + else: + fmsg = "" + # end if + return fmsg + +def check_datatable(database, report_type, check_list, + sep=',', excl_prot=False): + """Run a database report and check the return string. + If an error is found, print an error message. + Return the number of errors""" + if sep is None: + sep = ',' + # end if + test_str = datatable_report(database, report_type, sep, excl_prot=excl_prot) + test_list = [x for x in test_str.split(sep) if x] + missing = list() + unexpected = list() + for item in check_list: + if item not in test_list: + missing.append(item) + # end if + # end for + for item in test_list: + if item not in check_list: + unexpected.append(item) + # end if + # end for + if missing or unexpected: + vmsg = "ERROR in {} datafile check:".format(report_type.action) + vmsg += fields_string("Missing", missing, sep) + vmsg += fields_string("Unexpected", unexpected, sep) + print(vmsg) + else: + print("{} report okay".format(report_type.action)) + # end if + return len(missing) + len(unexpected) + +NUM_ERRORS = 0 +print("Checking required files from python:") +NUM_ERRORS += check_datatable(_DATABASE, DatatableReport("host_files"), + _HOST_FILES) +NUM_ERRORS += check_datatable(_DATABASE, DatatableReport("suite_files"), + _SUITE_FILES) +NUM_ERRORS += check_datatable(_DATABASE, DatatableReport("utility_files"), + _UTILITY_FILES) +NUM_ERRORS += check_datatable(_DATABASE, DatatableReport("ccpp_files"), + _CCPP_FILES) +print("\nChecking lists from python") +NUM_ERRORS += check_datatable(_DATABASE, DatatableReport("process_list"), + _PROCESS_LIST) +NUM_ERRORS += check_datatable(_DATABASE, DatatableReport("module_list"), + _MODULE_LIST) +NUM_ERRORS += check_datatable(_DATABASE, DatatableReport("suite_list"), + _SUITE_LIST) +print("\nChecking variables for DDT suite from python") +NUM_ERRORS += check_datatable(_DATABASE, DatatableReport("required_variables", + value="ddt_suite"), + _REQUIRED_VARS_DDT) +NUM_ERRORS += check_datatable(_DATABASE, DatatableReport("input_variables", + value="ddt_suite"), + _INPUT_VARS_DDT) +NUM_ERRORS += check_datatable(_DATABASE, DatatableReport("output_variables", + value="ddt_suite"), + _OUTPUT_VARS_DDT) +print("\nChecking variables for temp suite from python") +NUM_ERRORS += check_datatable(_DATABASE, DatatableReport("required_variables", + value="temp_suite"), + _REQUIRED_VARS_TEMP + _PROT_VARS_TEMP) +NUM_ERRORS += check_datatable(_DATABASE, DatatableReport("required_variables", + value="temp_suite"), + _REQUIRED_VARS_TEMP, excl_prot=True) +NUM_ERRORS += check_datatable(_DATABASE, DatatableReport("input_variables", + value="temp_suite"), + _INPUT_VARS_TEMP + _PROT_VARS_TEMP) +NUM_ERRORS += check_datatable(_DATABASE, DatatableReport("input_variables", + value="temp_suite"), + _INPUT_VARS_TEMP, excl_prot=True) +NUM_ERRORS += check_datatable(_DATABASE, DatatableReport("output_variables", + value="temp_suite"), + _OUTPUT_VARS_TEMP) + +sys.exit(NUM_ERRORS) diff --git a/test/hash_table_tests/Makefile b/test/hash_table_tests/Makefile new file mode 100644 index 00000000..5db42275 --- /dev/null +++ b/test/hash_table_tests/Makefile @@ -0,0 
+1,38 @@ +SHELL = /bin/sh + +INCFLAG = -I +INCPATH += $(INCFLAG). +FCFLAGS += -g + +SRCPATH = ../../src +HASHPATH = $(SRCPATH) + +HASHOBJS = ccpp_hashable.o ccpp_hash_table.o + +# Make sure we have a log file +ifeq ($(LOGFILE),) +LOGFILE := ccpp_test.log +endif + +# TARGETS + +ccpp_hashable.o: $(HASHPATH)/ccpp_hashable.F90 + @echo "${FC} -c ${FCFLAGS} ${INCPATH} $^" 2>&1 >> $(LOGFILE) + @${FC} -c ${FCFLAGS} ${INCPATH} $^ 2>&1 >> $(LOGFILE) + +ccpp_hash_table.o: $(HASHPATH)/ccpp_hash_table.F90 + @echo "${FC} -c ${FCFLAGS} ${INCPATH} $^" 2>&1 >> $(LOGFILE) + @${FC} -c ${FCFLAGS} ${INCPATH} $^ 2>&1 >> $(LOGFILE) + +test_hash_table: test_hash.F90 $(HASHOBJS) + @echo "${FC} ${FCFLAGS} ${INCPATH} -o $@ $^" 2>&1 >> $(LOGFILE) + @${FC} ${FCFLAGS} ${INCPATH} -o $@ $^ 2>&1 >> $(LOGFILE) + +test: test_hash_table + @echo "Run Hash Table Tests" + @./test_hash_table + +# CLEAN +clean: + @rm -f *.o *.mod ccpp_test.log + @rm -f test_hash_table diff --git a/test/hash_table_tests/test_hash.F90 b/test/hash_table_tests/test_hash.F90 new file mode 100644 index 00000000..883ab17b --- /dev/null +++ b/test/hash_table_tests/test_hash.F90 @@ -0,0 +1,214 @@ +module test_hash_utils + use ccpp_hashable, only: ccpp_hashable_char_t + + implicit none + private + + public :: test_table + + integer, parameter, public :: max_terrs = 16 + + type, public :: hash_object_t + type(ccpp_hashable_char_t), pointer :: item => NULL() + end type hash_object_t + + private add_error + +CONTAINS + + subroutine add_error(msg, num_errs, errors) + ! Dummy arguments + character(len=*), intent(in) :: msg + integer, intent(inout) :: num_errs + character(len=*), intent(inout) :: errors(:) + + if (num_errs < max_terrs) then + num_errs = num_errs + 1 + write(errors(num_errs), *) trim(msg) + end if + + end subroutine add_error + + subroutine test_table(hash_table, table_size, num_tests, num_errs, errors) + use ccpp_hash_table, only: ccpp_hash_table_t, ccpp_hash_iterator_t + use ccpp_hashable, only: ccpp_hashable_t, new_hashable_char + + ! Dummy arguments + type(ccpp_hash_table_t), target, intent(inout) :: hash_table + integer, intent(in) :: table_size + integer, intent(out) :: num_tests + integer, intent(out) :: num_errs + character(len=*), intent(inout) :: errors(:) + ! Local variables + integer, parameter :: num_test_entries = 4 + integer, parameter :: key_len = 10 + character(len=key_len) :: hash_names(num_test_entries) = (/ & + 'foo ', 'bar ', 'foobar ', 'big daddy ' /) + logical :: hash_found(num_test_entries) + + type(hash_object_t) :: hash_chars(num_test_entries) + class(ccpp_hashable_t), pointer :: test_ptr => NULL() + type(ccpp_hash_iterator_t) :: hash_iter + character(len=key_len) :: test_key + character(len=len(errors(1))) :: errmsg + integer :: index + + write(6, '(a,i0)') "Testing hash table, size = ", table_size + num_tests = 0 + num_errs = 0 + ! Make sure hash table is *not* initialized + if (hash_table%is_initialized()) then + call add_error("Error: hash table initialized too early", & + num_errs, errors) + end if + num_tests = num_tests + 1 + ! Initialize hash table + call hash_table%initialize(table_size) + ! Make sure hash table is *is* initialized + if (.not. 
hash_table%is_initialized()) then + call add_error("Error: hash table *not* initialized", num_errs, errors) + end if + num_tests = num_tests + 1 + do index = 1, num_test_entries + call new_hashable_char(hash_names(index), hash_chars(index)%item) + call hash_table%add_hash_key(hash_chars(index)%item, & + errmsg=errors(num_errs + 1)) + if (len_trim(errors(num_errs + 1)) > 0) then + num_errs = num_errs + 1 + end if + if (num_errs > max_terrs) then + exit + end if + end do + + if (num_errs == 0) then + ! We have populated the table, let's do some tests + ! First, make sure we can find existing entries + do index = 1, num_test_entries + test_ptr => hash_table%table_value(hash_names(index), & + errmsg=errors(num_errs + 1)) + if (len_trim(errors(num_errs + 1)) > 0) then + num_errs = num_errs + 1 + else if (trim(test_ptr%key()) /= trim(hash_names(index))) then + num_errs = num_errs + 1 + write(errmsg, *) "ERROR: Found '", trim(test_ptr%key()), & + "', expected '", trim(hash_names(index)), "'" + call add_error(trim(errmsg), num_errs, errors) + end if + if (num_errs > max_terrs) then + exit + end if + end do + num_tests = num_tests + 1 + ! Next, make sure we do not find a non-existent entry + test_ptr => hash_table%table_value(trim(hash_names(1))//'_oops', & + errmsg=errors(num_errs + 1)) + if (len_trim(errors(num_errs + 1)) == 0) then + write(errmsg, *) "ERROR: Found an entry for '", & + trim(hash_names(1))//'_oops', "'" + call add_error(trim(errmsg), num_errs, errors) + end if + num_tests = num_tests + 1 + ! Make sure we get an error if we try to add a duplicate key + call hash_table%add_hash_key(hash_chars(2)%item, & + errmsg=errors(num_errs + 1)) + if (len_trim(errors(num_errs + 1)) == 0) then + num_errs = num_errs + 1 + write(errors(num_errs), *) & + "ERROR: Allowed duplicate entry for '", & + hash_chars(2)%item%key(), "'" + end if + num_tests = num_tests + 1 + ! Check that the total number of table entries is correct + if (hash_table%num_values() /= num_test_entries) then + write(errmsg, '(2(a,i0))') "ERROR: Wrong table value count, ", & + hash_table%num_values(), ', should be ', num_test_entries + call add_error(errmsg, num_errs, errors) + end if + num_tests = num_tests + 1 + ! Test iteration through hash table + hash_found(:) = .false. + call hash_iter%initialize(hash_table) + num_tests = num_tests + 1 + do + if (hash_iter%valid()) then + test_key = hash_iter%key() + index = 1 + do + if (trim(test_key) == trim(hash_names(index))) then + hash_found(index) = .true. + exit + else if (index >= num_test_entries) then + write(errmsg, '(3a)') & + "ERROR: Unexpected table entry, '", & + trim(test_key), "'" + call add_error(errmsg, num_errs, errors) + end if + index = index + 1 + end do + call hash_iter%next() + else + exit + end if + end do + if (ANY(.not. hash_found)) then + write(errmsg, '(a,i0,a)') "ERROR: ", & + COUNT(.not. hash_found), " test keys not found in table." + call add_error(errmsg, num_errs, errors) + end if + end if + ! Finally, clear the hash table (should deallocate everything) + call hash_table%clear() + ! Make sure hash table is *not* initialized + if (hash_table%is_initialized()) then + call add_error("Error: hash table initialized after clear", & + num_errs, errors) + end if + num_tests = num_tests + 1 + ! 
Cleanup + do index = 1, num_test_entries + deallocate(hash_chars(index)%item) + end do + + end subroutine test_table + +end module test_hash_utils + +program test_hash + use ccpp_hash_table, only: ccpp_hash_table_t + use test_hash_utils, only: test_table, max_terrs + + integer, parameter :: num_table_sizes = 5 + integer, parameter :: max_errs = max_terrs * num_table_sizes + integer, parameter :: err_size = 128 + integer, parameter :: test_sizes(num_table_sizes) = (/ & + 0, 1, 2, 4, 20 /) + + type(ccpp_hash_table_t), target :: hash_table + integer :: index + integer :: errcnt = 0 + integer :: num_tests = 0 + integer :: total_errcnt = 0 + integer :: total_tests = 0 + character(len=err_size) :: errors(max_errs) + + errors = '' + do index = 1, num_table_sizes + call test_table(hash_table, test_sizes(index), num_tests, errcnt, & + errors(total_errcnt+1:)) + total_tests = total_tests + num_tests + total_errcnt = total_errcnt + errcnt + end do + + if (total_errcnt > 0) then + write(6, '(a,i0,a)') 'FAIL, ', total_errcnt, ' errors found' + do index = 1, total_errcnt + write(6, *) trim(errors(index)) + end do + STOP 1 + else + write(6, '(a,i0,a)') "All ", total_tests, " hash table tests passed!" + STOP 0 + end if + +end program test_hash diff --git a/test/pylint_test.sh b/test/pylint_test.sh new file mode 100755 index 00000000..a8bf3f90 --- /dev/null +++ b/test/pylint_test.sh @@ -0,0 +1,28 @@ +#! /bin/bash + +# Script to run pylint tests on CCPP Framework python scripts + +# Add CCPP Framework paths to PYTHONPATH so pylint can find them +SCRIPTDIR="$( cd $( dirname ${0} ); pwd -P )" +SPINROOT="$( dirname ${SCRIPTDIR} )" +CCPPDIR="${SPINROOT}/scripts" +export PYTHONPATH="${CCPPDIR}:$PYTHONPATH" + +pylintcmd="pylint --rcfile=${SCRIPTDIR}/.pylintrc" + +# Test top-level scripts +scripts="${CCPPDIR}/ccpp_capgen.py" +scripts="${scripts} ${CCPPDIR}/ccpp_suite.py" +scripts="${scripts} ${CCPPDIR}/ddt_library.py" +scripts="${scripts} ${CCPPDIR}/host_cap.py" +scripts="${scripts} ${CCPPDIR}/host_model.py" +scripts="${scripts} ${CCPPDIR}/metadata_table.py" +scripts="${scripts} ${CCPPDIR}/metavar.py" +scripts="${scripts} ${CCPPDIR}/state_machine.py" +${pylintcmd} ${scripts} +# Test the fortran_tools module +${pylintcmd} ${CCPPDIR}/fortran_tools +# Test the parse_tools module +${pylintcmd} ${CCPPDIR}/parse_tools +# Test the fortran to metadata converter tool +${pylintcmd} ${CCPPDIR}/ccpp_fortran_to_metadata.py diff --git a/test/run_doctest.sh b/test/run_doctest.sh new file mode 100755 index 00000000..1eecb32f --- /dev/null +++ b/test/run_doctest.sh @@ -0,0 +1,29 @@ +#! /bin/bash + +root=$( dirname $( cd $( dirname ${0}); pwd -P ) ) +scripts=${root}/scripts + +perr() { + # Print error message ($2) on error ($1) + if [ ${1} -ne 0 ]; then + echo "ERROR: ${2}" + if [ $# -gt 2 ]; then + exit ${3} + else + exit 1 + fi + fi +} + +cd ${scripts} +perr $? "Cannot cd to scripts directory, '${scripts}'" + +export PYTHONPATH="${scripts}:${PYTHONPATH}" +# Find all python scripts that have doctest +for pyfile in $(find . -name \*.py); do + if [ -f "${pyfile}" ]; then + if [ $(grep -c doctest ${pyfile}) -ne 0 ]; then + python3 ${pyfile} + fi + fi +done diff --git a/test/unit_tests/README.md b/test/unit_tests/README.md new file mode 100644 index 00000000..cfd17c74 --- /dev/null +++ b/test/unit_tests/README.md @@ -0,0 +1,45 @@ +# How to build the test/capgen_test (on hera) + +## Quick start: +``` +cd test/capgen_test +mkdir build +cd build +cmake .. 
+make +./test_host +``` + +The command to run ccpp_capgen.py is: + +`<root>/scripts/ccpp_capgen.py \ + --host-files test_host_data.meta,test_host_mod.meta,test_host.meta \ + --scheme-files temp_scheme_files.txt,ddt_suite_files.txt \ + --suites ddt_suite.xml,temp_suite.xml\ + --output-root <root>/test/capgen_test/build/ccpp\ + --generate-host-cap` + +where `<root>` is the path to your ccpp/framework directory. + +Modify a *meta* file in `capgen_test` and write a test that passes when fixed. + +To run the unit tests: +``` +cd <root>/test/unit_tests +python test_metadata_table.py +``` +For more verbose output: +``` +python test_metadata_table.py -v +``` +If you have `coverage` installed, to get test coverage: +``` +coverage run test_metadata_table.py +coverage report -m +``` +To check source code quality with pylint: +``` +cd <root> +env PYTHONPATH=scripts:${PYTHONPATH} pylint --rcfile ./test/.pylintrc ./test/unit_tests/test_metadata_table.py +env PYTHONPATH=scripts:${PYTHONPATH} pylint --rcfile ./test/.pylintrc ./test/unit_tests/test_metadata_scheme_file.py +``` diff --git a/test/unit_tests/sample_files/double_header.meta b/test/unit_tests/sample_files/double_header.meta new file mode 100644 index 00000000..27051c17 --- /dev/null +++ b/test/unit_tests/sample_files/double_header.meta @@ -0,0 +1,12 @@ +[ccpp-table-properties] + name = test_host + type = host + dependencies = + +######################################################################## +[ccpp-arg-table] + name = test_host + type = host +[ccpp-arg-table] + name = test_host + type = host diff --git a/test/unit_tests/sample_files/double_table_properties.meta b/test/unit_tests/sample_files/double_table_properties.meta new file mode 100644 index 00000000..28637b36 --- /dev/null +++ b/test/unit_tests/sample_files/double_table_properties.meta @@ -0,0 +1,13 @@ +[ccpp-table-properties] + name = test_host + type = host + dependencies = +[ccpp-table-properties] + name = test_host + type = host + dependencies = + +######################################################################## +[ccpp-arg-table] + name = test_host + type = host diff --git a/test/unit_tests/sample_files/missing_table_properties.meta b/test/unit_tests/sample_files/missing_table_properties.meta new file mode 100644 index 00000000..0bef09dc --- /dev/null +++ b/test/unit_tests/sample_files/missing_table_properties.meta @@ -0,0 +1,3 @@ +[ccpp-arg-table] + name = test_host + type = host diff --git a/test/unit_tests/sample_files/test_bad_1st_arg_table_header.meta b/test/unit_tests/sample_files/test_bad_1st_arg_table_header.meta new file mode 100644 index 00000000..ac6468ac --- /dev/null +++ b/test/unit_tests/sample_files/test_bad_1st_arg_table_header.meta @@ -0,0 +1,23 @@ +[ccpp-table-properties] + name = vmr_type + type = ddt + dependencies = + +######################################################################## +[ccpp-farg-table] + name = vmr_type + type = ddt +[ nvmr ] + standard_name = number_of_chemical_species + units = count + dimensions = () + type = integer +[ccpp-arg-table] + name = make_ddt_run + type = scheme +[ nbox ] + standard_name = horizontal_loop_extent + type = integer + units = count + dimensions = () + intent = in diff --git a/test/unit_tests/sample_files/test_bad_2nd_arg_table_header.meta b/test/unit_tests/sample_files/test_bad_2nd_arg_table_header.meta new file mode 100644 index 00000000..48381744 --- /dev/null +++ b/test/unit_tests/sample_files/test_bad_2nd_arg_table_header.meta @@ -0,0 +1,23 @@ +[ccpp-table-properties] + name = vmr_type + type = 
ddt + dependencies = + +######################################################################## +[ccpp-arg-table] + name = vmr_type + type = ddt +[ nvmr ] + standard_name = number_of_chemical_species + units = count + dimensions = () + type = integer +[ccpp-farg-table] + name = make_ddt_run + type = scheme +[ nbox ] + standard_name = horizontal_loop_extent + type = integer + units = count + dimensions = () + intent = in diff --git a/test/unit_tests/sample_files/test_bad_dimension.meta b/test/unit_tests/sample_files/test_bad_dimension.meta new file mode 100644 index 00000000..9a20b1f8 --- /dev/null +++ b/test/unit_tests/sample_files/test_bad_dimension.meta @@ -0,0 +1,15 @@ +[ccpp-table-properties] + name = test_host + type = host + dependencies = + +######################################################################## +[ccpp-arg-table] + name = test_host + type = host +[ col_start ] + standard_name = horizontal_loop_begin + type = integer + units = count + dimensions = banana + protected = True diff --git a/test/unit_tests/sample_files/test_bad_line_split.meta b/test/unit_tests/sample_files/test_bad_line_split.meta new file mode 100644 index 00000000..3ace2ccb --- /dev/null +++ b/test/unit_tests/sample_files/test_bad_line_split.meta @@ -0,0 +1,16 @@ +[ccpp-table-properties] + name = temp_calc_adjust + type = scheme + dependencies = + +######################################################################## +[ccpp-arg-table] + name = temp_calc_adjust_run + type = scheme +[ temp_calc ] + standard_name = potential_temperature_at_previous_timestep + units = K | + dimensions = (horizontal_loop_extent) + type = real + kind = kind_phys + intent = out diff --git a/test/unit_tests/sample_files/test_bad_table_key.meta b/test/unit_tests/sample_files/test_bad_table_key.meta new file mode 100644 index 00000000..8d0bbc7f --- /dev/null +++ b/test/unit_tests/sample_files/test_bad_table_key.meta @@ -0,0 +1,10 @@ +[ccpp-table-properties] + name = test_host + type = host + dependencies = + +######################################################################## +[ccpp-arg-table] + name = test_host + type = host + banana = something diff --git a/test/unit_tests/sample_files/test_bad_table_type.meta b/test/unit_tests/sample_files/test_bad_table_type.meta new file mode 100644 index 00000000..b6b9449d --- /dev/null +++ b/test/unit_tests/sample_files/test_bad_table_type.meta @@ -0,0 +1,15 @@ +[ccpp-table-properties] + name = test_host + type = scheme + dependencies = + +######################################################################## +[ccpp-arg-table] + name = test_host + type = host +[ col_start ] + standard_name = horizontal_loop_begin + type = integer + units = count + dimensions = () + protected = True diff --git a/test/unit_tests/sample_files/test_bad_type_name.meta b/test/unit_tests/sample_files/test_bad_type_name.meta new file mode 100644 index 00000000..de11315d --- /dev/null +++ b/test/unit_tests/sample_files/test_bad_type_name.meta @@ -0,0 +1,9 @@ +[ccpp-table-properties] + name = test_host + type = scheme + dependencies = + +######################################################################## +[ccpp-arg-table] + name = test_host + type = banana diff --git a/test/unit_tests/sample_files/test_bad_var_property_name.meta b/test/unit_tests/sample_files/test_bad_var_property_name.meta new file mode 100644 index 00000000..619e7b83 --- /dev/null +++ b/test/unit_tests/sample_files/test_bad_var_property_name.meta @@ -0,0 +1,35 @@ +[ccpp-table-properties] + name = vmr_type + type = ddt + 
dependencies = + +######################################################################## +[ccpp-arg-table] + name = vmr_type + type = ddt +[ nvmr ] + standard_name = number_of_chemical_species + units = count + dimensions = () + type = integer +[ vmr_array ] + standard_name = array_of_volume_mixing_ratios + units = ppmv + dimensions = (horizontal_loop_extent, number_of_chemical_species) + type = real + kind = kind_phys + +[ccpp-table-properties] + name = make_ddt + type = scheme + dependencies = + +######################################################################## +[ccpp-arg-table] + name = make_ddt_run + type = scheme +[ vmr ] + standard_name = volume_mixing_ratio_ddt + dimensions = () + None = None + intent = inout diff --git a/test/unit_tests/sample_files/test_dependencies_rel_path.meta b/test/unit_tests/sample_files/test_dependencies_rel_path.meta new file mode 100644 index 00000000..d1970cd4 --- /dev/null +++ b/test/unit_tests/sample_files/test_dependencies_rel_path.meta @@ -0,0 +1,11 @@ +[ccpp-table-properties] + name = test_host + type = host + relative_path = ../../ccpp/physics/physics + dependencies = machine.F,physcons.F90,, + dependencies = GFDL_parse_tracers.F90,,rte-rrtmgp/rrtmgp/mo_gas_optics_rrtmgp.F90 + +######################################################################## +[ccpp-arg-table] + name = test_host + type = host diff --git a/test/unit_tests/sample_files/test_duplicate_variable.meta b/test/unit_tests/sample_files/test_duplicate_variable.meta new file mode 100644 index 00000000..efc66b86 --- /dev/null +++ b/test/unit_tests/sample_files/test_duplicate_variable.meta @@ -0,0 +1,21 @@ +[ccpp-table-properties] + name = temp_calc_adjust + type = scheme + dependencies = + +######################################################################## +[ccpp-arg-table] + name = temp_calc_adjust_run + type = scheme +[ temp ] + standard_name = index_of_water_vapor_specific_humidity + units = index + type = integer + intent = in + dimensions = () +[ temp ] + standard_name = index_of_water_vapor_specific_humidity + units = index + type = integer + intent = in + dimensions = () diff --git a/test/unit_tests/sample_files/test_host.meta b/test/unit_tests/sample_files/test_host.meta new file mode 100644 index 00000000..b0a7d603 --- /dev/null +++ b/test/unit_tests/sample_files/test_host.meta @@ -0,0 +1,34 @@ +[ccpp-table-properties] + name = test_host + type = host + dependencies = + +######################################################################## +[ccpp-arg-table] + name = test_host + type = host +[ col_start ] + standard_name = horizontal_loop_begin + type = integer + units = count + dimensions = () + protected = True +[ col_end ] + standard_name = horizontal_loop_end + type = integer + units = count + dimensions = () + protected = True +[ errmsg ] + standard_name = ccpp_error_message + long_name = Error message for error handling in CCPP + units = 1 + dimensions = () + type = character + kind = len=512 +[ errflg ] + standard_name = ccpp_error_flag + long_name = Error flag for error handling in CCPP + units = flag + dimensions = () + type = integer diff --git a/test/unit_tests/sample_files/test_invalid_intent.meta b/test/unit_tests/sample_files/test_invalid_intent.meta new file mode 100644 index 00000000..6f11169e --- /dev/null +++ b/test/unit_tests/sample_files/test_invalid_intent.meta @@ -0,0 +1,23 @@ +[ccpp-table-properties] + name = temp_calc_adjust + type = scheme + dependencies = + +######################################################################## 
+[ccpp-arg-table] + name = temp_calc_adjust_run + type = scheme +[ nbox ] + standard_name = horizontal_loop_extent + type = integer + units = count + dimensions = () + intent = in +[ timestep ] + standard_name = time_step_for_physics + long_name = time step + units = s + dimensions = () + type = real + kind = kind_phys + intent = banana diff --git a/test/unit_tests/sample_files/test_invalid_table_properties_type.meta b/test/unit_tests/sample_files/test_invalid_table_properties_type.meta new file mode 100644 index 00000000..45f43e83 --- /dev/null +++ b/test/unit_tests/sample_files/test_invalid_table_properties_type.meta @@ -0,0 +1,9 @@ +[ccpp-table-properties] + name = test_host + type = banana + dependencies = + +######################################################################## +[ccpp-arg-table] + name = test_host + type = host diff --git a/test/unit_tests/sample_files/test_mismatch_section_table_title.meta b/test/unit_tests/sample_files/test_mismatch_section_table_title.meta new file mode 100644 index 00000000..240b93c2 --- /dev/null +++ b/test/unit_tests/sample_files/test_mismatch_section_table_title.meta @@ -0,0 +1,9 @@ +[ccpp-table-properties] + name = banana + type = host + dependencies = + +######################################################################## +[ccpp-arg-table] + name = test_host + type = host diff --git a/test/unit_tests/sample_files/test_missing_intent.meta b/test/unit_tests/sample_files/test_missing_intent.meta new file mode 100644 index 00000000..57be3ad6 --- /dev/null +++ b/test/unit_tests/sample_files/test_missing_intent.meta @@ -0,0 +1,22 @@ +[ccpp-table-properties] + name = temp_calc_adjust + type = scheme + dependencies = + +######################################################################## +[ccpp-arg-table] + name = temp_calc_adjust_run + type = scheme +[ nbox ] + standard_name = horizontal_loop_extent + type = integer + units = count + dimensions = () + intent = in +[ timestep ] + standard_name = time_step_for_physics + long_name = time step + units = s + dimensions = () + type = real + kind = kind_phys diff --git a/test/unit_tests/sample_files/test_missing_table_name.meta b/test/unit_tests/sample_files/test_missing_table_name.meta new file mode 100644 index 00000000..b8deb22c --- /dev/null +++ b/test/unit_tests/sample_files/test_missing_table_name.meta @@ -0,0 +1,14 @@ +[ccpp-table-properties] + name = test_missing_table_name + type = scheme + dependencies = + +######################################################################## +[ccpp-arg-table] + type = host +[ col_start ] + standard_name = horizontal_loop_begin + type = integer + units = count + dimensions = () + protected = True diff --git a/test/unit_tests/sample_files/test_missing_table_type.meta b/test/unit_tests/sample_files/test_missing_table_type.meta new file mode 100644 index 00000000..98b5bd4f --- /dev/null +++ b/test/unit_tests/sample_files/test_missing_table_type.meta @@ -0,0 +1,14 @@ +[ccpp-table-properties] + name = test_host + type = scheme + dependencies = + +######################################################################## +[ccpp-arg-table] + name = test_host +[ col_start ] + standard_name = horizontal_loop_begin + type = integer + units = count + dimensions = () + protected = True diff --git a/test/unit_tests/sample_files/test_missing_units.meta b/test/unit_tests/sample_files/test_missing_units.meta new file mode 100644 index 00000000..1b9546f2 --- /dev/null +++ b/test/unit_tests/sample_files/test_missing_units.meta @@ -0,0 +1,16 @@ +[ccpp-table-properties] + 
name = temp_calc_adjust + type = scheme + dependencies = + +######################################################################## +[ccpp-arg-table] + name = temp_calc_adjust_run + type = scheme +[ timestep ] + standard_name = time_step_for_physics + long_name = time step + dimensions = () + type = real + kind = kind_phys + intent = in diff --git a/test/unit_tests/sample_files/test_multi_ccpp_arg_tables.meta b/test/unit_tests/sample_files/test_multi_ccpp_arg_tables.meta new file mode 100644 index 00000000..7d81172e --- /dev/null +++ b/test/unit_tests/sample_files/test_multi_ccpp_arg_tables.meta @@ -0,0 +1,115 @@ +[ccpp-table-properties] + name = vmr_type + type = ddt + dependencies = + +[ccpp-arg-table] + name = vmr_type + type = ddt +[ nvmr ] + standard_name = number_of_chemical_species + units = count + dimensions = () + type = integer +[ vmr_array ] + standard_name = array_of_volume_mixing_ratios + units = ppmv + dimensions = (horizontal_loop_extent, number_of_chemical_species) + type = real + kind = kind_phys + +######################################################################## +[ccpp-table-properties] + name = make_ddt + type = scheme + dependencies = + +[ccpp-arg-table] + name = make_ddt_run + type = scheme +[ nbox ] + standard_name = horizontal_loop_extent + type = integer + units = count + dimensions = () + intent = in +[ O3 ] + standard_name = ozone + units = ppmv + dimensions = (horizontal_loop_extent) + type = real + kind = kind_phys + intent = in +[ HNO3 ] + standard_name = nitric_acid + units = ppmv + dimensions = (horizontal_loop_extent) + type = real + kind = kind_phys + intent = in +[ vmr ] + standard_name = volume_mixing_ratio_ddt + dimensions = () + type = vmr_type + intent = inout +[ errmsg ] + standard_name = ccpp_error_message + long_name = Error message for error handling in CCPP + units = 1 | dimensions = () | type = character | kind = len=512 + intent = out +[ errflg ] + standard_name = ccpp_error_flag + long_name = Error flag for error handling in CCPP + units = flag + dimensions = () + type = integer + intent = out + +[ccpp-arg-table] + name = make_ddt_init + type = scheme +[ nbox ] + standard_name = horizontal_dimension + type = integer + units = count + dimensions = () + intent = in +[ vmr ] + standard_name = volume_mixing_ratio_ddt + dimensions = () + type = vmr_type + intent = out +[ errmsg ] + standard_name = ccpp_error_message + long_name = Error message for error handling in CCPP + units = 1 + dimensions = () + type = character + kind = len=512 + intent = out +[ errflg ] + standard_name = ccpp_error_flag + long_name = Error flag for error handling in CCPP + units = flag + dimensions = () + type = integer + intent = out + +[ccpp-arg-table] + name = make_ddt_finalize + type = scheme +[ errmsg ] + standard_name = ccpp_error_message + long_name = Error message for error handling in CCPP + units = 1 + dimensions = () + type = character + kind = len=512 + intent = out +[ errflg ] + standard_name = ccpp_error_flag + long_name = Error flag for error handling in CCPP + units = flag + dimensions = () + type = integer + intent = out diff --git a/test/unit_tests/sample_files/test_unknown_ddt_type.meta b/test/unit_tests/sample_files/test_unknown_ddt_type.meta new file mode 100644 index 00000000..e41eeefb --- /dev/null +++ b/test/unit_tests/sample_files/test_unknown_ddt_type.meta @@ -0,0 +1,14 @@ +[ccpp-table-properties] + name = make_ddt + type = scheme + dependencies = + +######################################################################## +[ccpp-arg-table] 
+ name = make_ddt_run + type = scheme +[ vmr ] + standard_name = volume_mixing_ratio_ddt + dimensions = () + type = banana + intent = inout diff --git a/test/unit_tests/sample_scheme_files/CCPPeq1_var_in_fort_meta.F90 b/test/unit_tests/sample_scheme_files/CCPPeq1_var_in_fort_meta.F90 new file mode 100644 index 00000000..20446f64 --- /dev/null +++ b/test/unit_tests/sample_scheme_files/CCPPeq1_var_in_fort_meta.F90 @@ -0,0 +1,38 @@ +! Test parameterization with no vertical level +! + +MODULE CCPPeq1_var_in_fort_meta + + USE ccpp_kinds, ONLY: kind_phys + + IMPLICIT NONE + PRIVATE + + PUBLIC :: CCPPeq1_var_in_fort_meta_run + +CONTAINS + + !> \section arg_table_CCPPeq1_var_in_fort_meta_run Argument Table + !! \htmlinclude arg_table_CCPPeq1_var_in_fort_meta_run.html + !! + subroutine CCPPeq1_var_in_fort_meta_run (foo, & +#ifdef CCPP + bar, & +#endif + errmsg, errflg) + + integer, intent(in) :: foo +#ifdef CCPP + real(kind_phys), intent(in) :: bar +#endif + character(len=512), intent(out) :: errmsg + integer, intent(out) :: errflg + + ! This routine currently does nothing + + errmsg = '' + errflg = 0 + + end subroutine CCPPeq1_var_in_fort_meta_run + +END MODULE CCPPeq1_var_in_fort_meta diff --git a/test/unit_tests/sample_scheme_files/CCPPeq1_var_in_fort_meta.meta b/test/unit_tests/sample_scheme_files/CCPPeq1_var_in_fort_meta.meta new file mode 100644 index 00000000..23220216 --- /dev/null +++ b/test/unit_tests/sample_scheme_files/CCPPeq1_var_in_fort_meta.meta @@ -0,0 +1,37 @@ +[ccpp-table-properties] + name = CCPPeq1_var_in_fort_meta + type = scheme + +######################################################################## +[ccpp-arg-table] + name = CCPPeq1_var_in_fort_meta_run + type = scheme +[ foo ] + standard_name = horizontal_loop_extent + type = integer + units = count + dimensions = () + intent = in +[ bar ] + standard_name = time_step_for_physics + long_name = time step + units = s + dimensions = () + type = real + kind = kind_phys + intent = in +[ errmsg ] + standard_name = ccpp_error_message + long_name = Error message for error handling in CCPP + units = 1 + dimensions = () + type = character + kind = len=512 + intent = out +[ errflg ] + standard_name = ccpp_error_flag + long_name = Error flag for error handling in CCPP + units = flag + dimensions = () + type = integer + intent = out diff --git a/test/unit_tests/sample_scheme_files/CCPPeq1_var_missing_in_fort.F90 b/test/unit_tests/sample_scheme_files/CCPPeq1_var_missing_in_fort.F90 new file mode 100644 index 00000000..85b9b370 --- /dev/null +++ b/test/unit_tests/sample_scheme_files/CCPPeq1_var_missing_in_fort.F90 @@ -0,0 +1,38 @@ +! Test parameterization with no vertical level +! + +MODULE CCPPeq1_var_missing_in_fort + + USE ccpp_kinds, ONLY: kind_phys + + IMPLICIT NONE + PRIVATE + + PUBLIC :: CCPPeq1_var_missing_in_fort_run + +CONTAINS + + !> \section arg_table_CCPPeq1_var_missing_in_fort_run Argument Table + !! \htmlinclude arg_table_CCPPeq1_var_missing_in_fort_run.html + !! + subroutine CCPPeq1_var_missing_in_fort_run (foo, & +#ifndef CCPP + bar, & +#endif + errmsg, errflg) + + integer, intent(in) :: foo +#ifndef CCPP + real(kind_phys), intent(in) :: bar +#endif + character(len=512), intent(out) :: errmsg + integer, intent(out) :: errflg + + ! 
This routine currently does nothing + + errmsg = '' + errflg = 0 + + end subroutine CCPPeq1_var_missing_in_fort_run + +END MODULE CCPPeq1_var_missing_in_fort diff --git a/test/unit_tests/sample_scheme_files/CCPPeq1_var_missing_in_fort.meta b/test/unit_tests/sample_scheme_files/CCPPeq1_var_missing_in_fort.meta new file mode 100644 index 00000000..157117ce --- /dev/null +++ b/test/unit_tests/sample_scheme_files/CCPPeq1_var_missing_in_fort.meta @@ -0,0 +1,37 @@ +[ccpp-table-properties] + name = CCPPeq1_var_missing_in_fort + type = scheme + +######################################################################## +[ccpp-arg-table] + name = CCPPeq1_var_missing_in_fort_run + type = scheme +[ foo ] + standard_name = horizontal_loop_extent + type = integer + units = count + dimensions = () + intent = in +[ bar ] + standard_name = time_step_for_physics + long_name = time step + units = s + dimensions = () + type = real + kind = kind_phys + intent = in +[ errmsg ] + standard_name = ccpp_error_message + long_name = Error message for error handling in CCPP + units = 1 + dimensions = () + type = character + kind = len=512 + intent = out +[ errflg ] + standard_name = ccpp_error_flag + long_name = Error flag for error handling in CCPP + units = flag + dimensions = () + type = integer + intent = out diff --git a/test/unit_tests/sample_scheme_files/CCPPeq1_var_missing_in_meta.F90 b/test/unit_tests/sample_scheme_files/CCPPeq1_var_missing_in_meta.F90 new file mode 100644 index 00000000..155db942 --- /dev/null +++ b/test/unit_tests/sample_scheme_files/CCPPeq1_var_missing_in_meta.F90 @@ -0,0 +1,38 @@ +! Test parameterization with no vertical level +! + +MODULE CCPPeq1_var_missing_in_meta + + USE ccpp_kinds, ONLY: kind_phys + + IMPLICIT NONE + PRIVATE + + PUBLIC :: CCPPeq1_var_missing_in_meta_finalize + +CONTAINS + + !> \section arg_table_CCPPeq1_var_missing_in_meta_finalize Argument Table + !! \htmlinclude arg_table_CCPPeq1_var_missing_in_meta_finalize.html + !! + subroutine CCPPeq1_var_missing_in_meta_finalize (foo, & +#ifdef CCPP + bar, & +#endif + errmsg, errflg) + + integer, intent(in) :: foo +#ifdef CCPP + real(kind_phys), intent(in) :: bar +#endif + character(len=512), intent(out) :: errmsg + integer, intent(out) :: errflg + + ! 
This routine currently does nothing + + errmsg = '' + errflg = 0 + + end subroutine CCPPeq1_var_missing_in_meta_finalize + +END MODULE CCPPeq1_var_missing_in_meta diff --git a/test/unit_tests/sample_scheme_files/CCPPeq1_var_missing_in_meta.meta b/test/unit_tests/sample_scheme_files/CCPPeq1_var_missing_in_meta.meta new file mode 100644 index 00000000..e416cf3c --- /dev/null +++ b/test/unit_tests/sample_scheme_files/CCPPeq1_var_missing_in_meta.meta @@ -0,0 +1,29 @@ +[ccpp-table-properties] + name = CCPPeq1_var_missing_in_meta + type = scheme + +######################################################################## +[ccpp-arg-table] + name = CCPPeq1_var_missing_in_meta_finalize + type = scheme +[ foo ] + standard_name = horizontal_loop_extent + type = integer + units = count + dimensions = () + intent = in +[ errmsg ] + standard_name = ccpp_error_message + long_name = Error message for error handling in CCPP + units = 1 + dimensions = () + type = character + kind = len=512 + intent = out +[ errflg ] + standard_name = ccpp_error_flag + long_name = Error flag for error handling in CCPP + units = flag + dimensions = () + type = integer + intent = out diff --git a/test/unit_tests/sample_scheme_files/CCPPgt1_var_in_fort_meta.F90 b/test/unit_tests/sample_scheme_files/CCPPgt1_var_in_fort_meta.F90 new file mode 100644 index 00000000..fed23ff0 --- /dev/null +++ b/test/unit_tests/sample_scheme_files/CCPPgt1_var_in_fort_meta.F90 @@ -0,0 +1,38 @@ +! Test parameterization with no vertical level +! + +MODULE CCPPgt1_var_in_fort_meta + + USE ccpp_kinds, ONLY: kind_phys + + IMPLICIT NONE + PRIVATE + + PUBLIC :: CCPPgt1_var_in_fort_meta_init + +CONTAINS + + !> \section arg_table_CCPPgt1_var_in_fort_meta_init Argument Table + !! \htmlinclude arg_table_CCPPgt1_var_in_fort_meta_init.html + !! + subroutine CCPPgt1_var_in_fort_meta_init (foo, & +#if CCPP > 1 + bar, & +#endif + errmsg, errflg) + + integer, intent(in) :: foo +#if CCPP > 1 + real(kind_phys), intent(in) :: bar +#endif + character(len=512), intent(out) :: errmsg + integer, intent(out) :: errflg + + ! 
This routine currently does nothing + + errmsg = '' + errflg = 0 + + end subroutine CCPPgt1_var_in_fort_meta_init + +END MODULE CCPPgt1_var_in_fort_meta diff --git a/test/unit_tests/sample_scheme_files/CCPPgt1_var_in_fort_meta.meta b/test/unit_tests/sample_scheme_files/CCPPgt1_var_in_fort_meta.meta new file mode 100644 index 00000000..c6da9e1c --- /dev/null +++ b/test/unit_tests/sample_scheme_files/CCPPgt1_var_in_fort_meta.meta @@ -0,0 +1,37 @@ +[ccpp-table-properties] + name = CCPPgt1_var_in_fort_meta + type = scheme + +######################################################################## +[ccpp-arg-table] + name = CCPPgt1_var_in_fort_meta_init + type = scheme +[ foo ] + standard_name = horizontal_loop_extent + type = integer + units = count + dimensions = () + intent = in +[ bar ] + standard_name = time_step_for_physics + long_name = time step + units = s + dimensions = () + type = real + kind = kind_phys + intent = in +[ errmsg ] + standard_name = ccpp_error_message + long_name = Error message for error handling in CCPP + units = 1 + dimensions = () + type = character + kind = len=512 + intent = out +[ errflg ] + standard_name = ccpp_error_flag + long_name = Error flag for error handling in CCPP + units = flag + dimensions = () + type = integer + intent = out diff --git a/test/unit_tests/sample_scheme_files/CCPPnotset_var_missing_in_meta.F90 b/test/unit_tests/sample_scheme_files/CCPPnotset_var_missing_in_meta.F90 new file mode 100644 index 00000000..14a49168 --- /dev/null +++ b/test/unit_tests/sample_scheme_files/CCPPnotset_var_missing_in_meta.F90 @@ -0,0 +1,38 @@ +! Test parameterization with no vertical level +! + +MODULE CCPPnotset_var_missing_in_meta + + USE ccpp_kinds, ONLY: kind_phys + + IMPLICIT NONE + PRIVATE + + PUBLIC :: CCPPnotset_var_missing_in_meta_run + +CONTAINS + + !> \section arg_table_CCPPnotset_var_missing_in_meta_run Argument Table + !! \htmlinclude arg_table_CCPPnotset_var_missing_in_meta_run.html + !! + subroutine CCPPnotset_var_missing_in_meta_run (foo, & +#ifndef CCPP + bar, & +#endif + errmsg, errflg) + + integer, intent(in) :: foo +#ifndef CCPP + real(kind_phys), intent(in) :: bar +#endif + character(len=512), intent(out) :: errmsg + integer, intent(out) :: errflg + + ! 
This routine currently does nothing + + errmsg = '' + errflg = 0 + + end subroutine CCPPnotset_var_missing_in_meta_run + +END MODULE CCPPnotset_var_missing_in_meta diff --git a/test/unit_tests/sample_scheme_files/CCPPnotset_var_missing_in_meta.meta b/test/unit_tests/sample_scheme_files/CCPPnotset_var_missing_in_meta.meta new file mode 100644 index 00000000..4fb2c868 --- /dev/null +++ b/test/unit_tests/sample_scheme_files/CCPPnotset_var_missing_in_meta.meta @@ -0,0 +1,29 @@ +[ccpp-table-properties] + name = CCPPnotset_var_missing_in_meta + type = scheme + +######################################################################## +[ccpp-arg-table] + name = CCPPnotset_var_missing_in_meta_run + type = scheme +[ foo ] + standard_name = horizontal_loop_extent + type = integer + units = count + dimensions = () + intent = in +[ errmsg ] + standard_name = ccpp_error_message + long_name = Error message for error handling in CCPP + units = 1 + dimensions = () + type = character + kind = len=512 + intent = out +[ errflg ] + standard_name = ccpp_error_flag + long_name = Error flag for error handling in CCPP + units = flag + dimensions = () + type = integer + intent = out diff --git a/test/unit_tests/sample_scheme_files/invalid_dummy_arg.F90 b/test/unit_tests/sample_scheme_files/invalid_dummy_arg.F90 new file mode 100644 index 00000000..16f93864 --- /dev/null +++ b/test/unit_tests/sample_scheme_files/invalid_dummy_arg.F90 @@ -0,0 +1,43 @@ +! Test parameterization with no vertical level +! + +MODULE invalid_dummy_arg + + USE ccpp_kinds, ONLY: kind_phys + + IMPLICIT NONE + PRIVATE + + PUBLIC :: invalid_dummy_arg_run + +CONTAINS + + !> \section arg_table_invalid_dummy_arg_run Argument Table + !! \htmlinclude arg_table_invalid_dummy_arg_run.html + !! + subroutine invalid_dummy_arg_run(foo, timestep, temp_prev, temp_layer, qv, ps, & + errmsg, errflg) + + integer, intent(in) :: foo + real(kind_phys), intent(in) :: timestep + real(kind_phys), intent(inout) :: qv(:) + real(kind_phys), intent(inout) :: ps(:) + REAL(kind_phys), intent(in) :: woohoo(:) + REAL(kind_phys), intent(inout) :: temp_layer(foo) + character(len=512), intent(out) :: errmsg + integer, intent(out) :: errflg + !---------------------------------------------------------------- + + integer :: col_index + + errmsg = '' + errflg = 0 + + do col_index = 1, foo + temp_layer(col_index) = temp_layer(col_index) + temp_prev(col_index) + qv(col_index) = qv(col_index) + 1.0_kind_phys + end do + + END SUBROUTINE invalid_dummy_arg_run + +END MODULE invalid_dummy_arg diff --git a/test/unit_tests/sample_scheme_files/invalid_dummy_arg.meta b/test/unit_tests/sample_scheme_files/invalid_dummy_arg.meta new file mode 100644 index 00000000..d3364ce7 --- /dev/null +++ b/test/unit_tests/sample_scheme_files/invalid_dummy_arg.meta @@ -0,0 +1,66 @@ +[ccpp-table-properties] + name = invalid_dummy_arg + type = scheme + +######################################################################## +[ccpp-arg-table] + name = invalid_dummy_arg_run + type = scheme +[ foo ] + standard_name = horizontal_loop_extent + type = integer + units = count + dimensions = () + intent = in +[ timestep ] + standard_name = time_step_for_physics + long_name = time step + units = s + dimensions = () + type = real + kind = kind_phys + intent = in +[ temp_prev ] + standard_name = potential_temperature_at_previous_timestep + units = K + dimensions = (horizontal_loop_extent) + type = real + kind = kind_phys + intent = in +[ temp_layer ] + standard_name = potential_temperature + units = K + dimensions = 
(horizontal_loop_extent) + type = real + kind = kind_phys + intent = inout +[ qv ] + standard_name = water_vapor_specific_humidity + units = kg kg-1 + dimensions = (horizontal_loop_extent) + type = real + kind = kind_phys + intent = inout +[ ps ] + standard_name = surface_air_pressure + state_variable = true + type = real + kind = kind_phys + units = Pa + dimensions = (horizontal_loop_extent) + intent = inout +[ errmsg ] + standard_name = ccpp_error_message + long_name = Error message for error handling in CCPP + units = 1 + dimensions = () + type = character + kind = len=512 + intent = out +[ errflg ] + standard_name = ccpp_error_flag + long_name = Error flag for error handling in CCPP + units = flag + dimensions = () + type = integer + intent = out diff --git a/test/unit_tests/sample_scheme_files/invalid_subr_stmnt.F90 b/test/unit_tests/sample_scheme_files/invalid_subr_stmnt.F90 new file mode 100644 index 00000000..98100553 --- /dev/null +++ b/test/unit_tests/sample_scheme_files/invalid_subr_stmnt.F90 @@ -0,0 +1,30 @@ +! Test parameterization with no vertical level +! + +MODULE invalid_subr_stmnt + + USE ccpp_kinds, ONLY: kind_phys + + IMPLICIT NONE + PRIVATE + + PUBLIC :: invalid_subr_stmnt_init + +CONTAINS + + !> \section arg_table_invalid_subr_stmnt_init Argument Table + !! \htmlinclude arg_table_invalid_subr_stmnt_init.html + !! + subroutine invalid_subr_stmnt_init (woohoo, errflg) + + character(len=512), intent(out) :: errmsg + integer, intent(out) :: errflg + + ! This routine currently does nothing + + errmsg = '' + errflg = 0 + + end subroutine invalid_subr_stmnt_init + +END MODULE invalid_subr_stmnt diff --git a/test/unit_tests/sample_scheme_files/invalid_subr_stmnt.meta b/test/unit_tests/sample_scheme_files/invalid_subr_stmnt.meta new file mode 100644 index 00000000..099c7686 --- /dev/null +++ b/test/unit_tests/sample_scheme_files/invalid_subr_stmnt.meta @@ -0,0 +1,23 @@ +[ccpp-table-properties] + name = invalid_subr_stmnt + type = scheme + +######################################################################## +[ccpp-arg-table] + name = invalid_subr_stmnt_init + type = scheme +[ errmsg ] + standard_name = ccpp_error_message + long_name = Error message for error handling in CCPP + units = 1 + dimensions = () + type = character + kind = len=512 + intent = out +[ errflg ] + standard_name = ccpp_error_flag + long_name = Error flag for error handling in CCPP + units = flag + dimensions = () + type = integer + intent = out diff --git a/test/unit_tests/sample_scheme_files/mismatch_intent.F90 b/test/unit_tests/sample_scheme_files/mismatch_intent.F90 new file mode 100644 index 00000000..abcf7bc0 --- /dev/null +++ b/test/unit_tests/sample_scheme_files/mismatch_intent.F90 @@ -0,0 +1,75 @@ +! Test parameterization with no vertical level +! + +MODULE mismatch_intent + + USE ccpp_kinds, ONLY: kind_phys + + IMPLICIT NONE + PRIVATE + + PUBLIC :: mismatch_intent_init + PUBLIC :: mismatch_intent_run + PUBLIC :: mismatch_intent_finalize + +CONTAINS + + !> \section arg_table_mismatch_intent_run Argument Table + !! \htmlinclude arg_table_mismatch_intent_run.html + !! 
+ subroutine mismatch_intent_run(foo, timestep, temp_prev, temp_layer, qv, ps, & + errmsg, errflg) + + integer, intent(in) :: foo + real(kind_phys), intent(in) :: timestep + real(kind_phys), intent(inout) :: qv(:) + real(kind_phys), intent(inout) :: ps(:) + REAL(kind_phys), intent(in) :: temp_prev(:) + REAL(kind_phys), intent(inout) :: temp_layer(foo) + character(len=512), intent(out) :: errmsg + integer, intent(out) :: errflg + !---------------------------------------------------------------- + + integer :: col_index + + errmsg = '' + errflg = 0 + + do col_index = 1, foo + temp_layer(col_index) = temp_layer(col_index) + temp_prev(col_index) + qv(col_index) = qv(col_index) + 1.0_kind_phys + end do + + END SUBROUTINE mismatch_intent_run + + !> \section arg_table_mismatch_intent_init Argument Table + !! \htmlinclude arg_table_mismatch_intent_init.html + !! + subroutine mismatch_intent_init (errmsg, errflg) + + character(len=512), intent(out) :: errmsg + integer, intent(out) :: errflg + + ! This routine currently does nothing + + errmsg = '' + errflg = 0 + + end subroutine mismatch_intent_init + + !> \section arg_table_mismatch_intent_finalize Argument Table + !! \htmlinclude arg_table_mismatch_intent_finalize.html + !! + subroutine mismatch_intent_finalize (errmsg, errflg) + + character(len=512), intent(out) :: errmsg + integer, intent(out) :: errflg + + ! This routine currently does nothing + + errmsg = '' + errflg = 0 + + end subroutine mismatch_intent_finalize + +END MODULE mismatch_intent diff --git a/test/unit_tests/sample_scheme_files/mismatch_intent.meta b/test/unit_tests/sample_scheme_files/mismatch_intent.meta new file mode 100644 index 00000000..64b8733a --- /dev/null +++ b/test/unit_tests/sample_scheme_files/mismatch_intent.meta @@ -0,0 +1,102 @@ +[ccpp-table-properties] + name = mismatch_intent + type = scheme + +######################################################################## +[ccpp-arg-table] + name = mismatch_intent_run + type = scheme +[ foo ] + standard_name = horizontal_loop_extent + type = integer + units = count + dimensions = () + intent = in +[ timestep ] + standard_name = time_step_for_physics + long_name = time step + units = s + dimensions = () + type = real + kind = kind_phys + intent = in +[ temp_prev ] + standard_name = potential_temperature_at_previous_timestep + units = K + dimensions = (horizontal_loop_extent) + type = real + kind = kind_phys + intent = in +[ temp_layer ] + standard_name = potential_temperature + units = K + dimensions = () + type = real + kind = kind_phys + intent = in +[ qv ] + standard_name = water_vapor_specific_humidity + units = kg kg-1 + dimensions = (horizontal_loop_extent) + type = real + kind = kind_fizz + intent = inout +[ ps ] + standard_name = surface_air_pressure + state_variable = true + type = integer + kind = kind_phys + units = Pa + dimensions = (horizontal_loop_extent) + intent = inout +[ errmsg ] + standard_name = ccpp_error_message + long_name = Error message for error handling in CCPP + units = 1 + dimensions = () + type = character + kind = len=512 + intent = out +[ errflg ] + standard_name = ccpp_error_flag + long_name = Error flag for error handling in CCPP + units = flag + dimensions = () + type = integer + intent = out +[ccpp-arg-table] + name = mismatch_intent_init + type = scheme +[ errmsg ] + standard_name = ccpp_error_message + long_name = Error message for error handling in CCPP + units = 1 + dimensions = () + type = character + kind = len=512 + intent = out +[ errflg ] + standard_name = ccpp_error_flag + 
long_name = Error flag for error handling in CCPP + units = flag + dimensions = () + type = integer + intent = out +[ccpp-arg-table] + name = mismatch_intent_finalize + type = scheme +[ errmsg ] + standard_name = ccpp_error_message + long_name = Error message for error handling in CCPP + units = 1 + dimensions = () + type = character + kind = len=512 + intent = out +[ errflg ] + standard_name = ccpp_error_flag + long_name = Error flag for error handling in CCPP + units = flag + dimensions = () + type = integer + intent = out diff --git a/test/unit_tests/sample_scheme_files/missing_arg_table.F90 b/test/unit_tests/sample_scheme_files/missing_arg_table.F90 new file mode 100644 index 00000000..9d0a02af --- /dev/null +++ b/test/unit_tests/sample_scheme_files/missing_arg_table.F90 @@ -0,0 +1,75 @@ +! Test parameterization with no vertical level +! + +MODULE missing_arg_table + + USE ccpp_kinds, ONLY: kind_phys + + IMPLICIT NONE + PRIVATE + + PUBLIC :: missing_arg_table_init + PUBLIC :: missing_arg_table_run + PUBLIC :: missing_arg_table_finalize + +CONTAINS + + !> \section arg_table_missing_arg_table_run Argument Table + !! \htmlinclude arg_table_missing_arg_table_run.html + !! + subroutine missing_arg_table_run(foo, timestep, temp_prev, temp_layer, qv, ps, & + errmsg, errflg) + + integer, intent(in) :: foo + real(kind_phys), intent(in) :: timestep + real(kind_phys), intent(inout) :: qv(:) + real(kind_phys), intent(inout) :: ps(:) + REAL(kind_phys), intent(in) :: temp_prev(:) + REAL(kind_phys), intent(inout) :: temp_layer(foo) + character(len=512), intent(out) :: errmsg + integer, intent(out) :: errflg + !---------------------------------------------------------------- + + integer :: col_index + + errmsg = '' + errflg = 0 + + do col_index = 1, foo + temp_layer(col_index) = temp_layer(col_index) + temp_prev(col_index) + qv(col_index) = qv(col_index) + 1.0_kind_phys + end do + + END SUBROUTINE missing_arg_table_run + + !> \section arg_table_missing_arg_table_init Argument Table + !! \htmlinclude arg_table_missing_arg_table_init.html + !! + subroutine missing_arg_table_init (errmsg, errflg) + + character(len=512), intent(out) :: errmsg + integer, intent(out) :: errflg + + ! This routine currently does nothing + + errmsg = '' + errflg = 0 + + end subroutine missing_arg_table_init + + !> \section arg_table_missing_arg_table_finalize Argument Table + !! \htmlinclude arg_table_missing_arg_table_finalize.html + !! + subroutine missing_arg_table_finalize (errmsg, errflg) + + character(len=512), intent(out) :: errmsg + integer, intent(out) :: errflg + + ! 
This routine currently does nothing + + errmsg = '' + errflg = 0 + + end subroutine missing_arg_table_finalize + +END MODULE missing_arg_table diff --git a/test/unit_tests/sample_scheme_files/missing_arg_table.meta b/test/unit_tests/sample_scheme_files/missing_arg_table.meta new file mode 100644 index 00000000..a221dcbd --- /dev/null +++ b/test/unit_tests/sample_scheme_files/missing_arg_table.meta @@ -0,0 +1,41 @@ +[ccpp-table-properties] + name = missing_arg_table + type = scheme + +######################################################################## +[ccpp-arg-table] + name = missing_arg_table_init + type = scheme +[ errmsg ] + standard_name = ccpp_error_message + long_name = Error message for error handling in CCPP + units = 1 + dimensions = () + type = character + kind = len=512 + intent = out +[ errflg ] + standard_name = ccpp_error_flag + long_name = Error flag for error handling in CCPP + units = flag + dimensions = () + type = integer + intent = out +[ccpp-arg-table] + name = missing_arg_table_finalize + type = scheme +[ errmsg ] + standard_name = ccpp_error_message + long_name = Error message for error handling in CCPP + units = 1 + dimensions = () + type = character + kind = len=512 + intent = out +[ errflg ] + standard_name = ccpp_error_flag + long_name = Error flag for error handling in CCPP + units = flag + dimensions = () + type = integer + intent = out diff --git a/test/unit_tests/sample_scheme_files/missing_fort_header.F90 b/test/unit_tests/sample_scheme_files/missing_fort_header.F90 new file mode 100644 index 00000000..92981eb5 --- /dev/null +++ b/test/unit_tests/sample_scheme_files/missing_fort_header.F90 @@ -0,0 +1,73 @@ +! Test parameterization with no vertical level +! + +MODULE missing_fort_header + + USE ccpp_kinds, ONLY: kind_phys + + IMPLICIT NONE + PRIVATE + + PUBLIC :: missing_fort_header_init + PUBLIC :: missing_fort_header_run + PUBLIC :: missing_fort_header_finalize + +CONTAINS + + !> \section fort_header_missing_arg_table_run Argument Table + !! \htmlinclude fort_header_missing_arg_table_run.html + !! + subroutine missing_fort_header_run(foo, timestep, temp_prev, temp_layer, qv, ps, & + errmsg, errflg) + + integer, intent(in) :: foo + real(kind_phys), intent(in) :: timestep + real(kind_phys), intent(inout) :: qv(:) + real(kind_phys), intent(inout) :: ps(:) + REAL(kind_phys), intent(in) :: temp_prev(:) + REAL(kind_phys), intent(inout) :: temp_layer(foo) + character(len=512), intent(out) :: errmsg + integer, intent(out) :: errflg + !---------------------------------------------------------------- + + integer :: col_index + + errmsg = '' + errflg = 0 + + do col_index = 1, foo + temp_layer(col_index) = temp_layer(col_index) + temp_prev(col_index) + qv(col_index) = qv(col_index) + 1.0_kind_phys + end do + + END SUBROUTINE missing_fort_header_run + + !> \section fort_header_missing_arg_table_init Argument Table + !! \htmlinclude fort_header_missing_arg_table_init.html + !! + subroutine missing_fort_header_init (errmsg, errflg) + + character(len=512), intent(out) :: errmsg + integer, intent(out) :: errflg + + ! This routine currently does nothing + + errmsg = '' + errflg = 0 + + end subroutine missing_fort_header_init + + !! + subroutine missing_fort_header_finalize (errmsg, errflg) + + character(len=512), intent(out) :: errmsg + integer, intent(out) :: errflg + + ! 
This routine currently does nothing + + errmsg = '' + errflg = 0 + + end subroutine missing_fort_header_finalize + +END MODULE missing_fort_header diff --git a/test/unit_tests/sample_scheme_files/missing_fort_header.meta b/test/unit_tests/sample_scheme_files/missing_fort_header.meta new file mode 100644 index 00000000..4767dc63 --- /dev/null +++ b/test/unit_tests/sample_scheme_files/missing_fort_header.meta @@ -0,0 +1,102 @@ +[ccpp-table-properties] + name = missing_fort_header + type = scheme + +######################################################################## +[ccpp-arg-table] + name = missing_fort_header_run + type = scheme +[ foo ] + standard_name = horizontal_loop_extent + type = integer + units = count + dimensions = () + intent = in +[ timestep ] + standard_name = time_step_for_physics + long_name = time step + units = s + dimensions = () + type = real + kind = kind_phys + intent = in +[ temp_prev ] + standard_name = potential_temperature_at_previous_timestep + units = K + dimensions = (horizontal_loop_extent) + type = real + kind = kind_phys + intent = in +[ temp_layer ] + standard_name = potential_temperature + units = K + dimensions = (horizontal_loop_extent) + type = real + kind = kind_phys + intent = inout +[ qv ] + standard_name = water_vapor_specific_humidity + units = kg kg-1 + dimensions = (horizontal_loop_extent) + type = real + kind = kind_phys + intent = inout +[ ps ] + standard_name = surface_air_pressure + state_variable = true + type = real + kind = kind_phys + units = Pa + dimensions = (horizontal_loop_extent) + intent = inout +[ errmsg ] + standard_name = ccpp_error_message + long_name = Error message for error handling in CCPP + units = 1 + dimensions = () + type = character + kind = len=512 + intent = out +[ errflg ] + standard_name = ccpp_error_flag + long_name = Error flag for error handling in CCPP + units = flag + dimensions = () + type = integer + intent = out +[ccpp-arg-table] + name = missing_fort_header_init + type = scheme +[ errmsg ] + standard_name = ccpp_error_message + long_name = Error message for error handling in CCPP + units = 1 + dimensions = () + type = character + kind = len=512 + intent = out +[ errflg ] + standard_name = ccpp_error_flag + long_name = Error flag for error handling in CCPP + units = flag + dimensions = () + type = integer + intent = out +[ccpp-arg-table] + name = missing_fort_header_finalize + type = scheme +[ errmsg ] + standard_name = ccpp_error_message + long_name = Error message for error handling in CCPP + units = 1 + dimensions = () + type = character + kind = len=512 + intent = out +[ errflg ] + standard_name = ccpp_error_flag + long_name = Error flag for error handling in CCPP + units = flag + dimensions = () + type = integer + intent = out diff --git a/test/unit_tests/sample_scheme_files/reorder.F90 b/test/unit_tests/sample_scheme_files/reorder.F90 new file mode 100644 index 00000000..d3c92530 --- /dev/null +++ b/test/unit_tests/sample_scheme_files/reorder.F90 @@ -0,0 +1,68 @@ +MODULE reorder + + USE ccpp_kinds, ONLY: kind_phys + + IMPLICIT NONE + PRIVATE + + PUBLIC :: reorder_init + PUBLIC :: reorder_run + PUBLIC :: reorder_finalize + +CONTAINS + + !> \section arg_table_reorder_run Argument Table + !! \htmlinclude arg_table_reorder_run.html + !! 
+ subroutine reorder_run(foo, timestep, temp_prev, temp_layer, qv, ps, & + errmsg, errflg) + + integer, intent(in) :: foo + real(kind_phys), intent(in) :: timestep + real(kind_phys), intent(inout) :: qv(:) + real(kind_phys), intent(inout) :: ps(:) + REAL(kind_phys), intent(in) :: temp_prev(:) + REAL(kind_phys), intent(inout) :: temp_layer(foo) + character(len=512), intent(out) :: errmsg + integer, intent(out) :: errflg + !---------------------------------------------------------------- + + integer :: col_index + + errmsg = '' + errflg = 0 + + do col_index = 1, foo + temp_layer(col_index) = temp_layer(col_index) + temp_prev(col_index) + qv(col_index) = qv(col_index) + 1.0_kind_phys + end do + + END SUBROUTINE reorder_run + + !> \section arg_table_reorder_init Argument Table + !! \htmlinclude arg_table_reorder_init.html + !! + subroutine reorder_init (errmsg, errflg) + + character(len=512), intent(out) :: errmsg + integer, intent(out) :: errflg + + errmsg = '' + errflg = 0 + + end subroutine reorder_init + + !> \section arg_table_reorder_finalize Argument Table + !! \htmlinclude arg_table_reorder_finalize.html + !! + subroutine reorder_finalize (errmsg, errflg) + + character(len=512), intent(out) :: errmsg + integer, intent(out) :: errflg + + errmsg = '' + errflg = 0 + + end subroutine reorder_finalize + +END MODULE reorder diff --git a/test/unit_tests/sample_scheme_files/reorder.meta b/test/unit_tests/sample_scheme_files/reorder.meta new file mode 100644 index 00000000..1a64ebbf --- /dev/null +++ b/test/unit_tests/sample_scheme_files/reorder.meta @@ -0,0 +1,102 @@ +[ccpp-table-properties] + name = reorder + type = scheme + +######################################################################## +[ccpp-arg-table] + name = reorder_finalize + type = scheme +[ errmsg ] + standard_name = ccpp_error_message + long_name = Error message for error handling in CCPP + units = 1 + dimensions = () + type = character + kind = len=512 + intent = out +[ errflg ] + standard_name = ccpp_error_flag + long_name = Error flag for error handling in CCPP + units = flag + dimensions = () + type = integer + intent = out +[ccpp-arg-table] + name = reorder_run + type = scheme +[ foo ] + standard_name = horizontal_loop_extent + type = integer + units = count + dimensions = () + intent = in +[ timestep ] + standard_name = time_step_for_physics + long_name = time step + units = s + dimensions = () + type = real + kind = kind_phys + intent = in +[ temp_prev ] + standard_name = potential_temperature_at_previous_timestep + units = K + dimensions = (horizontal_loop_extent) + type = real + kind = kind_phys + intent = in +[ temp_layer ] + standard_name = potential_temperature + units = K + dimensions = (horizontal_loop_extent) + type = real + kind = kind_phys + intent = inout +[ qv ] + standard_name = water_vapor_specific_humidity + units = kg kg-1 + dimensions = (horizontal_loop_extent) + type = real + kind = kind_phys + intent = inout +[ ps ] + standard_name = surface_air_pressure + state_variable = true + type = real + kind = kind_phys + units = Pa + dimensions = (horizontal_loop_extent) + intent = inout +[ errmsg ] + standard_name = ccpp_error_message + long_name = Error message for error handling in CCPP + units = 1 + dimensions = () + type = character + kind = len=512 + intent = out +[ errflg ] + standard_name = ccpp_error_flag + long_name = Error flag for error handling in CCPP + units = flag + dimensions = () + type = integer + intent = out +[ccpp-arg-table] + name = reorder_init + type = scheme +[ errmsg ] + 
standard_name = ccpp_error_message + long_name = Error message for error handling in CCPP + units = 1 + dimensions = () + type = character + kind = len=512 + intent = out +[ errflg ] + standard_name = ccpp_error_flag + long_name = Error flag for error handling in CCPP + units = flag + dimensions = () + type = integer + intent = out diff --git a/test/unit_tests/sample_scheme_files/temp_adjust.F90 b/test/unit_tests/sample_scheme_files/temp_adjust.F90 new file mode 100644 index 00000000..2dcebe31 --- /dev/null +++ b/test/unit_tests/sample_scheme_files/temp_adjust.F90 @@ -0,0 +1,75 @@ +! Test parameterization with no vertical level +! + +MODULE temp_adjust + + USE ccpp_kinds, ONLY: kind_phys + + IMPLICIT NONE + PRIVATE + + PUBLIC :: temp_adjust_init + PUBLIC :: temp_adjust_run + PUBLIC :: temp_adjust_finalize + +CONTAINS + + !> \section arg_table_temp_adjust_run Argument Table + !! \htmlinclude arg_table_temp_adjust_run.html + !! + subroutine temp_adjust_run(foo, timestep, temp_prev, temp_layer, qv, ps, & + errmsg, errflg) + + integer, intent(in) :: foo + real(kind_phys), intent(in) :: timestep + real(kind_phys), intent(inout) :: qv(:) + real(kind_phys), intent(inout) :: ps(:) + REAL(kind_phys), intent(in) :: temp_prev(:) + REAL(kind_phys), intent(inout) :: temp_layer(foo) + character(len=512), intent(out) :: errmsg + integer, intent(out) :: errflg + !---------------------------------------------------------------- + + integer :: col_index + + errmsg = '' + errflg = 0 + + do col_index = 1, foo + temp_layer(col_index) = temp_layer(col_index) + temp_prev(col_index) + qv(col_index) = qv(col_index) + 1.0_kind_phys + end do + + END SUBROUTINE temp_adjust_run + + !> \section arg_table_temp_adjust_init Argument Table + !! \htmlinclude arg_table_temp_adjust_init.html + !! + subroutine temp_adjust_init (errmsg, errflg) + + character(len=512), intent(out) :: errmsg + integer, intent(out) :: errflg + + ! This routine currently does nothing + + errmsg = '' + errflg = 0 + + end subroutine temp_adjust_init + + !> \section arg_table_temp_adjust_finalize Argument Table + !! \htmlinclude arg_table_temp_adjust_finalize.html + !! + subroutine temp_adjust_finalize (errmsg, errflg) + + character(len=512), intent(out) :: errmsg + integer, intent(out) :: errflg + + ! 
This routine currently does nothing + + errmsg = '' + errflg = 0 + + end subroutine temp_adjust_finalize + +END MODULE temp_adjust diff --git a/test/unit_tests/sample_scheme_files/temp_adjust.meta b/test/unit_tests/sample_scheme_files/temp_adjust.meta new file mode 100644 index 00000000..9ff7d48b --- /dev/null +++ b/test/unit_tests/sample_scheme_files/temp_adjust.meta @@ -0,0 +1,102 @@ +[ccpp-table-properties] + name = temp_adjust + type = scheme + +######################################################################## +[ccpp-arg-table] + name = temp_adjust_run + type = scheme +[ foo ] + standard_name = horizontal_loop_extent + type = integer + units = count + dimensions = () + intent = in +[ timestep ] + standard_name = time_step_for_physics + long_name = time step + units = s + dimensions = () + type = real + kind = kind_phys + intent = in +[ temp_prev ] + standard_name = potential_temperature_at_previous_timestep + units = K + dimensions = (horizontal_loop_extent) + type = real + kind = kind_phys + intent = in +[ temp_layer ] + standard_name = potential_temperature + units = K + dimensions = (horizontal_loop_extent) + type = real + kind = kind_phys + intent = inout +[ qv ] + standard_name = water_vapor_specific_humidity + units = kg kg-1 + dimensions = (horizontal_loop_extent) + type = real + kind = kind_phys + intent = inout +[ ps ] + standard_name = surface_air_pressure + state_variable = true + type = real + kind = kind_phys + units = Pa + dimensions = (horizontal_loop_extent) + intent = inout +[ errmsg ] + standard_name = ccpp_error_message + long_name = Error message for error handling in CCPP + units = 1 + dimensions = () + type = character + kind = len=512 + intent = out +[ errflg ] + standard_name = ccpp_error_flag + long_name = Error flag for error handling in CCPP + units = flag + dimensions = () + type = integer + intent = out +[ccpp-arg-table] + name = temp_adjust_init + type = scheme +[ errmsg ] + standard_name = ccpp_error_message + long_name = Error message for error handling in CCPP + units = 1 + dimensions = () + type = character + kind = len=512 + intent = out +[ errflg ] + standard_name = ccpp_error_flag + long_name = Error flag for error handling in CCPP + units = flag + dimensions = () + type = integer + intent = out +[ccpp-arg-table] + name = temp_adjust_finalize + type = scheme +[ errmsg ] + standard_name = ccpp_error_message + long_name = Error message for error handling in CCPP + units = 1 + dimensions = () + type = character + kind = len=512 + intent = out +[ errflg ] + standard_name = ccpp_error_flag + long_name = Error flag for error handling in CCPP + units = flag + dimensions = () + type = integer + intent = out diff --git a/test/unit_tests/test_metadata_scheme_file.py b/test/unit_tests/test_metadata_scheme_file.py new file mode 100644 index 00000000..0571da37 --- /dev/null +++ b/test/unit_tests/test_metadata_scheme_file.py @@ -0,0 +1,280 @@ +#! /usr/bin/env python + +""" +----------------------------------------------------------------------- + Description: capgen needs to compare a metadata header against the + associated CCPP Fortran interface routine. This set of + tests is testing the parse_scheme_files function in + ccpp_capgen.py which performs the operations in the first + bullet below. Each test calls this function. 
+ + * This script contains unit tests that do the following: + 1) Read a metadata file (to collect the metadata headers) + 2) Read the associated CCPP Fortran scheme file (to + collect Fortran interfaces) + 3) Compare the metadata header against the Fortran + + * Tests include: + - Correctly identify when the metadata file matches the + Fortran, even if the routines are not in the same order + - Correctly detect a missing metadata header + - Correctly detect a missing Fortran interface + - Correctly detect a mismatch between the metadata and the + Fortran + - Correctly detect invalid Fortran subroutine statements, + invalid dummy argument statements, and invalid Fortran + between the subroutine statement and the end of the + variable declaration block. + - Correctly interpret Fortran with preprocessor logic + which affects the subroutine statement and/or the dummy + argument statements + - Correctly interpret Fortran with preprocessor logic + which affects the subroutine statement and/or the dummy + argument statements resulting in a mismatch between the + metadata header and the Fortran + - Correctly interpret Fortran with preprocessor logic + which affects the subroutine statement and/or the dummy + argument statements resulting in incorrect Fortran + + Assumptions: + + Command line arguments: none + + Usage: python test_metadata_scheme_file.py # run the unit tests +----------------------------------------------------------------------- +""" +import sys +import os +import logging +import unittest + +_TEST_DIR = os.path.dirname(os.path.abspath(__file__)) +_SCRIPTS_DIR = os.path.abspath(os.path.join(_TEST_DIR, os.pardir, + os.pardir, "scripts")) +if not os.path.exists(_SCRIPTS_DIR): + raise ImportError("Cannot find scripts directory") + +sys.path.append(_SCRIPTS_DIR) + +# pylint: disable=wrong-import-position +from ccpp_capgen import parse_scheme_files +# pylint: enable=wrong-import-position + +class MetadataHeaderTestCase(unittest.TestCase): + """Unit tests for parse_scheme_files""" + + def setUp(self): + """Setup important directories and logging""" + self._sample_files_dir = os.path.join(_TEST_DIR, "sample_scheme_files") + self._logger = logging.getLogger(self.__class__.__name__) + + def test_good_scheme_file(self): + """Test that good metadata file matches the Fortran, with routines in the same order """ + #Setup + scheme_files = [os.path.join(self._sample_files_dir, "temp_adjust.meta")] + preproc_defs = {} + #Exercise + scheme_headers, table_dict = parse_scheme_files(scheme_files, preproc_defs, + self._logger) + #Verify size of returned list equals number of scheme headers in the test file + # and that header (subroutine) names are 'temp_adjust_[init,run,finalize]' + self.assertEqual(len(scheme_headers), 3) + #Verify header titles + titles = [elem.title for elem in scheme_headers] + self.assertTrue('temp_adjust_init' in titles) + self.assertTrue('temp_adjust_run' in titles) + self.assertTrue('temp_adjust_finalize' in titles) + #Verify size and name of table_dict matches scheme name + self.assertEqual(len(table_dict), 1) + self.assertTrue('temp_adjust' in table_dict) + + def test_reordered_scheme_file(self): + """Test that metadata file matches the Fortran when the routines are not in the same order """ + #Setup + scheme_files = [os.path.join(self._sample_files_dir, "reorder.meta")] + preproc_defs = {} + #Exercise + scheme_headers, table_dict = parse_scheme_files(scheme_files, preproc_defs, + self._logger) + #Verify size of returned list equals number of scheme headers in the test file 
+ # and that header (subroutine) names are 'reorder_[init,run,finalize]' + self.assertEqual(len(scheme_headers), 3) + #Verify header titles + titles = [elem.title for elem in scheme_headers] + self.assertTrue('reorder_init' in titles) + self.assertTrue('reorder_run' in titles) + self.assertTrue('reorder_finalize' in titles) + #Verify size and name of table_dict matches scheme name + self.assertEqual(len(table_dict), 1) + self.assertTrue('reorder' in table_dict) + + def test_missing_metadata_header(self): + """Test that a missing metadata header (aka arg table) is correctly detected """ + #Setup + scheme_files = [os.path.join(self._sample_files_dir, "missing_arg_table.meta")] + preproc_defs = {} + #Exercise + with self.assertRaises(Exception) as context: + parse_scheme_files(scheme_files, preproc_defs, self._logger) + #Verify correct error message returned + emsg = "No matching metadata header found for missing_arg_table_run in" + self.assertTrue(emsg in str(context.exception)) + + def test_missing_fortran_header(self): + """Test that a missing Fortran header is correctly detected """ + #Setup + scheme_files = [os.path.join(self._sample_files_dir, "missing_fort_header.meta")] + preproc_defs = {} + #Exercise + with self.assertRaises(Exception) as context: + parse_scheme_files(scheme_files, preproc_defs, self._logger) + #Verify correct error message returned + emsg = "No matching Fortran routine found for missing_fort_header_run in" + self.assertTrue(emsg in str(context.exception)) + + def test_mismatch_intent(self): + """Test that differing intent, kind, rank, and type between metadata and Fortran are correctly detected """ + #Setup + scheme_files = [os.path.join(self._sample_files_dir, "mismatch_intent.meta")] + preproc_defs = {} + #Exercise + with self.assertRaises(Exception) as context: + parse_scheme_files(scheme_files, preproc_defs, self._logger) + #Verify 4 correct error messages returned + self.assertTrue('intent mismatch (in != inout) in mismatch_intent_run, at' in str(context.exception)) + self.assertTrue('kind mismatch (kind_fizz != kind_phys) in mismatch_intent_run, at' in str(context.exception)) + self.assertTrue('rank mismatch in mismatch_intent_run/potential_temperature (0 != 1), at' in str(context.exception)) + self.assertTrue('type mismatch (integer != real) in mismatch_intent_run, at' in str(context.exception)) + self.assertTrue('4 errors found comparing' in str(context.exception)) + + def test_invalid_subr_stmnt(self): + """Test that invalid Fortran subroutine statements are correctly detected """ + #Setup + scheme_files = [os.path.join(self._sample_files_dir, "invalid_subr_stmnt.meta")] + preproc_defs = {} + #Exercise + with self.assertRaises(Exception) as context: + parse_scheme_files(scheme_files, preproc_defs, self._logger) + #Verify correct error message returned + self.assertTrue("Invalid dummy argument, 'errmsg', at" in str(context.exception)) + + def test_invalid_dummy_arg(self): + """Test that invalid dummy argument statements are correctly detected """ + #Setup + scheme_files = [os.path.join(self._sample_files_dir, "invalid_dummy_arg.meta")] + preproc_defs = {} + #Exercise + with self.assertRaises(Exception) as context: + parse_scheme_files(scheme_files, preproc_defs, self._logger) + #Verify correct error message returned + self.assertTrue("Invalid dummy argument, 'woohoo', at" in str(context.exception)) + +# pylint: disable=invalid-name + def test_CCPPnotset_var_missing_in_meta(self): + """Test for correct detection of a variable that REMAINS in the subroutine 
argument list + (due to an undefined pre-processor directive: #ifndef CCPP), BUT IS NOT PRESENT in meta file""" + #Setup + scheme_files = [os.path.join(self._sample_files_dir, "CCPPnotset_var_missing_in_meta.meta")] + preproc_defs = {} # CCPP directive is not set + #Exercise + with self.assertRaises(Exception) as context: + parse_scheme_files(scheme_files, preproc_defs, self._logger) + #Verify 3 correct error messages returned + self.assertTrue('Variable mismatch in CCPPnotset_var_missing_in_meta_run, variables missing from metadata header.' + in str(context.exception)) + self.assertTrue('Out of order argument, errmsg in CCPPnotset_var_missing_in_meta_run' in str(context.exception)) + self.assertTrue('Out of order argument, errflg in CCPPnotset_var_missing_in_meta_run' in str(context.exception)) + self.assertTrue('3 errors found comparing' in str(context.exception)) + + def test_CCPPeq1_var_missing_in_fort(self): + """Test for correct detection of a variable that IS REMOVED from the subroutine argument list + (due to a pre-processor directive: #ifndef CCPP), but IS PRESENT in meta file""" + #Setup + scheme_files = [os.path.join(self._sample_files_dir, "CCPPeq1_var_missing_in_fort.meta")] + preproc_defs = {'CCPP':1} # Set CCPP directive + #Exercise + with self.assertRaises(Exception) as context: + parse_scheme_files(scheme_files, preproc_defs, self._logger) + #Verify 3 correct error messages returned + self.assertTrue('Variable mismatch in CCPPeq1_var_missing_in_fort_run, variables missing from Fortran scheme.' + in str(context.exception)) + self.assertTrue('Variable mismatch in CCPPeq1_var_missing_in_fort_run, no Fortran variable bar.' + in str(context.exception)) + self.assertTrue('Out of order argument, errmsg in CCPPeq1_var_missing_in_fort_run' in str(context.exception)) + self.assertTrue('3 errors found comparing' in str(context.exception)) + + def test_CCPPeq1_var_in_fort_meta(self): + """Test positive case of a variable that IS PRESENT in the subroutine argument list + (due to a pre-processor directive: #ifdef CCPP), and IS PRESENT in meta file""" + #Setup + scheme_files = [os.path.join(self._sample_files_dir, "CCPPeq1_var_in_fort_meta.meta")] + preproc_defs = {'CCPP':1} # Set CCPP directive + #Exercise + scheme_headers, table_dict = parse_scheme_files(scheme_files, preproc_defs, self._logger) + #Verify size of returned list equals number of scheme headers in the test file (1) + # and that header (subroutine) name is 'CCPPeq1_var_in_fort_meta_run' + self.assertEqual(len(scheme_headers), 1) + #Verify header titles + titles = [elem.title for elem in scheme_headers] + self.assertTrue('CCPPeq1_var_in_fort_meta_run' in titles) + + #Verify size and name of table_dict matches scheme name + self.assertEqual(len(table_dict), 1) + self.assertTrue('CCPPeq1_var_in_fort_meta' in table_dict) + + def test_CCPPgt1_var_in_fort_meta(self): + """Test positive case of a variable that IS PRESENT in the subroutine argument list + (due to a pre-processor directive: #if CCPP > 1), and IS PRESENT in meta file""" + #Setup + scheme_files = [os.path.join(self._sample_files_dir, "CCPPgt1_var_in_fort_meta.meta")] + preproc_defs = {'CCPP':2} # Set CCPP directive to > 1 + #Exercise + scheme_headers, table_dict = parse_scheme_files(scheme_files, preproc_defs, self._logger) + #Verify size of returned list equals number of scheme headers in the test file (1) + # and that header (subroutine) name is 'CCPPgt1_var_in_fort_meta_init' + self.assertEqual(len(scheme_headers), 1) + #Verify header titles + titles = [elem.title for elem 
in scheme_headers] + self.assertTrue('CCPPgt1_var_in_fort_meta_init' in titles) + + #Verify size and name of table_dict matches scheme name + self.assertEqual(len(table_dict), 1) + self.assertTrue('CCPPgt1_var_in_fort_meta' in table_dict) + + def test_CCPPgt1_var_in_fort_meta2(self): + """Test correct detection of a variable that IS NOT PRESENT in the subroutine argument list + (due to a pre-processor directive: #if CCPP > 1), but IS PRESENT in meta file""" + #Setup + scheme_files = [os.path.join(self._sample_files_dir, "CCPPgt1_var_in_fort_meta.meta")] + preproc_defs = {'CCPP':1} # Set CCPP directive to 1 + #Exercise + with self.assertRaises(Exception) as context: + parse_scheme_files(scheme_files, preproc_defs, self._logger) + #Verify 3 correct error messages returned + self.assertTrue('Variable mismatch in CCPPgt1_var_in_fort_meta_init, variables missing from Fortran scheme.' + in str(context.exception)) + self.assertTrue('Variable mismatch in CCPPgt1_var_in_fort_meta_init, no Fortran variable bar.' + in str(context.exception)) + self.assertTrue('Out of order argument, errmsg in CCPPgt1_var_in_fort_meta_init' in str(context.exception)) + self.assertTrue('3 errors found comparing' in str(context.exception)) + + def test_CCPPeq1_var_missing_in_meta(self): + """Test correct detection of a variable that IS PRESENT in the subroutine argument list + (due to a pre-processor directive: #ifdef CCPP), and IS NOT PRESENT in meta file""" + #Setup + scheme_files = [os.path.join(self._sample_files_dir, "CCPPeq1_var_missing_in_meta.meta")] + preproc_defs = {'CCPP':1} # Set CCPP directive + #Exercise + with self.assertRaises(Exception) as context: + parse_scheme_files(scheme_files, preproc_defs, self._logger) + #Verify 3 correct error messages returned + self.assertTrue('Variable mismatch in CCPPeq1_var_missing_in_meta_finalize, variables missing from metadata header.' + in str(context.exception)) + self.assertTrue('Out of order argument, errmsg in CCPPeq1_var_missing_in_meta_finalize' in str(context.exception)) + self.assertTrue('Out of order argument, errflg in CCPPeq1_var_missing_in_meta_finalize' in str(context.exception)) + self.assertTrue('3 errors found comparing' in str(context.exception)) + +# pylint: enable=invalid-name + +if __name__ == '__main__': + unittest.main() diff --git a/test/unit_tests/test_metadata_table.py b/test/unit_tests/test_metadata_table.py new file mode 100644 index 00000000..a8014f52 --- /dev/null +++ b/test/unit_tests/test_metadata_table.py @@ -0,0 +1,401 @@ +#! 
/usr/bin/env python +""" +----------------------------------------------------------------------- + Description: Contains unit tests for parse_metadata_file + in scripts file metadata_table.py + + Assumptions: + + Command line arguments: none + + Usage: python test_metadata_table.py # run the unit tests +----------------------------------------------------------------------- +""" +import sys +import os +import unittest + +TEST_DIR = os.path.dirname(os.path.abspath(__file__)) +SCRIPTS_DIR = os.path.abspath(os.path.join(TEST_DIR, os.pardir, os.pardir, "scripts")) +SAMPLE_FILES_DIR = os.path.join(TEST_DIR, "sample_files") + +if not os.path.exists(SCRIPTS_DIR): + raise ImportError("Cannot find scripts directory") + +sys.path.append(SCRIPTS_DIR) + +# pylint: disable=wrong-import-position +from metadata_table import parse_metadata_file, MetadataTable +# pylint: enable=wrong-import-position + +class MetadataTableTestCase(unittest.TestCase): + + """Tests for `parse_metadata_file`.""" + + def test_good_host_file(self): + """Test that good host file test_host.meta returns one header named test_host""" + #Setup + known_ddts = list() + logger = None + filename = os.path.join(SAMPLE_FILES_DIR, "test_host.meta") + #Exercise + result = parse_metadata_file(filename, known_ddts, logger) + #Verify that: + # no dependencies is returned as '' + # rel_path is returned as None + # size of returned list equals number of headers in the test file + # ccpp-table-properties name is 'test_host' + dependencies = result[0].dependencies + rel_path = result[0].relative_path + self.assertFalse('' in dependencies) + self.assertEqual(len(dependencies), 0) + self.assertIsNone(rel_path) + self.assertEqual(len(result), 1) + titles = [elem.table_name for elem in result] + self.assertIn('test_host', titles, msg="Header name 'test_host' is expected but not found") + + def test_good_multi_ccpp_arg_table(self): + """Test that good file with 4 ccpp-arg-table returns 4 headers""" + known_ddts = list() + logger = None + filename = os.path.join(SAMPLE_FILES_DIR, "test_multi_ccpp_arg_tables.meta") + #Exercise + result = parse_metadata_file(filename, known_ddts, logger) + #Verify that size of returned list equals number of ccpp-table-properties in the test file + # ccpp-arg-tables are returned in result[0].sections() and result[1].sections() + self.assertEqual(len(result), 2) + + titles = list() + for table in result: + titles.extend([x.title for x in table.sections()]) + + self.assertIn('vmr_type', titles, msg="Header name 'vmr_type' is expected but not found") + self.assertIn('make_ddt_run', titles, msg="Header name 'make_ddt_run' is expected but not found") + self.assertIn('make_ddt_init', titles, msg="Header name 'make_ddt_init' is expected but not found") + self.assertIn('make_ddt_finalize', titles, msg="Header name 'make_ddt_finalize' is expected but not found") + + def test_bad_type_name(self): + """Test that `type = banana` returns expected error""" + #Setup + known_ddts = list() + logger = None + filename = os.path.join(SAMPLE_FILES_DIR, "test_bad_type_name.meta") + + #Exercise + with self.assertRaises(Exception) as context: + tables = parse_metadata_file(filename, known_ddts, logger) + + #Verify + #print("The exception is", context.exception) + self.assertTrue("Section type, 'banana', does not match table type, 'scheme'" in str(context.exception)) + + def test_double_header(self): + """Test that a duplicate header returns expected error""" + known_ddts = list() + logger = None + filename = os.path.join(SAMPLE_FILES_DIR, 
"double_header.meta") + + with self.assertRaises(Exception) as context: + tables = parse_metadata_file(filename, known_ddts, logger) + + #print("The exception is", context.exception) + self.assertTrue('table already contains \'test_host\'' in str(context.exception)) + + def test_bad_dimension(self): + """Test that `dimension = banana` returns expected error""" + known_ddts = list() + logger = None + filename = os.path.join(SAMPLE_FILES_DIR, "test_bad_dimension.meta") + + with self.assertRaises(Exception) as context: + tables = parse_metadata_file(filename, known_ddts, logger) + + #print("The exception is", context.exception) + self.assertTrue('Invalid \'dimensions\' property value, \'' in str(context.exception)) + + def test_duplicate_variable(self): + """Test that a duplicate variable returns expected error""" + known_ddts = list() + logger = None + filename = os.path.join(SAMPLE_FILES_DIR, "test_duplicate_variable.meta") + + with self.assertRaises(Exception) as context: + tables = parse_metadata_file(filename, known_ddts, logger) + + #print("The exception is", context.exception) + self.assertTrue('Invalid (duplicate) standard name in temp_calc_adjust_run, defined at ' in str(context.exception)) + + def test_invalid_intent(self): + """Test that an invalid intent returns expected error""" + known_ddts = list() + logger = None + filename = os.path.join(SAMPLE_FILES_DIR, "test_invalid_intent.meta") + + with self.assertRaises(Exception) as context: + tables = parse_metadata_file(filename, known_ddts, logger) + + #print("The exception is", context.exception) + self.assertTrue('Invalid \'intent\' property value, \'banana\', at ' in str(context.exception)) + + def test_missing_intent(self): + """Test that a missing intent returns expected error""" + known_ddts = list() + logger = None + filename = os.path.join(SAMPLE_FILES_DIR, "test_missing_intent.meta") + + with self.assertRaises(Exception) as context: + tables = parse_metadata_file(filename, known_ddts, logger) + + #print("The exception is", context.exception) + emsg = "Required property, 'intent', missing, at " + self.assertTrue(emsg in str(context.exception)) + + def test_missing_units(self): + """Test that a missing units attribute returns expected error""" + known_ddts = list() + logger = None + filename = os.path.join(SAMPLE_FILES_DIR, "test_missing_units.meta") + + with self.assertRaises(Exception) as context: + tables = parse_metadata_file(filename, known_ddts, logger) + + #print("The exception is", context.exception) + emsg = "Required property, 'units', missing, at" + self.assertTrue(emsg in str(context.exception)) + + def test_missing_table_type(self): + """Test that a missing table type returns expected error""" + known_ddts = list() + logger = None + filename = os.path.join(SAMPLE_FILES_DIR, "test_missing_table_type.meta") + + with self.assertRaises(Exception) as context: + tables = parse_metadata_file(filename, known_ddts, logger) + + #print("The exception is", context.exception) + emsg = "Invalid section type, 'None'" + self.assertTrue(emsg in str(context.exception)) + + def test_bad_table_type(self): + """Test that a mismatched table type returns expected error""" + known_ddts = list() + logger = None + filename = os.path.join(SAMPLE_FILES_DIR, "test_bad_table_type.meta") + + with self.assertRaises(Exception) as context: + tables = parse_metadata_file(filename, known_ddts, logger) + + #print("The exception is", context.exception) + emsg = "Section type, 'host', does not match table type, 'scheme'" + self.assertTrue(emsg in 
str(context.exception)) + + def test_missing_table_name(self): + """Test that a missing table name returns expected error""" + known_ddts = list() + logger = None + filename = os.path.join(SAMPLE_FILES_DIR, "test_missing_table_name.meta") + + with self.assertRaises(Exception) as context: + tables = parse_metadata_file(filename, known_ddts, logger) + + #print("The exception is", context.exception) + emsg = "Section name, 'None', does not match table title, 'test_missing_table_name'" + self.assertTrue(emsg in str(context.exception)) + + def test_bad_table_key(self): + """Test that a bad table key returns expected error""" + known_ddts = list() + logger = None + filename = os.path.join(SAMPLE_FILES_DIR, "test_bad_table_key.meta") + + with self.assertRaises(Exception) as context: + tables = parse_metadata_file(filename, known_ddts, logger) + + #print("The exception is", context.exception) + emsg = "Invalid metadata table start property, 'something', at " + self.assertTrue(emsg in str(context.exception)) + + def test_bad_line_split(self): + """Test that a bad split line with | returns expected error""" + known_ddts = list() + logger = None + filename = os.path.join(SAMPLE_FILES_DIR, "test_bad_line_split.meta") + + with self.assertRaises(Exception) as context: + tables = parse_metadata_file(filename, known_ddts, logger) + + #print("The exception is", context.exception) + emsg = "Invalid variable property syntax, \'\', at " + self.assertTrue(emsg in str(context.exception)) + + def test_unknown_ddt_type(self): + """Test that a DDT type = banana returns expected error""" + known_ddts = list() + logger = None + filename = os.path.join(SAMPLE_FILES_DIR, "test_unknown_ddt_type.meta") + + with self.assertRaises(Exception) as context: + tables = parse_metadata_file(filename, known_ddts, logger) + + #print("The exception is", context.exception) + emsg = "Unknown DDT type, banana, at " + self.assertTrue(emsg in str(context.exception)) + + def test_bad_var_property_name(self): + """Test that a ddt_type = None returns expected error""" + known_ddts = list() + logger = None + filename = os.path.join(SAMPLE_FILES_DIR, "test_bad_var_property_name.meta") + + with self.assertRaises(Exception) as context: + tables = parse_metadata_file(filename, known_ddts, logger) + + #print("The exception is", context.exception) + emsg = "Invalid variable property name, 'none', at " + self.assertTrue(emsg in str(context.exception)) + + def test_no_input(self): + """Test that no input returns expected error""" + with self.assertRaises(Exception) as context: + MetadataTable() + + #print("The exception is", context.exception) + emsg = "MetadataTable requires a name" + self.assertTrue(emsg in str(context.exception)) + + def test_no_table_type(self): + """Test that __init__ with table_type_in=None returns expected error""" + with self.assertRaises(Exception) as context: + MetadataTable(table_name_in="something", table_type_in=None, dependencies=None, \ + relative_path=None, known_ddts=None, var_dict=None, module=None, \ + parse_object=None, logger=None) + + #print("The exception is", context.exception) + emsg = "MetadataTable requires a table type" + self.assertTrue(emsg in str(context.exception)) + + def test_bad_header_type(self): + """Test that __init__ with table_type_in=banana returns expected error""" + with self.assertRaises(Exception) as context: + MetadataTable(table_name_in="something", table_type_in="banana", dependencies=None, \ + relative_path=None, known_ddts=None, var_dict=None, module=None, \ + parse_object=None, 
logger=None) + + #print("The exception is", context.exception) + emsg = "Invalid metadata arg table type, 'banana'" + self.assertTrue(emsg in str(context.exception)) + + def test_no_module(self): + """Test that __init__ with module=None returns expected error""" + with self.assertRaises(Exception) as context: + MetadataTable(table_name_in=None, table_type_in=None, dependencies=None, \ + relative_path=None, known_ddts=None, var_dict=None, module=None, \ + parse_object=None, logger=None) + + #print("The exception is", context.exception) + emsg = "MetadataTable requires a name" + self.assertTrue(emsg in str(context.exception)) + + def test_bad_1st_ccpp_arg_table(self): + """Test that first arg table named ccpp-farg-table returns expected error""" + known_ddts = list() + logger = None + filename = os.path.join(SAMPLE_FILES_DIR, "test_bad_1st_arg_table_header.meta") + + with self.assertRaises(Exception) as context: + tables = parse_metadata_file(filename, known_ddts, logger) + + #print("The exception is", context.exception) + emsg = "Invalid variable property syntax, '[ccpp-farg-table]', at " + self.assertTrue(emsg in str(context.exception)) + + def test_bad_2nd_ccpp_arg_table(self): + """Test that second arg table named ccpp-farg-table returns expected error""" + known_ddts = list() + logger = None + filename = os.path.join(SAMPLE_FILES_DIR, "test_bad_2nd_arg_table_header.meta") + + with self.assertRaises(Exception) as context: + tables = parse_metadata_file(filename, known_ddts, logger) + + #print("The exception is", context.exception) + emsg = "Invalid variable property syntax, '[ccpp-farg-table]', at " + self.assertTrue(emsg in str(context.exception)) + + def test_mismatch_section_table_title(self): + """Test that mismatched section name and table title returns expected error""" + known_ddts = list() + logger = None + filename = os.path.join(SAMPLE_FILES_DIR, "test_mismatch_section_table_title.meta") + + with self.assertRaises(Exception) as context: + tables = parse_metadata_file(filename, known_ddts, logger) + + #print("The exception is", context.exception) + emsg = "Section name, 'test_host', does not match table title, 'banana', at " + self.assertTrue(emsg in str(context.exception)) + + def test_double_table_properties(self): + """Test that duplicate ccpp-table-properties returns expected error""" + known_ddts = list() + logger = None + filename = os.path.join(SAMPLE_FILES_DIR, "double_table_properties.meta") + + with self.assertRaises(Exception) as context: + tables = parse_metadata_file(filename, known_ddts, logger) + + #print("The exception is", context.exception) + emsg = "Duplicate metadata table, test_host, at " + self.assertTrue(emsg in str(context.exception)) + + def test_missing_table_properties(self): + """Test that a missing ccpp-table-properties returns expected error""" + known_ddts = list() + logger = None + filename = os.path.join(SAMPLE_FILES_DIR, "missing_table_properties.meta") + + with self.assertRaises(Exception) as context: + tables = parse_metadata_file(filename, known_ddts, logger) + + #print("The exception is", context.exception) + emsg = "Invalid CCPP metadata line, '[ccpp-arg-table]', at " + self.assertTrue(emsg in str(context.exception)) + + def test_dependencies_rel_path(self): + """Test that relative_path and dependencies from ccpp-table-properties are read in correctly""" + known_ddts = list() + logger = None + filename = os.path.join(SAMPLE_FILES_DIR, "test_dependencies_rel_path.meta") + + result = parse_metadata_file(filename, known_ddts, logger) + + 
dependencies = result[0].dependencies + rel_path = result[0].relative_path + titles = [elem.table_name for elem in result] + + self.assertEqual(len(dependencies), 4) + self.assertIn('machine.F', dependencies, msg="Dependency 'machine.F' is expected but not found") + self.assertIn('physcons.F90', dependencies, msg="Dependency 'physcons.F90' is expected but not found") + self.assertIn('GFDL_parse_tracers.F90', dependencies, msg="Dependency 'GFDL_parse_tracers.F90' is expected but not found") + self.assertIn('rte-rrtmgp/rrtmgp/mo_gas_optics_rrtmgp.F90', dependencies, \ + msg="Dependency 'rte-rrtmgp/rrtmgp/mo_gas_optics_rrtmgp.F90' is expected but not found") + + self.assertEqual(rel_path, "../../ccpp/physics/physics") + self.assertEqual(len(result), 1) + self.assertIn('test_host', titles, msg="Table name 'test_host' is expected but not found") + + def test_invalid_table_properties_type(self): + """Test that an invalid ccpp-table-properties type returns expected error""" + known_ddts = list() + logger = None + filename = os.path.join(SAMPLE_FILES_DIR, "test_invalid_table_properties_type.meta") + + with self.assertRaises(Exception) as context: + tables = parse_metadata_file(filename, known_ddts, logger) + + #print("The exception is", context.exception) + emsg = "Invalid metadata table type, 'banana', at " + self.assertTrue(emsg in str(context.exception)) + +if __name__ == '__main__': + unittest.main() diff --git a/tests/test_capgen.py b/tests/test_capgen.py deleted file mode 100644 index d7b05f0b..00000000 --- a/tests/test_capgen.py +++ /dev/null @@ -1,133 +0,0 @@ -#! /usr/bin/env python -""" -Test script to check ability to parse and generate caps. -""" - -# Python library imports -import os.path -if __name__ == '__main__' and __package__ is None: - import sys - import os - tdir = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) - sdir = os.path.join(tdir, "scripts") - if not os.path.exists(sdir): - raise ImportError("Cannot find scripts directory") - # End if - sys.path.append(sdir) -# End if -import re -import logging -# CCPP framework imports -import convert_metadata -from parse_tools import register_fortran_ddt_name, init_log, set_log_level -from metadata_table import MetadataHeader -
-arg_table_re = re.compile(r"(?i)[\s]*!.*section.*arg_table_") - -## Init this now so that all Exceptions can be trapped -logger = init_log('ccpp_capgen') -set_log_level(logger, logging.INFO) -## To cause convert_metadata to stop when an error condition is found -## (no metadata file), uncomment out the next line.
-#logger = None - -######################################################################## - -if __name__ == "__main__": - if len(sys.argv) > 2: - pdir = sys.argv[2] - else: - pdir = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) - pdir = os.path.join(pdir, 'tests', 'cap_tests') - if not os.path.exists(pdir): - pdir = os.getcwd() - # End if - # End if - if len(sys.argv) > 1: - sdir = sys.argv[1] - else: - sdir = os.path.join(os.path.dirname(pdir), 'ccpp-physics') - # End if - print("Converting physics files from {} with output to {}".format(sdir, pdir)) -# XXgoldyXX: v debug only - # Temporary DDT registration - register_fortran_ddt_name('GFS_control_type') - register_fortran_ddt_name('GFS_statein_type') - register_fortran_ddt_name('GFS_stateout_type') - register_fortran_ddt_name('GFS_sfcprop_type') - register_fortran_ddt_name('GFS_coupling_type') - register_fortran_ddt_name('GFS_grid_type') - register_fortran_ddt_name('GFS_tbd_type') - register_fortran_ddt_name('GFS_cldprop_type') - register_fortran_ddt_name('GFS_radtend_type') - register_fortran_ddt_name('GFS_diag_type') - register_fortran_ddt_name('GFS_interstitial_type') - register_fortran_ddt_name('GFS_data_type') - register_fortran_ddt_name('cmpfsw_type') - register_fortran_ddt_name('topflw_type') - register_fortran_ddt_name('sfcflw_type') - register_fortran_ddt_name('proflw_type') - register_fortran_ddt_name('topfsw_type') - register_fortran_ddt_name('sfcfsw_type') - register_fortran_ddt_name('profsw_type') - register_fortran_ddt_name('CCPP_interstitial_type') -# XXgoldyXX: ^ debug only - tfilenames = list() - # Find files with arg tables - for dir in ['physics', 'stochastic_physics', 'GFS_layer']: - if not os.path.exists(os.path.join(sdir, dir)): - continue - # End if - for file in os.listdir(os.path.join(sdir, dir)): - has_arg_table = False - pathname = os.path.join(sdir, dir, file) - if os.path.isfile(pathname): - with open(pathname, 'r') as infile: - preamble = True - for line in infile: - if preamble: - if line.strip().lower() == 'contains': - preamble = False - # End if - # End if - if arg_table_re.match(line) is not None: - has_arg_table = True - break - # End if - # End for - # End with - # End if - if has_arg_table: - tfilenames.append(pathname) - # End if - # End for - # End for - print("Found {} files with arg tables".format(len(tfilenames))) - total_headers = 0 - for tfile in tfilenames: - try: - tbase = os.path.basename(tfile) - file = os.path.join(pdir, tbase) - mbase = "{}.meta".format('.'.join(tbase.split('.')[:-1])) - mdfile = os.path.join(pdir, mbase) - if not os.path.exists(file): - infile = tfile - if not os.path.exists(infile): - print("WARNING: Cannot find '{}'".format(infile)) - else: - convert_metadata.convert_file(infile, file, mdfile, logger) - # End if - # End if - if os.path.exists(mdfile): - mh = MetadataHeader.parse_metadata_file(mdfile) - print("{} metadata headers parsed in {}".format(len(mh), mdfile)) - total_headers = total_headers + len(mh) - else: - print("{} not found!".format(mdfile)) - # End if - except ValueError as ve: - print("{}: {}".format(infile, ve)) - # End except - # End for - print("Found {} total metadata headers".format(total_headers)) -# End if __main__ diff --git a/tests/test_metadata_parser.py b/tests/test_metadata_parser.py index 0db8c26a..5ce55dab 100644 --- a/tests/test_metadata_parser.py +++ b/tests/test_metadata_parser.py @@ -1,4 +1,11 @@ -from metadata_table import MetadataHeader, Var +import logging +import os +import sys + 
+#sys.path.append(os.path.join(os.path.split(__file__)[0], '../scripts/parse_tools')) +from parse_checkers import registered_fortran_ddt_names +from metadata_table import MetadataTable, parse_metadata_file, Var +#from metadata_table import MetadataHeader example_table = """ [ccpp-table-properties] @@ -20,21 +27,27 @@ """ -def test_MetadataHeader_parse_table(tmpdir): +def test_MetadataTable_parse_table(tmpdir): path = str(tmpdir.join("table.meta")) with open(path, "w") as f: f.write(example_table) - table1, table2 = MetadataHeader.parse_metadata_file(path) + metadata_headers = parse_metadata_file(path, known_ddts=registered_fortran_ddt_names(), + logger=logging.getLogger(__name__)) - # check first table - assert table1.name == "<name>" - assert table1.type == "scheme" - assert table1.dependencies == ["path/a.f", "path/b.f"] + # check metadata header + assert len(metadata_headers) == 1 + metadata_header = metadata_headers[0] + assert metadata_header.table_name == "<name>" + assert metadata_header.table_type == "scheme" + assert metadata_header.relative_path == "path" + assert metadata_header.dependencies == ["a.f", "b.f"] - # check second table - assert table2.name == "<name>" - assert table2.type == "scheme" - (im_data,) = table2.variable_list() + # check metadata section + assert len(metadata_header.sections()) == 1 + metadata_section = metadata_header.sections()[0] + assert metadata_section.name == "<name>" + assert metadata_section.type == "scheme" + (im_data,) = metadata_section.variable_list() assert isinstance(im_data, Var) assert im_data.get_dimensions() == []
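The updated test_metadata_parser.py above exercises the new parse_metadata_file() entry point, which returns a list of tables, each exposing table_name, table_type, dependencies, relative_path, and sections(); each section in turn exposes name, type, and variable_list(). The sketch below, which is illustrative only and not part of this changeset, walks that structure; the script name summarize_meta.py and the command-line usage are hypothetical, and it assumes the repository's scripts/ directory is on PYTHONPATH, as the unit tests arrange via sys.path.

#!/usr/bin/env python
"""Illustrative sketch (not part of this changeset): walk the structure
returned by the updated parse_metadata_file() API used in the tests above.
Assumes scripts/ is on PYTHONPATH and a .meta file path is passed on the
command line."""
import logging
import sys

from parse_checkers import registered_fortran_ddt_names
from metadata_table import parse_metadata_file

def summarize(meta_path):
    """Print each metadata table, its sections, and each section's variables."""
    tables = parse_metadata_file(meta_path,
                                 known_ddts=registered_fortran_ddt_names(),
                                 logger=logging.getLogger(__name__))
    for table in tables:
        print("table '{}' (type={}, relative_path={}, dependencies={})".format(
            table.table_name, table.table_type,
            table.relative_path, table.dependencies))
        for section in table.sections():
            variables = section.variable_list()
            print("  section '{}' (type={}): {} variable(s)".format(
                section.name, section.type, len(variables)))
            for var in variables:
                # get_dimensions() returns a (possibly empty) list, as
                # asserted in test_MetadataTable_parse_table above.
                print("    dimensions: {}".format(var.get_dimensions()))

if __name__ == "__main__":
    # e.g. python summarize_meta.py test/unit_tests/sample_files/test_host.meta
    summarize(sys.argv[1])

Pointed at test/unit_tests/sample_files/test_host.meta, for example, this should report the single 'test_host' table that test_good_host_file checks for.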