Skip to content

Commit

Permalink
Code changes and tests for COSIMA/access-om2#149
Browse files Browse the repository at this point in the history
  • Loading branch information
nichannah committed Jun 14, 2019
1 parent a4c3081 commit e51e6a8
Show file tree
Hide file tree
Showing 17 changed files with 1,383 additions and 26 deletions.
1 change: 1 addition & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -19,3 +19,4 @@ gmon.*
tests/minimal/*.nc
tests/JRA55_IAF/debug.root.*
tests/JRA55_RYF/debug.root.*
tests/JRA55_IAF_SINGLE_FIELD/debug.root.*
14 changes: 10 additions & 4 deletions atm/src/atm.F90
Original file line number Diff line number Diff line change
Expand Up @@ -32,6 +32,7 @@ program atm

type(simple_timer_type) :: field_read_timer, ice_wait_timer
type(simple_timer_type) :: init_runoff_timer, remap_runoff_timer
type(simple_timer_type) :: coupler_put_timer

namelist /atm_nml/ forcing_file, accessom2_config_dir

Expand Down Expand Up @@ -80,6 +81,8 @@ program atm
accessom2%simple_timers_enabled())
call remap_runoff_timer%init('remap_runoff', accessom2%logger, &
accessom2%simple_timers_enabled())
call coupler_put_timer%init('coupler_put', accessom2%logger, &
accessom2%simple_timers_enabled())

! Initialise the runoff remapping object with ice grid information.
call init_runoff_timer%start()
Expand Down Expand Up @@ -124,11 +127,13 @@ program atm
endif
endif

call coupler_put_timer%start()
if (index(fields(i)%name, 'runof') /= 0) then
call coupler%put(runoff_field, cur_runtime_in_seconds, err)
else
call coupler%put(fields(i), cur_runtime_in_seconds, err)
endif
call coupler_put_timer%stop()
enddo

! Block until we receive from ice. Ice will do a nonblocking send immediately
Expand All @@ -137,18 +142,19 @@ program atm
call accessom2%atm_ice_sync()
call ice_wait_timer%stop()

call accessom2%progress_date(dt)

call accessom2%logger%write(LOG_INFO, '{ "cur_exp_date" : "'//accessom2%get_cur_exp_date_str()//'" }')
call accessom2%logger%write(LOG_INFO, '{ "cur_forcing_date" : "'//accessom2%get_cur_forcing_date_str()//'" }')
call accessom2%logger%write(LOG_INFO, '{ "cur_exp-datetime" : "'//accessom2%get_cur_exp_date_str()//'" }')
call accessom2%logger%write(LOG_INFO, '{ "cur_forcing-datetime" : "'//accessom2%get_cur_forcing_date_str()//'" }')
call accessom2%logger%write(LOG_DEBUG, 'cur_runtime_in_seconds ', &
int(accessom2%get_cur_runtime_in_seconds()))

call accessom2%progress_date(dt)
enddo

call field_read_timer%write_stats()
call ice_wait_timer%write_stats()
call init_runoff_timer%write_stats()
call remap_runoff_timer%write_stats()
call coupler_put_timer%write_stats()

call accessom2%logger%write(LOG_INFO, 'Run complete, calling deinit')

Expand Down
11 changes: 11 additions & 0 deletions tests/JRA55_IAF_SINGLE_FIELD/accessom2.nml
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
&accessom2_nml
log_level = 'DEBUG'
ice_ocean_timestep = 5400
enable_simple_timers = .true.
/

&date_manager_nml
forcing_start_date = '1958-12-30T00:00:00'
forcing_end_date = '1960-01-01T00:00:00'
restart_period = 1, 0, 0
/
7 changes: 7 additions & 0 deletions tests/JRA55_IAF_SINGLE_FIELD/atm.nml
Original file line number Diff line number Diff line change
@@ -0,0 +1,7 @@
&atm_nml
accessom2_config_dir = './'
&end

&runoff_nml
remap_weights_file = '../test_data/rmp_jrar_to_cict_CONSERV.nc'
&end
1,201 changes: 1,201 additions & 0 deletions tests/JRA55_IAF_SINGLE_FIELD/checksums.txt

Large diffs are not rendered by default.

10 changes: 10 additions & 0 deletions tests/JRA55_IAF_SINGLE_FIELD/forcing.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,10 @@
{
"description": "JRA55-do IAF forcing",
"inputs": [
{
"filename": "/g/data1/ua8/JRA55-do/latest/rsds.{{year}}.nc",
"fieldname": "rsds",
"cname": "swfld_ai"
}
]
}
9 changes: 9 additions & 0 deletions tests/JRA55_IAF_SINGLE_FIELD/ice.nml
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
&ice_nml
resolution(:) = 360,300
from_atm_field_names = 'swfld_i'
to_ocean_field_names = 'strsu_io'
from_ocean_field_names = 'sst_i'
ice_grid_file = '../test_data/grid.nc'
ice_mask_file = '../test_data/kmt.nc'
dt = 5400
&end
86 changes: 86 additions & 0 deletions tests/JRA55_IAF_SINGLE_FIELD/namcouple
Original file line number Diff line number Diff line change
@@ -0,0 +1,86 @@
# This is a typical input file for OASIS 3.0, using netCDF format
# for restart input files. Oasis reads this file at run-time.
#
# Any line beginning with # is ignored. Blank lines are not allowed.
#
# $SEQMODE
# This keyword concerns the coupling algorithm. Put here the maximum number
# of fields that have to be, at one particular coupling timestep,
# necessarily exchanged sequentially in a given order.
# 1
# $END
#########################################################################

$NFIELDS
# This is the total number of fields being exchanged.
### 1 fields atm -> ice
### 1 fields ice -> ocn
### 1 fields ocn -> ice
3
$END
##########################################################################
$NBMODEL
# This gives you the number of models running in this experiment +
# their names (character*6, exactly!) and, optionally, the maximum Fortran unit
# number used by each model; 1024 will be used if none are given.
#
3 cicexx matmxx mom5xx
$END
###########################################################################
$RUNTIME
# This gives you the total simulated time for this run in seconds
# This is not used, but it needs to be >= the coupling timestep to satisfy error checking.
# See https://github.com/OceansAus/oasis3-mct/issues/3
10800
$END
###########################################################################
$NLOGPRT
# Index of printing level in output file cplout: 0 = no printing
# 1 = main routines and field names when treated, 2 = complete output
0
$END
###########################################################################
$STRINGS
#
# The above variables are the general parameters for the experiment.
# Everything below has to do with the fields being exchanged.
#
#
# ATMOSPHERE --->>> ICE
# -----------------------
##########
# Field 01 : swflx down
##########
swfld_ai swfld_i 367 10800 3 a2i.nc EXPORTED
jrat cict LAG=0 SEQ=+1
P 0 P 0
#
LOCTRANS MAPPING SCRIPR
INSTANT
../test_data/rmp_jra55_cice_conserve.nc dst
CONSERV LR SCALAR LATLON 10 FRACNNEI FIRST
############################################################################
#
# ICE --->>> OCEAN
# ----------------
##########
# Field 11 : ice-ocean interface stress (x-direction)
##########
strsu_io u_flux 170 3600 1 i2o.nc IGNORED
cict cict LAG=0 SEQ=+1
#
LOCTRANS
INSTANT
############################################################################
#
# OCEAN --->>> ICE
# ----------------
##########
# Field 26 : Sea surface temperature (Celsius in MOM4, Kelvin in MOM5)
##########
t_surf sst_i 1 3600 1 o2i.nc IGNORED
cict cict LAG=0 SEQ=+1
#
LOCTRANS
INSTANT
$END
6 changes: 6 additions & 0 deletions tests/JRA55_IAF_SINGLE_FIELD/ocean.nml
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
&ocean_nml
from_ice_field_names = 'u_flux'
to_ice_field_names = 't_surf'
resolution(:) = 360,300
dt = 5400
&end
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
64 changes: 42 additions & 22 deletions tests/test_stubs.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,20 +28,52 @@ def extract_field_name(checksum):
return k.split('-')[2]


def build_log_items(forcing_update_dts, field_update_files,
field_update_indices, checksums):
def dicts_to_list(key_name, log_str):
    """Collect the values of every dict literal on lines mentioning key_name.

    Each line of log_str that contains key_name is parsed as a Python
    dict literal; the values of those dicts are concatenated, in order
    of appearance, into a single flat list.
    """
    collected = []
    for raw_line in log_str.splitlines():
        if key_name not in raw_line:
            continue
        record = ast.literal_eval(raw_line.strip())
        collected.extend(record.values())
    return collected


def build_log_items(log_str):
    """Parse YATM (atm stub) log output into a list of LogItem records.

    log_str is the raw log text; relevant lines are dict literals written
    by the logger.  Returns one LogItem per forcing-field update, pairing
    the forcing datetime, source data file, time index and field checksum.
    """

    forcing_update_dts = dicts_to_list('forcing_update_field-datetime', log_str)
    forcing_update_dts = [dateutil.parser.parse(d) for d in forcing_update_dts]
    field_update_files = dicts_to_list('field_update_data-file', log_str)
    field_update_indices = dicts_to_list('field_update_data-index', log_str)

    checksum_lines = filter(lambda x: 'checksum' in x, log_str.splitlines())
    checksums = [ast.literal_eval(c.strip()) for c in checksum_lines]

    # Every field update must be logged as a complete quadruple.
    assert len(forcing_update_dts) == len(field_update_files) == \
        len(field_update_indices) == len(checksums)

    log_items = []
    field_names = set()
    for i in range(len(forcing_update_dts)):
        field_name = extract_field_name(checksums[i])
        field_names.add(field_name)
        item = LogItem(field_name, field_update_files[i],
                       field_update_indices[i], forcing_update_dts[i],
                       checksums[i])
        log_items.append(item)

    cur_exp_dts = dicts_to_list('cur_exp-datetime', log_str)
    cur_exp_dts = [dateutil.parser.parse(d) for d in cur_exp_dts]
    cur_forcing_dts = dicts_to_list('cur_forcing-datetime', log_str)
    cur_forcing_dts = [dateutil.parser.parse(d) for d in cur_forcing_dts]

    # There should be one cur_exp and one cur_forcing datetime entry for
    # each exchange of all fields.  Guard the ratio check so an empty log
    # fails the assertion cleanly instead of raising ZeroDivisionError.
    if field_names:
        assert len(cur_exp_dts) == len(cur_forcing_dts) == \
            (len(forcing_update_dts) // len(field_names))
    else:
        assert not cur_exp_dts and not cur_forcing_dts

    # NOTE(review): the committed version ended with a dangling, empty
    # 'for li in log_items:' loop — a syntax error as written — which has
    # been removed here.
    return log_items

def remove_duplicate_runoff_checksums(checksums):
Expand Down Expand Up @@ -69,10 +101,15 @@ def remove_duplicate_runoff_checksums(checksums):
def helper():
return Helper()

@pytest.fixture(params=['JRA55_IAF', 'JRA55_RYF'])
@pytest.fixture(params=['JRA55_IAF', 'JRA55_RYF', 'JRA55_RYF_MINIMAL'])
def exp(request):
yield request.param

@pytest.fixture(params=['JRA55_IAF_SINGLE_FIELD'])
def exp_fast(request):
yield request.param


class TestStubs:

@pytest.mark.fast
Expand Down Expand Up @@ -139,23 +176,6 @@ def test_forcing_fields(self, helper, exp):
forcing_start_date = dateutil.parser.parse(forcing_start_date)
forcing_end_date = dateutil.parser.parse(forcing_end_date)

# Parse some YATM output
def dicts_to_list(key_name, log_str):
lines = filter(lambda x : key_name in x, log_str.splitlines())
out = []
for l in lines:
out += list(ast.literal_eval(l.strip()).values())
return out

forcing_update_dts = dicts_to_list('forcing_update_field-datetime', matm_log)
forcing_update_dts = [dateutil.parser.parse(d) for d in forcing_update_dts]
field_update_files = dicts_to_list('field_update_data-file', matm_log)
field_update_indices = dicts_to_list('field_update_data-index', matm_log)
tmp_chk = filter(lambda x : 'checksum' in x, matm_log.splitlines())
checksums = []
for c in tmp_chk:
checksums.append(ast.literal_eval(c.strip()))

# Parse forcing.json
forcing_config = os.path.join(exp_dir, 'forcing.json')
with open(forcing_config) as f:
Expand Down Expand Up @@ -196,14 +216,14 @@ def test_restart(self, helper, exp):
pass

@pytest.mark.very_slow
def test_iaf_cycles(self, helper, exp_fast):
    """
    Test that experiment and forcing dates are always in sync.
    Esp relevant for multi-cycle IAF run, see:
    https://github.com/COSIMA/access-om2/issues/149
    """

    retcode, _output, _log, _matm_log = helper.run_exp(exp_fast,
                                                       years_duration=1)
    assert retcode == 0

0 comments on commit e51e6a8

Please sign in to comment.