diff --git a/docs/source/dev_guide/testing.rst b/docs/source/dev_guide/testing.rst
index 59292286..fb164651 100644
--- a/docs/source/dev_guide/testing.rst
+++ b/docs/source/dev_guide/testing.rst
@@ -31,6 +31,4 @@ Automated tests
 We have a :ref:`GitHub Actions ` Continuous Integration / Continuous Delivery (CI/CD) workflow.

-The unit tests are run automatically as part of this. As mentioned earlier,
-integration tests must be run on an LCRC machine.
-
+The unit tests are run automatically as part of this workflow. Integration tests must be run on one of the machines specified above.
diff --git a/examples/example_generic.cfg b/examples/example_generic.cfg
deleted file mode 100644
index 43893e41..00000000
--- a/examples/example_generic.cfg
+++ /dev/null
@@ -1,123 +0,0 @@
-# Edit example_generic.cfg or example_generic.py,
-# not the generated example files (e.g., example_compy.cfg)
-
-[default]
-case = 20210528.v2rc3e.piControl.ne30pg2_EC30to60E2r2.chrysalis
-environment_commands = "#expand environment_commands#"
-input = #expand input#
-input_subdir = archive/atm/hist
-mapping_file = #expand mapping_file#
-output = #expand output#
-partition = #expand partition#
-walltime = "02:00:00"
-www = #expand www#
-
-[climo]
-active = True
-years = "51:55:2", "51:55:4",
-
-  [[ atm_monthly_180x360_aave ]]
-  frequency = "monthly"
-
-  [[ atm_monthly_diurnal_8xdaily_180x360_aave ]]
-  frequency = "diurnal_8xdaily"
-  input_files = "eam.h4"
-  vars = "PRECT"
-
-[ts]
-active = True
-frequency = "monthly"
-years = "51:55:2",
-
-  [[ atm_monthly_180x360_aave ]]
-  input_files = "eam.h0"
-
-  [[ atm_daily_180x360_aave ]]
-  frequency = "daily"
-  input_files = "eam.h1"
-  vars = "PRECT"
-
-  [[ atm_monthly_glb ]]
-  input_files = "eam.h0"
-  input_subdir = "archive/atm/hist"
-  mapping_file = "glb"
-  years = "51:61:5",
-
-  [[ land_monthly ]]
-  input_files = "elm.h0"
-  input_subdir = "archive/lnd/hist"
-  vars = "FSH,LAISHA,LAISUN,RH2M"
-
-  [[ rof_monthly ]]
-  extra_vars = 'areatotal2'
-  input_files = "mosart.h0"
-  input_subdir = "archive/rof/hist"
-  mapping_file = ""
-  vars = "RIVER_DISCHARGE_OVER_LAND_LIQ"
-
-[tc_analysis]
-active = True
-scratch = #expand scratch#
-years = "51:53:2",
-
-[e3sm_diags]
-active = True
-grid = '180x360_aave'
-obs_ts = #expand obs_ts#
-ref_final_yr = 2014
-ref_start_yr = 1985
-reference_data_path = #expand reference_data_path#
-sets = "lat_lon","zonal_mean_xy","zonal_mean_2d","polar","cosp_histogram","meridional_mean_2d","enso_diags","qbo","diurnal_cycle","annual_cycle_zonal_mean","streamflow", "zonal_mean_2d_stratosphere", "tc_analysis",
-short_name = '20210528.v2rc3e.piControl.ne30pg2_EC30to60E2r2.chrysalis'
-ts_num_years = 2
-years = "51:55:2", "51:55:4",
-
-  [[ atm_monthly_180x360_aave ]]
-  climo_diurnal_frequency = "diurnal_8xdaily"
-  climo_diurnal_subsection = "atm_monthly_diurnal_8xdaily_180x360_aave"
-  sets = "lat_lon","zonal_mean_xy","zonal_mean_2d","polar","cosp_histogram","meridional_mean_2d","enso_diags","qbo","diurnal_cycle","annual_cycle_zonal_mean","streamflow", "zonal_mean_2d_stratosphere",
-
-  [[ atm_monthly_180x360_aave_tc_analysis ]]
-  # Running as its own subtask because tc_analysis requires jobs to run sequentially, which slows down testing
-  sets = "tc_analysis",
-  tc_obs = #expand tc_obs#
-  years = "51:53:2",
-
-  [[ atm_monthly_180x360_aave_mvm ]]
-  # Test model-vs-model using the same files as the reference
-  climo_diurnal_frequency = "diurnal_8xdaily"
-  climo_diurnal_subsection = "atm_monthly_diurnal_8xdaily_180x360_aave"
-  climo_subsection = "atm_monthly_180x360_aave"
-  diff_title = "Difference"
-  ref_final_yr = 52
-  ref_name = "20210528.v2rc3e.piControl.ne30pg2_EC30to60E2r2.chrysalis"
-  ref_start_yr = 51
-  ref_years = "51-52",
-  reference_data_path = #expand reference_data_path_mvm#
-  run_type = "model_vs_model"
-  short_ref_name = "20210528.v2rc3e.piControl.ne30pg2_EC30to60E2r2.chrysalis"
-  swap_test_ref = False
-  tag = "model_vs_model"
-  ts_num_years_ref = 2
-  ts_subsection = "atm_monthly_180x360_aave"
-
-[mpas_analysis]
-active = True
-anomalyRefYear = 51
-climo_years ="51-55", "56-61",
-enso_years = "51-55", "56-61",
-mesh = "EC30to60E2r2"
-parallelTaskCount = 6
-# Requires a longer time limit than permitted by "#expand partition#"
-partition = #expand partition_mpas#
-ts_years = "51-55", "51-61",
-
-[global_time_series]
-active = True
-climo_years ="51-55", "56-61",
-experiment_name = "20210528.v2rc3e.piControl.ne30pg2_EC30to60E2r2.chrysalis"
-figstr = "20210528.v2rc3e.piControl.ne30pg2_EC30to60E2r2.chrysalis"
-moc_file=mocTimeSeries_0051-0061.nc
-ts_num_years = 5
-ts_years = "51-55", "51-61",
-years = "51-61",
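Every `#expand <name>#` token in a template like the one above needs a matching key in the machine's expansions dictionary; an unmatched token fails with a `KeyError` during generation. A minimal, hypothetical self-check that lists the placeholder names a template expects — the helper name and the commented-out usage are illustrative, not part of the repo:

```python
import re
from pathlib import Path


def placeholder_names(template_path):
    # Collect the <name> part of every "#expand <name>#" token in a template.
    text = Path(template_path).read_text()
    return set(re.findall(r"#expand ([^#]*)#", text))


# Hypothetical usage against a template and an expansions dict:
# missing = placeholder_names("examples/example_generic.cfg") - set(expansions)
# print(sorted(missing))  # placeholders the expansions dict does not yet provide
```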
"Difference" - ref_final_yr = 52 - ref_name = "20210528.v2rc3e.piControl.ne30pg2_EC30to60E2r2.chrysalis" - ref_start_yr = 51 - ref_years = "51-52", - reference_data_path = #expand reference_data_path_mvm# - run_type = "model_vs_model" - short_ref_name = "20210528.v2rc3e.piControl.ne30pg2_EC30to60E2r2.chrysalis" - swap_test_ref = False - tag = "model_vs_model" - ts_num_years_ref = 2 - ts_subsection = "atm_monthly_180x360_aave" - -[mpas_analysis] -active = True -anomalyRefYear = 51 -climo_years ="51-55", "56-61", -enso_years = "51-55", "56-61", -mesh = "EC30to60E2r2" -parallelTaskCount = 6 -# Requires a longer time limit than permitted by "#expand partition#" -partition = #expand partition_mpas# -ts_years = "51-55", "51-61", - -[global_time_series] -active = True -climo_years ="51-55", "56-61", -experiment_name = "20210528.v2rc3e.piControl.ne30pg2_EC30to60E2r2.chrysalis" -figstr = "20210528.v2rc3e.piControl.ne30pg2_EC30to60E2r2.chrysalis" -moc_file=mocTimeSeries_0051-0061.nc -ts_num_years = 5 -ts_years = "51-55", "51-61", -years = "51-61", diff --git a/examples/generate_examples.py b/examples/generate_examples.py deleted file mode 100644 index 81620122..00000000 --- a/examples/generate_examples.py +++ /dev/null @@ -1,80 +0,0 @@ -import re -import subprocess - -from mache import MachineInfo - -# Script to generate standard example configurations for different machines. -# Inspired by https://github.com/E3SM-Project/e3sm_diags/blob/master/docs/source/quickguides/generate_quick_guides.py - - -def get_expansions(): - machine_info = MachineInfo() - config = machine_info.config - machine = machine_info.machine - if machine == "compy": - expansions = get_compy_expansions(config) - else: - raise ValueError(f"Unsupported machine={machine}") - return expansions - - -def get_compy_expansions(config): - diags_base_path = config.get("diagnostics", "base_path") - web_base_path = config.get("web_portal", "base_path") - # Note: `os.environ.get("USER")` also works. Here we're already using mache but not os, so using mache. - username = config.get("web_portal", "username") - d = { - # [default] - "environment_commands": "source /share/apps/E3SM/conda_envs/load_latest_e3sm_unified_compy.sh", - # `input` data is only in this directory, so not using `username` - "input": "/compyfs/fors729/e3sm_unified_test_zppy/20210528.v2rc3e.piControl.ne30pg2_EC30to60E2r2.chrysalis", - "mapping_file": "/compyfs/zender/maps/map_ne30pg2_to_cmip6_180x360_aave.20200201.nc", - "output": f"/qfs/people/{username}/zppy_complete_run_compy_output/20210528.v2rc3e.piControl.ne30pg2_EC30to60E2r2.chrysalis", - "partition": "short", - "www": f"{web_base_path}/{username}/zppy_complete_run_compy_output", - # [tc_analysis] - "scratch": f"/qfs/people/{username}", - # [e3sm_diags] - "obs_ts": f"{diags_base_path}/observations/Atm/time-series", - "reference_data_path": f"{diags_base_path}/observations/Atm/climatology", - # [e3sm_diags] > [[ atm_monthly_180x360_aave_tc_analysis ]] - "tc_obs": "/compyfs/e3sm_diags_data/obs_for_e3sm_diags/tc-analysis", - # [e3sm_diags] > [[ atm_monthly_180x360_aave_mvm ]] - "ref_name": "20210528.v2rc3e.piControl.ne30pg2_EC30to60E2r2.chrysalis", - "reference_data_path_mvm": "/qfs/people/fors729/zppy_complete_run_compy_output/20210528.v2rc3e.piControl.ne30pg2_EC30to60E2r2.chrysalis/post/atm/180x360_aave/clim", - # [mpas_analysis] - "partition_mpas": "slurm", - } - return d - - -# Important: Do not have more than one `#expand` operation per line. The code will only expand the first one. 
diff --git a/tests/integration/generated/directions_compy.md b/tests/integration/generated/directions_compy.md
new file mode 100644
index 00000000..7a8c0c37
--- /dev/null
+++ b/tests/integration/generated/directions_compy.md
@@ -0,0 +1,91 @@
+# Testing directions for compy
+
+## Commands to run before running integration tests
+
+### test_bundles
+
+```
+rm -rf /compyfs/www//fors729/zppy_test_bundles_www/v2.LR.historical_0201
+rm -rf /compyfs/fors729/zppy_test_bundles_output/v2.LR.historical_0201/post
+# Generate cfg
+python tests/integration/utils.py
+zppy -c tests/integration/generated/test_bundles.cfg
+# bundle1 and bundle2 should run. After they finish, invoke zppy again to resolve remaining dependencies:
+zppy -c tests/integration/generated/test_bundles.cfg
+# bundle3 and ilamb should run
+```
+
+### test_complete_run
+
+```
+rm -rf /compyfs/www//fors729/zppy_test_complete_run_www/v2.LR.historical_0201
+rm -rf /compyfs/fors729/zppy_test_complete_run_output/v2.LR.historical_0201/post
+# Generate cfg
+python tests/integration/utils.py
+zppy -c tests/integration/generated/test_complete_run.cfg
+```
+
+## Commands to run to replace outdated expected files
+
+### test_bash_generation
+
+```
+rm -rf /compyfs/www/zppy_test_resources/expected_bash_files
+cd
+# Your output will now become the new expectation.
+# You can just move (i.e., not copy) the output since re-running this test will re-generate the output.
+mv test_bash_generation_output/post/scripts /compyfs/www/zppy_test_resources/expected_bash_files
+# Rerun test
+python -u -m unittest tests/integration/test_bash_generation.py
+```
+
+#### test_bundles
+
+```
+rm -rf /compyfs/www/zppy_test_resources/expected_bundles
+# Your output will now become the new expectation.
+# Copy output so you don't have to rerun zppy to generate the output.
+cp -r /compyfs/www//fors729/zppy_test_bundles_www/v2.LR.historical_0201 /compyfs/www/zppy_test_resources/expected_bundles
+mkdir -p /compyfs/www/zppy_test_resources/expected_bundles/bundle_files
+cp -r /compyfs/fors729/zppy_test_bundles_output/v2.LR.historical_0201/post/scripts/bundle*.bash /compyfs/www/zppy_test_resources/expected_bundles/bundle_files
+cd /compyfs/www/zppy_test_resources/expected_bundles
+# This file will list all the expected images.
+find . -type f -name '*.png' > ../image_list_expected_bundles.txt
+cd
+# Rerun test
+python -u -m unittest tests/integration/test_bundles.py
+```
+
+### test_campaign
+
+```
+cd
+./tests/integration/generated/update_campaign_expected_files_compy.sh
+```
+
+### test_complete_run
+
+```
+rm -rf /compyfs/www/zppy_test_resources/expected_complete_run
+# Your output will now become the new expectation.
+# Copy output so you don't have to rerun zppy to generate the output.
+cp -r /compyfs/www//fors729/zppy_test_complete_run_www/v2.LR.historical_0201 /compyfs/www/zppy_test_resources/expected_complete_run
+cd /compyfs/www/zppy_test_resources/expected_complete_run
+# This file will list all the expected images.
+find . -type f -name '*.png' > ../image_list_expected_complete_run.txt
+cd
+# Rerun test
+python -u -m unittest tests/integration/test_complete_run.py
+```
+
+### test_defaults
+
+```
+rm -rf /compyfs/www/zppy_test_resources/test_defaults_expected_files
+mkdir -p /compyfs/www/zppy_test_resources/test_defaults_expected_files
+# Your output will now become the new expectation.
+# You can just move (i.e., not copy) the output since re-running this test will re-generate the output.
+mv test_defaults_output/post/scripts/*.settings /compyfs/www/zppy_test_resources/test_defaults_expected_files
+# Rerun test
+python -u -m unittest tests/integration/test_defaults.py
+```
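The `find` commands above simply record the relative path of every expected `.png` under the refreshed expected directory. An equivalent sketch in Python, in case the listing ever needs to be rebuilt or compared programmatically; the directory and file names are the ones from the compy directions and should be adjusted as needed:

```python
from pathlib import Path

# Paths taken from the compy directions above; treat them as an example.
expected_dir = Path("/compyfs/www/zppy_test_resources/expected_complete_run")
image_list = expected_dir.parent / "image_list_expected_complete_run.txt"

# Equivalent of running `find . -type f -name '*.png'` from inside expected_dir
# (sorted here for a stable, diff-friendly listing).
paths = sorted(f"./{p.relative_to(expected_dir)}" for p in expected_dir.rglob("*.png"))
image_list.write_text("\n".join(paths) + "\n")
```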
diff --git a/tests/integration/generated/update_campaign_expected_files_compy.sh b/tests/integration/generated/update_campaign_expected_files_compy.sh
new file mode 100755
index 00000000..5e3779c7
--- /dev/null
+++ b/tests/integration/generated/update_campaign_expected_files_compy.sh
@@ -0,0 +1,16 @@
+# Run this script to update expected files for test_campaign.py
+# Run from the top level of the zppy repo
+# Run as `./tests/integration/generated/update_campaign_expected_files_compy.sh`
+
+for campaign in "cryosphere" "cryosphere_override" "high_res_v1" "none" "water_cycle" "water_cycle_override"
+do
+  echo ${campaign}
+  rm -rf /compyfs/www/zppy_test_resources/test_campaign_${campaign}_expected_files
+  mkdir -p /compyfs/www/zppy_test_resources/test_campaign_${campaign}_expected_files
+  # Your output will now become the new expectation.
+  # You can just move (i.e., not copy) the output since re-running this test will re-generate the output.
+  mv test_campaign_${campaign}_output/post/scripts/*.settings /compyfs/www/zppy_test_resources/test_campaign_${campaign}_expected_files
+done
+
+# Rerun test
+python -m unittest tests/integration/test_campaign.py
diff --git a/tests/integration/template_bundles.cfg b/tests/integration/template_bundles.cfg
index 82d601fd..dcc3f335 100644
--- a/tests/integration/template_bundles.cfg
+++ b/tests/integration/template_bundles.cfg
@@ -6,7 +6,8 @@ input_subdir = archive/atm/hist
 mapping_file = "#expand mapping_path#map_ne30pg2_to_cmip6_180x360_aave.20200201.nc"
 # To run this test, edit `output` and `www` in this file, along with `actual_images_dir` in test_bundles.py
 output = "#expand user_output#zppy_test_bundles_output/v2.LR.historical_0201"
-partition = debug
+partition = "#expand partition_short#"
+qos = "#expand qos_short#"
 walltime = "02:00:00"
 www = "#expand user_www#zppy_test_bundles_www"
diff --git a/tests/integration/template_complete_run.cfg b/tests/integration/template_complete_run.cfg
index 93de0aa0..5d2d2c3f 100644
--- a/tests/integration/template_complete_run.cfg
+++ b/tests/integration/template_complete_run.cfg
@@ -6,7 +6,8 @@ input_subdir = archive/atm/hist
 mapping_file = "#expand mapping_path#map_ne30pg2_to_cmip6_180x360_aave.20200201.nc"
 # To run this test, edit `output` and `www` in this file, along with `actual_images_dir` in test_complete_run.py
 output = "#expand user_output#zppy_test_complete_run_output/v2.LR.historical_0201"
-partition = debug
+partition = "#expand partition_short#"
+qos = "#expand qos_short#"
 www = "#expand user_www#zppy_test_complete_run_www"

 [climo]
@@ -86,9 +87,10 @@ years = "1850:1854:2", "1850:1854:4",
   [[ atm_monthly_180x360_aave ]]
   climo_diurnal_frequency = "diurnal_8xdaily"
   climo_diurnal_subsection = "atm_monthly_diurnal_8xdaily_180x360_aave"
-  partition = compute
+  partition = "#expand partition_long#"
+  qos = "#expand qos_long#"
   sets = "lat_lon","zonal_mean_xy","zonal_mean_2d","polar","cosp_histogram","meridional_mean_2d","enso_diags","qbo","diurnal_cycle","annual_cycle_zonal_mean","streamflow", "zonal_mean_2d_stratosphere",
-  walltime = "01:00:00"
+  walltime = "#expand diags_walltime#"

   [[ atm_monthly_180x360_aave_environment_commands ]]
   environment_commands = "#expand diags_environment_commands#"
@@ -108,6 +110,8 @@ years = "1850:1854:2", "1850:1854:4",
   climo_diurnal_subsection = "atm_monthly_diurnal_8xdaily_180x360_aave"
   climo_subsection = "atm_monthly_180x360_aave"
   diff_title = "Difference"
+  partition = "#expand partition_long#"
+  qos = "#expand qos_long#"
   ref_final_yr = 1851
   ref_name = "v2.LR.historical_0201"
   ref_start_yr = 1850
@@ -119,6 +123,7 @@ years = "1850:1854:2", "1850:1854:4",
   tag = "model_vs_model"
   ts_num_years_ref = 2
   ts_subsection = "atm_monthly_180x360_aave"
+  walltime = "#expand diags_walltime#"
   years = "1852-1853",

 [mpas_analysis]
@@ -128,7 +133,8 @@ climo_years ="1850-1854", "1855-1860",
 enso_years = "1850-1854", "1855-1860",
 mesh = "EC30to60E2r2"
 parallelTaskCount = 6
-partition = compute
+partition = "#expand partition_long#"
+qos = "#expand qos_long#"
 ts_years = "1850-1854", "1850-1860",
 walltime = "00:30:00"
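With `partition`, `qos`, and `walltime` expressed as placeholders, the same template now renders differently on each machine. The values below are copied from the chrysalis expansions and the new compy expansions in tests/integration/utils.py; the snippet is only an illustration of the substitution, not code from the repo:

```python
# Values copied from the expansions dicts in tests/integration/utils.py.
chrysalis = {"partition_long": "compute", "qos_long": "regular", "diags_walltime": "1:00:00"}
compy = {"partition_long": "slurm", "qos_long": "regular", "diags_walltime": "03:00:00"}

template_line = 'partition = "#expand partition_long#"'
for name, expansions in (("chrysalis", chrysalis), ("compy", compy)):
    rendered = template_line.replace("#expand partition_long#", expansions["partition_long"])
    print(f"{name}: {rendered}")
# chrysalis: partition = "compute"
# compy: partition = "slurm"
```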
diff --git a/tests/integration/template_directions.md b/tests/integration/template_directions.md
index 75477ec1..4f8175e7 100644
--- a/tests/integration/template_directions.md
+++ b/tests/integration/template_directions.md
@@ -48,7 +48,7 @@ rm -rf #expand expected_dir#expected_bundles
 cp -r #expand user_www#zppy_test_bundles_www/v2.LR.historical_0201 #expand expected_dir#expected_bundles
 mkdir -p #expand expected_dir#expected_bundles/bundle_files
 cp -r #expand user_output#zppy_test_bundles_output/v2.LR.historical_0201/post/scripts/bundle*.bash #expand expected_dir#expected_bundles/bundle_files
-cd #expected_bundles
+cd #expand expected_dir#expected_bundles
 # This file will list all the expected images.
 find . -type f -name '*.png' > ../image_list_expected_bundles.txt
 cd
diff --git a/tests/integration/utils.py b/tests/integration/utils.py
index 1c26edf3..057c00f8 100644
--- a/tests/integration/utils.py
+++ b/tests/integration/utils.py
@@ -134,9 +134,14 @@ def get_chyrsalis_expansions(config):
         "diags_obs_climo": f"{diags_base_path}/observations/Atm/climatology/",
         "diags_obs_tc": f"{diags_base_path}/observations/Atm/tc-analysis/",
         "diags_obs_ts": f"{diags_base_path}/observations/Atm/time-series/",
+        "diags_walltime": "1:00:00",
         "environment_commands": f"source {unified_path}/load_latest_e3sm_unified_chrysalis.sh",
         "expected_dir": "/lcrc/group/e3sm/public_html/zppy_test_resources/",
         "mapping_path": "/home/ac.zender/data/maps/",
+        "partition_long": "compute",
+        "partition_short": "debug",
+        "qos_long": "regular",
+        "qos_short": "regular",
         "scratch": f"/lcrc/globalscratch/{username}/",
         "user_input": "/lcrc/group/e3sm/ac.forsyth2/",
         "user_output": f"/lcrc/group/e3sm/{username}/",
@@ -145,12 +150,42 @@ def get_chyrsalis_expansions(config):
     return d


+def get_compy_expansions(config):
+    diags_base_path = config.get("diagnostics", "base_path")
+    unified_path = config.get("e3sm_unified", "base_path")
+    # Note: `os.environ.get("USER")` also works. Here we're already using mache but not os, so using mache.
+    username = config.get("web_portal", "username")
+    web_base_path = config.get("web_portal", "base_path")
+    d = {
+        # To run this test, replace conda environment with your e3sm_diags dev environment
+        "diags_environment_commands": "source /qfs/people/fors729/miniconda3/etc/profile.d/conda.sh; conda activate e3sm_diags_dev_20220722",
+        "diags_obs_climo": f"{diags_base_path}/observations/Atm/climatology/",
+        "diags_obs_tc": f"{diags_base_path}/observations/Atm/tc-analysis/",
+        "diags_obs_ts": f"{diags_base_path}/observations/Atm/time-series/",
+        "diags_walltime": "03:00:00",
+        "environment_commands": f"source {unified_path}/load_latest_e3sm_unified_compy.sh",
+        "expected_dir": "/compyfs/www/zppy_test_resources/",
+        "mapping_path": "/compyfs/zender/maps/",
+        "partition_long": "slurm",
+        "partition_short": "short",
+        "qos_long": "regular",
+        "qos_short": "regular",
+        "scratch": f"/qfs/people/{username}/",
+        "user_input": "/compyfs/fors729/",
+        "user_output": f"/compyfs/{username}/",
+        "user_www": f"{web_base_path}/{username}/",
+    }
+    return d
+
+
 def get_expansions():
     machine_info = MachineInfo()
     config = machine_info.config
     machine = machine_info.machine
     if machine == "chrysalis":
         expansions = get_chyrsalis_expansions(config)
+    elif machine == "compy":
+        expansions = get_compy_expansions(config)
     else:
         raise ValueError(f"Unsupported machine={machine}")
     expansions["machine"] = machine
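A rough usage sketch of the machine dispatch added above: mache's `MachineInfo` identifies the current machine, and `get_expansions()` returns the matching expansions dict (raising `ValueError` elsewhere). The machine name recorded in `expansions["machine"]` is presumably what gives generated files such as directions_compy.md their suffix. Everything below that is not visible in the diff (the helper name, the prints) is illustrative only, and the snippet assumes it runs on a supported machine:

```python
from mache import MachineInfo  # mache detects which E3SM machine this is


def current_machine():
    # Mirrors the dispatch in get_expansions(): accept only supported machines,
    # otherwise fail loudly, just as the real function does.
    machine = MachineInfo().machine
    if machine not in ("chrysalis", "compy"):
        raise ValueError(f"Unsupported machine={machine}")
    return machine


machine = current_machine()
print(f"directions_{machine}.md")                        # e.g. directions_compy.md
print(f"update_campaign_expected_files_{machine}.sh")    # e.g. ..._compy.sh
```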