Open multiple files using xcdat #926

Merged 6 commits on Apr 27, 2023
58 changes: 40 additions & 18 deletions .github/workflows/build_workflow.yml
@@ -31,19 +31,19 @@ jobs:
with:
node-version: '16'
- name: Checkout Code Repository
uses: actions/checkout@v2
uses: actions/checkout@v3
with:
node-version: '16'
- name: Set up Python 3.9
uses: actions/setup-python@v2
- name: Set up Python
uses: actions/setup-python@v4
with:
node-version: '16'
python-version: 3.9
python-version: '3.10'
# Run all pre-commit hooks on all the files.
# Getting only staged files can be tricky in case a new PR is opened
# since the action is run on a branch in detached head state
- name: Install and Run Pre-commit
uses: pre-commit/action@v2.0.3
uses: pre-commit/action@v3.0.0
with:
node-version: '16'

@@ -59,26 +59,48 @@ jobs:
- uses: actions/setup-node@v3
with:
node-version: '16'
- uses: actions/checkout@v2
- name: Cache Conda
uses: actions/cache@v2
env:
# Increase this value to reset cache if conda/dev.yml has not changed in the workflow
CACHE_NUMBER: 0
with:
node-version: '16'
path: ~/conda_pkgs_dir
key: ${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}-publish
- uses: actions/checkout@v3

- name: Set up Conda Environment
uses: conda-incubator/setup-miniconda@v2
with:
activate-environment: "pcmdi_metrics_dev"
environment-file: conda-env/dev.yml
miniforge-variant: Mambaforge
miniforge-version: latest
use-mamba: true
mamba-version: "*"
channel-priority: strict
auto-update-conda: true
# IMPORTANT: This needs to be set for caching to work properly!
use-only-tar-bz2: true

# Used for refreshing the cache every 24 hours to avoid inconsistencies of package
# versions between the CI pipeline and local installations.
- name: Get Date
id: get-date
run: echo "today=$(/bin/date -u '+%Y%m%d')" >> $GITHUB_OUTPUT
shell: bash

- name: Cache Conda
uses: actions/cache@v3
with:
node-version: '16'
path: ${{ env.CONDA }}/envs
key:
conda-${{ runner.os }}--${{ runner.arch }}--${{
steps.get-date.outputs.today }}-${{
hashFiles('conda-env/dev.yml') }}-${{ env.CACHE_NUMBER}}
env:
# Increase this value to reset cache if conda/dev.yml has not changed in the workflow
CACHE_NUMBER: 0

- name: Update environment
run:
mamba env update -n pcmdi_metrics_dev -f conda-env/dev.yml
if: steps.cache.outputs.cache-hit != 'true'

- name: Install pcmdi_metrics
# Source: https://github.com/conda/conda-build/issues/4251#issuecomment-1053460542
run: |
python -m pip install --no-build-isolation --no-deps -e .

- name: Run Tests
run: |
30 changes: 15 additions & 15 deletions conda-env/dev.yml
@@ -6,34 +6,34 @@ channels:
dependencies:
# Base
# ==================
- python=3.9.7
- pip=21.2.4
- numpy=1.21.3
- cartopy=0.20.1
- matplotlib=3.4.3
- python=3.10.10
- pip=23.1.2
- numpy=1.23.5
- cartopy=0.21.1
- matplotlib=3.7.1
- cdat_info=8.2.1
- cdms2=3.1.5
- genutil=8.2.1
- cdutil=8.2.1
- cdp=1.7.0
- eofs=1.4.0
- seaborn=0.11.1
- seaborn=0.12.2
- enso_metrics=1.1.1
- xcdat=0.4.0
- xcdat=0.5.0
- xmltodict=0.13.0
- setuptools=65.5.0
- netcdf4=1.6.0
- setuptools=67.7.2
- netcdf4=1.6.3
- regionmask=0.9.0
- rasterio=1.2.10
- shapely=1.8.0
- rasterio=1.3.6
- shapely=2.0.1
# Testing
# ==================
- pre_commit=2.20.0
- pytest=6.2.5
- pytest-cov=3.0.0
- pre_commit=3.2.2
- pytest=7.3.1
- pytest-cov=4.0.0
# Developer Tools
# ==================
- jupyterlab=3.2.1
- jupyterlab=3.6.3
- nb_conda=2.2.1
- nb_conda_kernels=2.3.1

7 changes: 4 additions & 3 deletions pcmdi_metrics/io/base.py
@@ -18,7 +18,8 @@

import pcmdi_metrics
from pcmdi_metrics import LOG_LEVEL
from pcmdi_metrics.io import xcdat_open

import xcdat as xc

value = 0
cdms2.setNetcdfShuffleFlag(value) # where value is either 0 or 1
@@ -371,9 +372,9 @@ def extract_var_from_file(self, var, var_in_file, *args, **kwargs):
var_in_file = var

try:
ds = xcdat_open(self(), data_var=var_in_file, decode_times=True)
ds = xc.open_mfdataset(self(), data_var=var_in_file, decode_times=True)
except Exception:
ds = xcdat_open(self(), data_var=var_in_file, decode_times=False) # Temporary part to read in cdms written obs4MIP AC files
ds = xc.open_mfdataset(self(), data_var=var_in_file, decode_times=False) # Temporary part to read in cdms written obs4MIP AC files

if 'level' in list(kwargs.keys()):
level = kwargs['level']
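For context, a minimal sketch (not part of this diff) of the call pattern the change above adopts: xcdat.open_mfdataset accepts a single path, a glob, or a list of paths, so the same call covers the multi-file case in the PR title. The file names and variable name below are placeholders for illustration.

import xcdat as xc

# Hypothetical file list for illustration only.
paths = [
    "ts_obs4MIPs_198001-198912.nc",
    "ts_obs4MIPs_199001-199912.nc",
]

try:
    # data_var keeps only the requested variable (plus its bounds);
    # decode_times=True converts the time axis to datetime-like objects.
    ds = xc.open_mfdataset(paths, data_var="ts", decode_times=True)
except Exception:
    # Mirrors the fallback above for legacy cdms-written obs4MIPs AC files
    # whose time encodings cannot be decoded.
    ds = xc.open_mfdataset(paths, data_var="ts", decode_times=False)

print(ds["ts"].sizes)
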
4 changes: 2 additions & 2 deletions pcmdi_metrics/mean_climate/lib/calculate_climatology.py
@@ -4,7 +4,7 @@
import dask
from genutil import StringConstructor

from pcmdi_metrics.io import xcdat_open
import xcdat as xc


def calculate_climatology(
@@ -21,7 +21,7 @@ def calculate_climatology(
print("infilename:", infilename)

# open file
d = xcdat_open(infile, data_var=var) # wrapper of xcdat open functions to enable using xml
d = xc.open_mfdataset(infile, data_var=var)
atts = d.attrs

print("type(d):", type(d))
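As a rough illustration (not taken from this PR) of what xcdat offers once the dataset is open, the temporal accessor can produce the annual-cycle climatology this module is named for; the file glob, variable name, and accessor arguments below are assumptions, not the module's exact call.

import xcdat as xc

# Placeholder glob and variable name for illustration.
d = xc.open_mfdataset("tas_Amon_*.nc", data_var="tas")
atts = d.attrs  # global attributes survive the open, as used above

# Monthly (12-step) annual-cycle climatology via xcdat's temporal accessor;
# a guess at the downstream computation, shown only as a usage example.
ac = d.temporal.climatology("tas", freq="month", weighted=True)
print(ac["tas"].shape)
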
3 changes: 1 addition & 2 deletions pcmdi_metrics/mean_climate/lib/load_and_regrid.py
@@ -1,4 +1,3 @@
from pcmdi_metrics.io import xcdat_open
import cftime
import xcdat as xc
import numpy as np
@@ -23,7 +22,7 @@ def load_and_regrid(data_path, varname, varname_in_file=None, level=None, t_grid
varname_in_file = varname

# load data
ds = xcdat_open(data_path, data_var=varname_in_file, decode_times=decode_times) # NOTE: decode_times=False will be removed once obs4MIP written using xcdat
ds = xc.open_mfdataset(data_path, data_var=varname_in_file, decode_times=decode_times) # NOTE: decode_times=False will be removed once obs4MIP written using xcdat

# calendar quality check
if "calendar" in list(ds.time.attrs.keys()):
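For reference, a hedged sketch of the load-and-regrid pattern this module implements with xcdat; the input paths, target grid, variable name, and regridding tool below are illustrative assumptions rather than the function's actual arguments.

import xcdat as xc

# Placeholder input paths and variable name.
ds = xc.open_mfdataset("pr_Amon_*.nc", data_var="pr", decode_times=True)

# Hypothetical 2.5-degree target grid; load_and_regrid receives its target
# grid (t_grid) from the caller instead of building one here.
t_grid = xc.create_uniform_grid(-88.75, 88.75, 2.5, 1.25, 358.75, 2.5)

# Horizontal regridding with xcdat's regrid2 backend (xesmf is the other option).
ds_regrid = ds.regridder.horizontal("pr", t_grid, tool="regrid2")
print(ds_regrid["pr"].shape)
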
30 changes: 1 addition & 29 deletions setup.py
@@ -11,7 +11,7 @@
else:
install_dev = False

release_version = "3.0.2"
release_version = "3.1"

p = subprocess.Popen(
("git", "describe", "--tags"),
@@ -20,34 +20,6 @@
stderr=subprocess.PIPE,
)

"""
try:
descr = p.stdout.readlines()[0].strip().decode("utf-8")
Version = "-".join(descr.split("-")[:-2])
if Version == "":
Version = descr
except Exception:
descr = release_version
Version = release_version

p = subprocess.Popen(
("git", "log", "-n1", "--pretty=short"),
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
)
try:
commit = p.stdout.readlines()[0].split()[1].decode("utf-8")
except Exception:
commit = ""

f = open("pcmdi_metrics/version.py", "w")
print("__version__ = '%s'" % Version, file=f)
print("__git_tag_describe__ = '%s'" % descr, file=f)
print("__git_sha1__ = '%s'" % commit, file=f)
f.close()
"""

# Generate and install default arguments
p = subprocess.Popen(["python", "setup_default_args.py"], cwd="share")
p.communicate()