Merge remote-tracking branch 'upstream/main' into corr-bug
* upstream/main: (86 commits)
  Fixed a mispelling of dimension in dataarray documentation for from_dict (pydata#6020)
  [pre-commit.ci] pre-commit autoupdate (pydata#6014)
  [pre-commit.ci] pre-commit autoupdate (pydata#5990)
  Use set_options for asv bottleneck tests (pydata#5986)
  Fix module name retrieval in `backend.plugins.remove_duplicates()`, plugin tests (pydata#5959)
  Check for py version instead of try/except when importing entry_points (pydata#5988)
  Add "see also" in to_dataframe docs (pydata#5978)
  Alternate method using inline css to hide regular html output in an untrusted notebook (pydata#5880)
  Fix mypy issue with entry_points (pydata#5979)
  Remove pre-commit auto update (pydata#5958)
  Do not change coordinate inplace when throwing error (pydata#5957)
  Create CITATION.cff (pydata#5956)
  Add groupby & resample benchmarks (pydata#5922)
  Fix plot.line crash for data of shape (1, N) in _title_for_slice on format_item (pydata#5948)
  Disable unit test comments (pydata#5946)
  Publish test results from workflow_run only (pydata#5947)
  Generator for groupby reductions (pydata#5871)
  whats-new dev
  whats-new for 0.20.1 (pydata#5943)
  Docs: fix URL for PTSA (pydata#5935)
  ...
dcherian committed Nov 24, 2021
2 parents 3dde39b + 5db4046 commit cd2d34d
Showing 137 changed files with 7,059 additions and 1,612 deletions.
74 changes: 74 additions & 0 deletions .github/workflows/benchmarks.yml
@@ -0,0 +1,74 @@
name: Benchmark

on:
  pull_request:
    types: [opened, reopened, synchronize, labeled]
  workflow_dispatch:

jobs:
  benchmark:
    if: ${{ contains( github.event.pull_request.labels.*.name, 'run-benchmark') && github.event_name == 'pull_request' || github.event_name == 'workflow_dispatch' }}
    name: Linux
    runs-on: ubuntu-20.04
    env:
      ASV_DIR: "./asv_bench"

    steps:
      # We need the full repo to avoid this issue
      # https://github.com/actions/checkout/issues/23
      - uses: actions/checkout@v2
        with:
          fetch-depth: 0

      - name: Setup Miniconda
        uses: conda-incubator/setup-miniconda@v2
        with:
          # installer-url: https://github.com/conda-forge/miniforge/releases/latest/download/Mambaforge-Linux-x86_64.sh
          installer-url: https://github.com/conda-forge/miniforge/releases/latest/download/Miniforge3-Linux-x86_64.sh

      - name: Setup some dependencies
        shell: bash -l {0}
        run: |
          pip install asv
          sudo apt-get update -y
      - name: Run benchmarks
        shell: bash -l {0}
        id: benchmark
        env:
          OPENBLAS_NUM_THREADS: 1
          MKL_NUM_THREADS: 1
          OMP_NUM_THREADS: 1
          ASV_FACTOR: 1.5
          ASV_SKIP_SLOW: 1
        run: |
          set -x
          # ID this runner
          asv machine --yes
          echo "Baseline: ${{ github.event.pull_request.base.sha }} (${{ github.event.pull_request.base.label }})"
          echo "Contender: ${GITHUB_SHA} (${{ github.event.pull_request.head.label }})"
          # Use mamba for env creation
          # export CONDA_EXE=$(which mamba)
          export CONDA_EXE=$(which conda)
          # Run benchmarks for current commit against base
          ASV_OPTIONS="--split --show-stderr --factor $ASV_FACTOR"
          asv continuous $ASV_OPTIONS ${{ github.event.pull_request.base.sha }} ${GITHUB_SHA} \
              | sed "/Traceback \|failed$\|PERFORMANCE DECREASED/ s/^/::error::/" \
              | tee benchmarks.log
          # Report and export results for subsequent steps
          if grep "Traceback \|failed\|PERFORMANCE DECREASED" benchmarks.log > /dev/null ; then
              exit 1
          fi
        working-directory: ${{ env.ASV_DIR }}

      - name: Add instructions to artifact
        if: always()
        run: |
          cp benchmarks/README_CI.md benchmarks.log .asv/results/
        working-directory: ${{ env.ASV_DIR }}

      - uses: actions/upload-artifact@v2
        if: always()
        with:
          name: asv-benchmark-results-${{ runner.os }}
          path: ${{ env.ASV_DIR }}/.asv/results
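
For reference, the commands below are a rough local equivalent of the "Run benchmarks" step above. This is only a sketch: it assumes a local clone with asv installed and an upstream/main remote, neither of which is defined by this workflow file.

# Local sketch of the "Run benchmarks" step; `upstream/main` and the
# `asv_bench` working directory are assumptions, not values pinned by CI.
cd asv_bench
export OPENBLAS_NUM_THREADS=1 MKL_NUM_THREADS=1 OMP_NUM_THREADS=1  # avoid thread oversubscription, as CI does
asv machine --yes                                                  # register this machine's profile once
ASV_OPTIONS="--split --show-stderr --factor 1.5"
asv continuous $ASV_OPTIONS upstream/main HEAD | tee benchmarks.log

asv continuous benchmarks both revisions and flags the benchmarks whose timing ratio exceeds the configured --factor.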
2 changes: 1 addition & 1 deletion .github/workflows/ci-additional.yaml
@@ -102,7 +102,7 @@ jobs:
             $PYTEST_EXTRA_FLAGS

       - name: Upload code coverage to Codecov
-        uses: codecov/codecov-action@v2.0.3
+        uses: codecov/codecov-action@v2.1.0
         with:
           file: ./coverage.xml
           flags: unittests,${{ matrix.env }}
44 changes: 0 additions & 44 deletions .github/workflows/ci-pre-commit-autoupdate.yaml

This file was deleted.

21 changes: 7 additions & 14 deletions .github/workflows/ci.yaml
@@ -100,27 +100,20 @@ jobs:
           path: pytest.xml

       - name: Upload code coverage to Codecov
-        uses: codecov/codecov-action@v2.0.3
+        uses: codecov/codecov-action@v2.1.0
         with:
           file: ./coverage.xml
           flags: unittests
           env_vars: RUNNER_OS,PYTHON_VERSION
           name: codecov-umbrella
           fail_ci_if_error: false

-  publish-test-results:
-    needs: test
+  event_file:
+    name: "Event File"
     runs-on: ubuntu-latest
-    # the build-and-test job might be skipped, we don't need to run this job then
-    if: success() || failure()
-
     steps:
-      - name: Download Artifacts
-        uses: actions/download-artifact@v2
-        with:
-          path: test-results
-
-      - name: Publish Unit Test Results
-        uses: EnricoMi/publish-unit-test-result-action@v1
+      - name: Upload
+        uses: actions/upload-artifact@v2
         with:
-          files: test-results/**/*.xml
+          name: Event File
+          path: ${{ github.event_path }}
18 changes: 7 additions & 11 deletions .github/workflows/publish-test-results.yaml
@@ -1,4 +1,4 @@
-# Copied from https://github.com/EnricoMi/publish-unit-test-result-action/blob/v1.18/README.md#support-fork-repositories-and-dependabot-branches
+# Copied from https://github.com/EnricoMi/publish-unit-test-result-action/blob/v1.23/README.md#support-fork-repositories-and-dependabot-branches

 name: Publish test results

@@ -12,11 +12,7 @@ jobs:
   publish-test-results:
     name: Publish test results
     runs-on: ubuntu-latest
-    if: >
-      github.event.workflow_run.conclusion != 'skipped' && (
-        github.event.sender.login == 'dependabot[bot]' ||
-        github.event.workflow_run.head_repository.full_name != github.repository
-      )
+    if: github.event.workflow_run.conclusion != 'skipped'

     steps:
       - name: Download and extract artifacts
@@ -26,13 +22,10 @@ jobs:
           mkdir artifacts && cd artifacts

           artifacts_url=${{ github.event.workflow_run.artifacts_url }}

-          artifacts=$(gh api $artifacts_url -q '.artifacts[] | {name: .name, url: .archive_download_url}')
-          IFS=$'\n'
-          for artifact in $artifacts
+          gh api "$artifacts_url" -q '.artifacts[] | [.name, .archive_download_url] | @tsv' | while read artifact
           do
-            name=$(jq -r .name <<<$artifact)
-            url=$(jq -r .url <<<$artifact)
+            IFS=$'\t' read name url <<< "$artifact"
             gh api $url > "$name.zip"
             unzip -d "$name" "$name.zip"
           done
@@ -41,4 +34,7 @@
         uses: EnricoMi/publish-unit-test-result-action@v1
         with:
           commit: ${{ github.event.workflow_run.head_sha }}
+          event_file: artifacts/Event File/event.json
+          event_name: ${{ github.event.workflow_run.event }}
           files: "artifacts/**/*.xml"
+          comment_mode: off
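
Pieced together from the added lines, the rewritten download loop reads as below. In this standalone sketch the workflow-provided ${{ github.event.workflow_run.artifacts_url }} is replaced by a placeholder REST path, so OWNER/REPO/RUN_ID are illustrative values, not anything defined in this commit; it also assumes an authenticated `gh` CLI.

# Standalone sketch of the new loop, outside the workflow context.
artifacts_url="repos/OWNER/REPO/actions/runs/RUN_ID/artifacts"   # placeholder for the workflow_run artifacts URL
mkdir artifacts && cd artifacts
# Emit one "<name><TAB><download url>" record per artifact, then fetch and unpack each one
gh api "$artifacts_url" -q '.artifacts[] | [.name, .archive_download_url] | @tsv' | while read artifact
do
  IFS=$'\t' read name url <<< "$artifact"
  gh api $url > "$name.zip"
  unzip -d "$name" "$name.zip"
done

Emitting a tab-separated record per artifact avoids the per-item jq calls and the IFS=$'\n' word-splitting that the old version relied on.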
6 changes: 3 additions & 3 deletions .github/workflows/upstream-dev-ci.yaml
@@ -122,7 +122,7 @@ jobs:
           shopt -s globstar
           python .github/workflows/parse_logs.py logs/**/*-log
       - name: Report failures
-        uses: actions/github-script@v4.1
+        uses: actions/github-script@v5
         with:
           github-token: ${{ secrets.GITHUB_TOKEN }}
           script: |
@@ -158,15 +158,15 @@ jobs:
             // If no issue is open, create a new issue,
             // else update the body of the existing issue.
             if (result.repository.issues.edges.length === 0) {
-              github.issues.create({
+              github.rest.issues.create({
                 owner: variables.owner,
                 repo: variables.name,
                 body: issue_body,
                 title: title,
                 labels: [variables.label]
               })
             } else {
-              github.issues.update({
+              github.rest.issues.update({
                 owner: variables.owner,
                 repo: variables.name,
                 issue_number: result.repository.issues.edges[0].node.number,
19 changes: 11 additions & 8 deletions .pre-commit-config.yaml
@@ -8,20 +8,22 @@ repos:
       - id: check-yaml
   # isort should run before black as black sometimes tweaks the isort output
   - repo: https://github.com/PyCQA/isort
-    rev: 5.9.3
+    rev: 5.10.1
     hooks:
       - id: isort
   # https://github.com/python/black#version-control-integration
   - repo: https://github.com/psf/black
-    rev: 21.7b0
+    rev: 21.11b1
     hooks:
       - id: black
+      - id: black-jupyter
   - repo: https://github.com/keewis/blackdoc
     rev: v0.3.4
     hooks:
       - id: blackdoc
-  - repo: https://gitlab.com/pycqa/flake8
-    rev: 3.9.2
+        exclude: "generate_reductions.py"
+  - repo: https://github.com/PyCQA/flake8
+    rev: 4.0.1
     hooks:
       - id: flake8
   # - repo: https://github.com/Carreau/velin
@@ -30,20 +32,21 @@ repos:
   #     - id: velin
   #       args: ["--write", "--compact"]
   - repo: https://github.com/pre-commit/mirrors-mypy
-    rev: v0.910
+    rev: v0.910-1
     hooks:
       - id: mypy
-        # Copied from setup.cfg
-        exclude: "properties|asv_bench"
+        # `properies` & `asv_bench` are copied from setup.cfg.
+        # `_typed_ops.py` is added since otherwise mypy will complain (but notably only in pre-commit)
+        exclude: "properties|asv_bench|_typed_ops.py"
         additional_dependencies: [
             # Type stubs
             types-python-dateutil,
             types-pkg_resources,
             types-PyYAML,
             types-pytz,
+            typing-extensions==3.10.0.0,
             # Dependencies that are typed
             numpy,
-            typing-extensions==3.10.0.0,
           ]
   # run this occasionally, ref discussion https://github.com/pydata/xarray/pull/3194
   # - repo: https://github.com/asottile/pyupgrade
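
To exercise the bumped hook set locally, the standard pre-commit workflow applies; nothing below is added by this commit, it is just the usual invocation.

pip install pre-commit            # or: conda install -c conda-forge pre-commit
pre-commit install                # run the configured hooks on every `git commit`
pre-commit run --all-files        # run isort, black, blackdoc, flake8 and mypy across the repo
pre-commit run mypy --all-files   # or exercise a single hook, e.g. the mypy hook changed above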
96 changes: 96 additions & 0 deletions CITATION.cff
@@ -0,0 +1,96 @@
cff-version: 1.2.0
message: "If you use this software, please cite it as below."
authors:
- family-names: "Hoyer"
given-names: "Stephan"
orcid: "https://orcid.org/0000-0002-5207-0380"
- family-names: "Roos"
given-names: "Maximilian"
- family-names: "Joseph"
given-names: "Hamman"
orcid: "https://orcid.org/0000-0001-7479-8439"
- family-names: "Magin"
given-names: "Justus"
- family-names: "Cherian"
given-names: "Deepak"
orcid: "https://orcid.org/0000-0002-6861-8734"
- family-names: "Fitzgerald"
given-names: "Clark"
orcid: "https://orcid.org/0000-0003-3446-6389"
- family-names: "Hauser"
given-names: "Mathias"
orcid: "https://orcid.org/0000-0002-0057-4878"
- family-names: "Fujii"
given-names: "Keisuke"
orcid: "https://orcid.org/0000-0003-0390-9984"
- family-names: "Maussion"
given-names: "Fabien"
orcid: "https://orcid.org/0000-0002-3211-506X"
- family-names: "Imperiale"
given-names: "Guido"
- family-names: "Clark"
given-names: "Spencer"
orcid: "https://orcid.org/0000-0001-5595-7895"
- family-names: "Kleeman"
given-names: "Alex"
- family-names: "Nicholas"
given-names: "Thomas"
orcid: "https://orcid.org/0000-0002-2176-0530"
- family-names: "Kluyver"
given-names: "Thomas"
orcid: "https://orcid.org/0000-0003-4020-6364"
- family-names: "Westling"
given-names: "Jimmy"
- family-names: "Munroe"
given-names: "James"
orcid: "https://orcid.org/0000-0001-9098-6309"
- family-names: "Amici"
given-names: "Alessandro"
orcid: "https://orcid.org/0000-0002-1778-4505"
- family-names: "Barghini"
given-names: "Aureliana"
- family-names: "Banihirwe"
given-names: "Anderson"
orcid: "https://orcid.org/0000-0001-6583-571X"
- family-names: "Bell"
given-names: "Ray"
orcid: "https://orcid.org/0000-0003-2623-0587"
- family-names: "Hatfield-Dodds"
given-names: "Zac"
orcid: "https://orcid.org/0000-0002-8646-8362"
- family-names: "Abernathey"
given-names: "Ryan"
orcid: "https://orcid.org/0000-0001-5999-4917"
- family-names: "Bovy"
given-names: "Benoît"
- family-names: "Omotani"
given-names: "John"
orcid: "https://orcid.org/0000-0002-3156-8227"
- family-names: "Mühlbauer"
given-names: "Kai"
orcid: "https://orcid.org/0000-0001-6599-1034"
- family-names: "Roszko"
given-names: "Maximilian K."
orcid: "https://orcid.org/0000-0001-9424-2526"
- family-names: "Wolfram"
given-names: "Phillip J."
orcid: "https://orcid.org/0000-0001-5971-4241"
title: "xarray"
doi: 10.5281/zenodo.598201
url: "https://github.com/pydata/xarray"
preferred-citation:
type: article
authors:
- family-names: "Hoyer"
given-names: "Stephan"
orcid: "https://orcid.org/0000-0002-5207-0380"
- family-names: "Joseph"
given-names: "Hamman"
orcid: "https://orcid.org/0000-0001-7479-8439"
doi: "10.5334/jors.148"
journal: "Journal of Open Research Software"
month: 4
title: "xarray: N-D labeled Arrays and Datasets in Python"
volume: 5
issue: 1
year: 2017
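
CITATION.cff is the file behind GitHub's "Cite this repository" button. It can also be checked and converted locally with the cffconvert tool; the commands below reflect cffconvert's usual CLI and are an assumption here, not something this commit sets up.

pip install cffconvert
cffconvert --validate   # check CITATION.cff against the CFF 1.2.0 schema (assumed cffconvert flag)
cffconvert -f bibtex    # print a BibTeX entry derived from the file (assumed cffconvert flag)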