diff --git a/.binder/environment.yml b/.binder/environment.yml
index 13b6b99e6fc..6fd5829c5e6 100644
--- a/.binder/environment.yml
+++ b/.binder/environment.yml
@@ -2,7 +2,7 @@ name: xarray-examples
channels:
- conda-forge
dependencies:
- - python=3.7
+ - python=3.8
- boto3
- bottleneck
- cartopy
@@ -31,6 +31,7 @@ dependencies:
- rasterio
- scipy
- seaborn
+ - setuptools
- sparse
- toolz
- xarray
diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md
index d78dd38dd85..a921bddaa23 100644
--- a/.github/PULL_REQUEST_TEMPLATE.md
+++ b/.github/PULL_REQUEST_TEMPLATE.md
@@ -2,5 +2,5 @@
- [ ] Closes #xxxx
- [ ] Tests added
- - [ ] Passes `black . && mypy . && flake8`
+ - [ ] Passes `isort -rc . && black . && mypy . && flake8`
- [ ] Fully documented, including `whats-new.rst` for all changes and `api.rst` for new API
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index ed62c1c256e..9df95648774 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -1,11 +1,15 @@
# https://pre-commit.com/
-# https://github.com/python/black#version-control-integration
repos:
+ # isort should run before black as black sometimes tweaks the isort output
+ - repo: https://github.com/timothycrosley/isort
+ rev: 4.3.21-2
+ hooks:
+ - id: isort
+ # https://github.com/python/black#version-control-integration
- repo: https://github.com/python/black
rev: stable
hooks:
- id: black
- language_version: python3.7
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v2.2.3
hooks:
@@ -14,7 +18,7 @@ repos:
rev: v0.761 # Must match ci/requirements/*.yml
hooks:
- id: mypy
- # run these occasionally, ref discussion https://github.com/pydata/xarray/pull/3194
+ # run this occasionally, ref discussion https://github.com/pydata/xarray/pull/3194
# - repo: https://github.com/asottile/pyupgrade
# rev: v1.22.1
# hooks:
@@ -23,7 +27,3 @@ repos:
# - "--py3-only"
# # remove on f-strings in Py3.7
# - "--keep-percent-format"
- # - repo: https://github.com/timothycrosley/isort
- # rev: 4.3.21-2
- # hooks:
- # - id: isort
diff --git a/asv_bench/asv.conf.json b/asv_bench/asv.conf.json
index 11a779ae376..d35a2a223a2 100644
--- a/asv_bench/asv.conf.json
+++ b/asv_bench/asv.conf.json
@@ -40,7 +40,7 @@
// The Pythons you'd like to test against. If not provided, defaults
// to the current version of Python used to run `asv`.
- "pythons": ["3.6"],
+ "pythons": ["3.8"],
// The matrix of dependencies to test. Each key is the name of a
// package (in PyPI) and the values are version numbers. An empty
diff --git a/azure-pipelines.yml b/azure-pipelines.yml
index d6ee76c7d3f..5789161c966 100644
--- a/azure-pipelines.yml
+++ b/azure-pipelines.yml
@@ -20,11 +20,11 @@ jobs:
conda_env: py37
py38:
conda_env: py38
- py37-upstream-dev:
- conda_env: py37
+ py38-upstream-dev:
+ conda_env: py38
upstream_dev: true
- py36-flaky:
- conda_env: py36
+ py38-flaky:
+ conda_env: py38
pytest_extra_flags: --run-flaky --run-network-tests
allow_failure: true
pool:
@@ -35,8 +35,8 @@ jobs:
- job: MacOSX
strategy:
matrix:
- py36:
- conda_env: py36
+ py38:
+ conda_env: py38
pool:
vmImage: 'macOS-10.13'
steps:
@@ -74,7 +74,7 @@ jobs:
- job: TypeChecking
variables:
- conda_env: py37
+ conda_env: py38
pool:
vmImage: 'ubuntu-16.04'
steps:
@@ -84,6 +84,18 @@ jobs:
mypy .
displayName: mypy type checks
+- job: isort
+ variables:
+ conda_env: py38
+ pool:
+ vmImage: 'ubuntu-16.04'
+ steps:
+ - template: ci/azure/install.yml
+ - bash: |
+ source activate xarray-tests
+ isort -rc --check .
+ displayName: isort formatting checks
+
- job: MinimumVersionsPolicy
pool:
vmImage: 'ubuntu-16.04'
diff --git a/ci/azure/install.yml b/ci/azure/install.yml
index e4f3a0b9e16..e11a8b54db3 100644
--- a/ci/azure/install.yml
+++ b/ci/azure/install.yml
@@ -6,12 +6,14 @@ steps:
- template: add-conda-to-path.yml
- bash: |
+ conda update -y conda
conda env create -n xarray-tests --file ${{ parameters.env_file }}
displayName: Install conda dependencies
- bash: |
source activate xarray-tests
- pip install -f https://7933911d6844c6c53a7d-47bd50c35cd79bd838daf386af554a83.ssl.cf2.rackcdn.com \
+ python -m pip install \
+ -f https://7933911d6844c6c53a7d-47bd50c35cd79bd838daf386af554a83.ssl.cf2.rackcdn.com \
--no-deps \
--pre \
--upgrade \
@@ -19,7 +21,7 @@ steps:
numpy \
pandas \
scipy
- pip install \
+ python -m pip install \
--no-deps \
--upgrade \
git+https://github.com/dask/dask \
@@ -33,7 +35,7 @@ steps:
- bash: |
source activate xarray-tests
- pip install --no-deps -e .
+ python -m pip install --no-deps -e .
displayName: Install xarray
- bash: |
diff --git a/ci/min_deps_check.py b/ci/min_deps_check.py
index a5ba90679b7..527093cf5bc 100755
--- a/ci/min_deps_check.py
+++ b/ci/min_deps_check.py
@@ -15,6 +15,7 @@
"coveralls",
"flake8",
"hypothesis",
+ "isort",
"mypy",
"pip",
"pytest",
diff --git a/ci/requirements/doc.yml b/ci/requirements/doc.yml
index a8b72dc0956..2c44e754cc4 100644
--- a/ci/requirements/doc.yml
+++ b/ci/requirements/doc.yml
@@ -3,7 +3,7 @@ channels:
# Don't change to pkgs/main, as it causes random timeouts in readthedocs
- conda-forge
dependencies:
- - python=3.7
+ - python=3.8
- bottleneck
- cartopy
- cfgrib
@@ -20,6 +20,7 @@ dependencies:
- pandas
- rasterio
- seaborn
+ - setuptools
- sphinx
- sphinx_rtd_theme
- zarr
diff --git a/ci/requirements/py36-bare-minimum.yml b/ci/requirements/py36-bare-minimum.yml
index 8b604ce02dd..00fef672855 100644
--- a/ci/requirements/py36-bare-minimum.yml
+++ b/ci/requirements/py36-bare-minimum.yml
@@ -4,8 +4,10 @@ channels:
dependencies:
- python=3.6
- coveralls
+ - pip
- pytest
- pytest-cov
- pytest-env
- numpy=1.15
- pandas=0.25
+ - setuptools=41.2
diff --git a/ci/requirements/py36-min-all-deps.yml b/ci/requirements/py36-min-all-deps.yml
index dc77e232dea..2781e551f23 100644
--- a/ci/requirements/py36-min-all-deps.yml
+++ b/ci/requirements/py36-min-all-deps.yml
@@ -23,6 +23,7 @@ dependencies:
- hdf5=1.10
- hypothesis
- iris=2.2
+ - isort
- lxml=4.4 # Optional dep of pydap
- matplotlib=3.1
- mypy=0.761 # Must match .pre-commit-config.yaml
@@ -42,6 +43,7 @@ dependencies:
- rasterio=1.0
- scipy=1.3
- seaborn=0.9
+ - setuptools=41.2
# - sparse # See py36-min-nep18.yml
- toolz=0.10
- zarr=2.3
diff --git a/ci/requirements/py36-min-nep18.yml b/ci/requirements/py36-min-nep18.yml
index 8fe7644d626..286b11c0de1 100644
--- a/ci/requirements/py36-min-nep18.yml
+++ b/ci/requirements/py36-min-nep18.yml
@@ -11,8 +11,10 @@ dependencies:
- numpy=1.17
- pandas=0.25
- pint=0.9 # Actually not enough as it doesn't implement __array_function__yet!
+ - pip
- pytest
- pytest-cov
- pytest-env
- scipy=1.2
+ - setuptools=41.2
- sparse=0.8
diff --git a/ci/requirements/py36.yml b/ci/requirements/py36.yml
index 7450fafbd86..a500173f277 100644
--- a/ci/requirements/py36.yml
+++ b/ci/requirements/py36.yml
@@ -19,7 +19,8 @@ dependencies:
- hdf5
- hypothesis
- iris
- - lxml # optional dep of pydap
+ - isort
+ - lxml # Optional dep of pydap
- matplotlib
- mypy=0.761 # Must match .pre-commit-config.yaml
- nc-time-axis
@@ -38,6 +39,7 @@ dependencies:
- rasterio
- scipy
- seaborn
+ - setuptools
- sparse
- toolz
- zarr
diff --git a/ci/requirements/py37-windows.yml b/ci/requirements/py37-windows.yml
index d9e634c74ae..e9e5c7a900a 100644
--- a/ci/requirements/py37-windows.yml
+++ b/ci/requirements/py37-windows.yml
@@ -19,13 +19,14 @@ dependencies:
- hdf5
- hypothesis
- iris
+ - isort
- lxml # Optional dep of pydap
- matplotlib
- mypy=0.761 # Must match .pre-commit-config.yaml
- nc-time-axis
- netcdf4
- numba
- - numpy<1.18 # FIXME https://github.com/pydata/xarray/issues/3409
+ - numpy
- pandas
- pint
- pip
@@ -38,6 +39,7 @@ dependencies:
- rasterio
- scipy
- seaborn
+ - setuptools
- sparse
- toolz
- zarr
diff --git a/ci/requirements/py37.yml b/ci/requirements/py37.yml
index 2f879e29f87..dba3926596e 100644
--- a/ci/requirements/py37.yml
+++ b/ci/requirements/py37.yml
@@ -19,6 +19,7 @@ dependencies:
- hdf5
- hypothesis
- iris
+ - isort
- lxml # Optional dep of pydap
- matplotlib
- mypy=0.761 # Must match .pre-commit-config.yaml
@@ -38,6 +39,7 @@ dependencies:
- rasterio
- scipy
- seaborn
+ - setuptools
- sparse
- toolz
- zarr
diff --git a/ci/requirements/py38.yml b/ci/requirements/py38.yml
index 9698e3efecf..24602f884e9 100644
--- a/ci/requirements/py38.yml
+++ b/ci/requirements/py38.yml
@@ -3,13 +3,45 @@ channels:
- conda-forge
dependencies:
- python=3.8
+ - black
+ - boto3
+ - bottleneck
+ - cartopy
+ - cdms2
+ - cfgrib
+ - cftime
+ - coveralls
+ - dask
+ - distributed
+ - flake8
+ - h5netcdf
+ - h5py
+ - hdf5
+ - hypothesis
+ - iris
+ - isort
+ - lxml # Optional dep of pydap
+ - matplotlib
+ - mypy=0.761 # Must match .pre-commit-config.yaml
+ - nc-time-axis
+ - netcdf4
+ - numba
+ - numpy
+ - pandas
+ - pint
- pip
+ - pseudonetcdf
+ - pydap
+ - pynio
+ - pytest
+ - pytest-cov
+ - pytest-env
+ - rasterio
+ - scipy
+ - seaborn
+ - setuptools
+ - sparse
+ - toolz
+ - zarr
- pip:
- - coveralls
- - dask
- - distributed
- - numpy
- - pandas
- - pytest
- - pytest-cov
- - pytest-env
+ - numbagg
diff --git a/doc/api-hidden.rst b/doc/api-hidden.rst
index c117b0f4fc7..437f53b1a91 100644
--- a/doc/api-hidden.rst
+++ b/doc/api-hidden.rst
@@ -583,7 +583,6 @@
CFTimeIndex.factorize
CFTimeIndex.fillna
CFTimeIndex.format
- CFTimeIndex.get_duplicates
CFTimeIndex.get_indexer
CFTimeIndex.get_indexer_for
CFTimeIndex.get_indexer_non_unique
@@ -591,7 +590,6 @@
CFTimeIndex.get_loc
CFTimeIndex.get_slice_bound
CFTimeIndex.get_value
- CFTimeIndex.get_values
CFTimeIndex.groupby
CFTimeIndex.holds_integer
CFTimeIndex.identical
@@ -603,7 +601,6 @@
CFTimeIndex.is_floating
CFTimeIndex.is_integer
CFTimeIndex.is_interval
- CFTimeIndex.is_lexsorted_for_tuple
CFTimeIndex.is_mixed
CFTimeIndex.is_numeric
CFTimeIndex.is_object
@@ -635,7 +632,6 @@
CFTimeIndex.sort_values
CFTimeIndex.sortlevel
CFTimeIndex.strftime
- CFTimeIndex.summary
CFTimeIndex.symmetric_difference
CFTimeIndex.take
CFTimeIndex.to_datetimeindex
@@ -656,16 +652,12 @@
CFTimeIndex.T
CFTimeIndex.array
CFTimeIndex.asi8
- CFTimeIndex.base
- CFTimeIndex.data
CFTimeIndex.date_type
CFTimeIndex.day
CFTimeIndex.dayofweek
CFTimeIndex.dayofyear
CFTimeIndex.dtype
- CFTimeIndex.dtype_str
CFTimeIndex.empty
- CFTimeIndex.flags
CFTimeIndex.has_duplicates
CFTimeIndex.hasnans
CFTimeIndex.hour
@@ -675,7 +667,6 @@
CFTimeIndex.is_monotonic_increasing
CFTimeIndex.is_monotonic_decreasing
CFTimeIndex.is_unique
- CFTimeIndex.itemsize
CFTimeIndex.microsecond
CFTimeIndex.minute
CFTimeIndex.month
@@ -687,7 +678,6 @@
CFTimeIndex.second
CFTimeIndex.shape
CFTimeIndex.size
- CFTimeIndex.strides
CFTimeIndex.values
CFTimeIndex.year
diff --git a/doc/api.rst b/doc/api.rst
index 9735eb0c708..4492d882355 100644
--- a/doc/api.rst
+++ b/doc/api.rst
@@ -414,7 +414,7 @@ Universal functions
for the ``xarray.ufuncs`` module, which should not be used for new code
unless compatibility with versions of NumPy prior to v1.13 is required.
-This functions are copied from NumPy, but extended to work on NumPy arrays,
+These functions are copied from NumPy, but extended to work on NumPy arrays,
dask arrays and all xarray objects. You can find them in the ``xarray.ufuncs``
module:
@@ -649,6 +649,7 @@ Plotting
plot.imshow
plot.line
plot.pcolormesh
+ plot.step
plot.FacetGrid
Faceting
diff --git a/doc/contributing.rst b/doc/contributing.rst
index 3cd0b3e8868..eb31db24591 100644
--- a/doc/contributing.rst
+++ b/doc/contributing.rst
@@ -231,9 +231,9 @@ About the *xarray* documentation
--------------------------------
The documentation is written in **reStructuredText**, which is almost like writing
-in plain English, and built using `Sphinx `__. The
+in plain English, and built using `Sphinx `__. The
Sphinx Documentation has an excellent `introduction to reST
-`__. Review the Sphinx docs to perform more
+`__. Review the Sphinx docs to perform more
complex changes to the documentation as well.
Some other important things to know about the docs:
@@ -345,33 +345,31 @@ as possible to avoid mass breakages.
Code Formatting
~~~~~~~~~~~~~~~
-Xarray uses `Black `_ and
-`Flake8 `_ to ensure a consistent code
-format throughout the project. ``black`` and ``flake8`` can be installed with
+xarray uses several tools to ensure a consistent code format throughout the project:
+
+- `Black `_ for standardized code formatting
+- `Flake8 `_ for general code quality
+- `isort `_ for standardized order in imports.
+ See also `flake8-isort `_.
+- `mypy `_ for static type checking on `type hints
+ `_
+
``pip``::
- pip install black flake8
+ pip install black flake8 isort mypy
and then run from the root of the Xarray repository::
- black .
+ isort -rc .
+ black -t py36 .
flake8
+ mypy .
to auto-format your code. Additionally, many editors have plugins that will
apply ``black`` as you edit files.
-Other recommended but optional tools for checking code quality (not currently
-enforced in CI):
-
-- `mypy `_ performs static type checking, which can
- make it easier to catch bugs. Please run ``mypy xarray`` if you annotate any
- code with `type hints `_.
-- `isort `_ will highlight
- incorrectly sorted imports. ``isort -y`` will automatically fix them. See
- also `flake8-isort `_.
-
Optionally, you may wish to setup `pre-commit hooks `_
-to automatically run ``black`` and ``flake8`` when you make a git commit. This
+to automatically run all the above tools every time you make a git commit. This
can be done by installing ``pre-commit``::
pip install pre-commit
@@ -380,25 +378,9 @@ and then running::
pre-commit install
-from the root of the Xarray repository. Now ``black`` and ``flake8`` will be run
-each time you commit changes. You can skip these checks with
+from the root of the xarray repository. You can skip the pre-commit checks with
``git commit --no-verify``.
-.. note::
-
- If you were working on a branch *prior* to the code being reformatted with black,
- you will likely face some merge conflicts. These steps can eliminate many of those
- conflicts. Because they have had limited testing, please reach out to the core devs
- on your pull request if you face any issues, and we'll help with the merge:
-
- - Merge the commit on master prior to the ``black`` commit into your branch
- ``git merge f172c673``. If you have conflicts here, resolve and commit.
- - Apply ``black .`` to your branch and commit ``git commit -am "black"``
- - Apply a patch of other changes we made on that commit: ``curl https://gist.githubusercontent.com/max-sixty/3cceb8472ed4ea806353999ca43aed52/raw/03cbee4e386156bddb61acaa250c0bfc726f596d/xarray%2520black%2520diff | git apply -``
- - Commit (``git commit -am "black2"``)
- - Merge master at the ``black`` commit, resolving in favor of 'our' changes:
- ``git merge d089df38 -X ours``. You shouldn't have any merge conflicts
- - Merge current master ``git merge master``; resolve and commit any conflicts
Backwards Compatibility
~~~~~~~~~~~~~~~~~~~~~~~
diff --git a/doc/dask.rst b/doc/dask.rst
index ed99ffaa896..07b3939af6e 100644
--- a/doc/dask.rst
+++ b/doc/dask.rst
@@ -130,6 +130,7 @@ will return a ``dask.delayed`` object that can be computed later.
A dataset can also be converted to a Dask DataFrame using :py:meth:`~xarray.Dataset.to_dask_dataframe`.
.. ipython:: python
+ :okwarning:
df = ds.to_dask_dataframe()
df
diff --git a/doc/examples/_code/weather_data_setup.py b/doc/examples/_code/weather_data_setup.py
deleted file mode 100644
index 4e4e2ab176e..00000000000
--- a/doc/examples/_code/weather_data_setup.py
+++ /dev/null
@@ -1,22 +0,0 @@
-import numpy as np
-import pandas as pd
-import seaborn as sns
-
-import xarray as xr
-
-np.random.seed(123)
-
-times = pd.date_range("2000-01-01", "2001-12-31", name="time")
-annual_cycle = np.sin(2 * np.pi * (times.dayofyear.values / 365.25 - 0.28))
-
-base = 10 + 15 * annual_cycle.reshape(-1, 1)
-tmin_values = base + 3 * np.random.randn(annual_cycle.size, 3)
-tmax_values = base + 10 + 3 * np.random.randn(annual_cycle.size, 3)
-
-ds = xr.Dataset(
- {
- "tmin": (("time", "location"), tmin_values),
- "tmax": (("time", "location"), tmax_values),
- },
- {"time": times, "location": ["IA", "IN", "IL"]},
-)
diff --git a/doc/examples/weather-data.ipynb b/doc/examples/weather-data.ipynb
new file mode 100644
index 00000000000..f582453aacf
--- /dev/null
+++ b/doc/examples/weather-data.ipynb
@@ -0,0 +1,374 @@
+{
+ "cells": [
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "# Toy weather data\n",
+ "\n",
+ "Here is an example of how to easily manipulate a toy weather dataset using\n",
+ "xarray and other recommended Python libraries:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "ExecuteTime": {
+ "end_time": "2020-01-27T15:43:36.127628Z",
+ "start_time": "2020-01-27T15:43:36.081733Z"
+ }
+ },
+ "outputs": [],
+ "source": [
+ "import numpy as np\n",
+ "import pandas as pd\n",
+ "import seaborn as sns\n",
+ "\n",
+ "import xarray as xr\n",
+ "\n",
+ "np.random.seed(123)\n",
+ "\n",
+ "xr.set_options(display_style=\"html\")\n",
+ "\n",
+ "times = pd.date_range(\"2000-01-01\", \"2001-12-31\", name=\"time\")\n",
+ "annual_cycle = np.sin(2 * np.pi * (times.dayofyear.values / 365.25 - 0.28))\n",
+ "\n",
+ "base = 10 + 15 * annual_cycle.reshape(-1, 1)\n",
+ "tmin_values = base + 3 * np.random.randn(annual_cycle.size, 3)\n",
+ "tmax_values = base + 10 + 3 * np.random.randn(annual_cycle.size, 3)\n",
+ "\n",
+ "ds = xr.Dataset(\n",
+ " {\n",
+ " \"tmin\": ((\"time\", \"location\"), tmin_values),\n",
+ " \"tmax\": ((\"time\", \"location\"), tmax_values),\n",
+ " },\n",
+ " {\"time\": times, \"location\": [\"IA\", \"IN\", \"IL\"]},\n",
+ ")\n",
+ "\n",
+ "ds"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## Examine a dataset with pandas and seaborn"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "### Convert to a pandas DataFrame"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "ExecuteTime": {
+ "end_time": "2020-01-27T15:47:14.160297Z",
+ "start_time": "2020-01-27T15:47:14.126738Z"
+ }
+ },
+ "outputs": [],
+ "source": [
+ "df = ds.to_dataframe()\n",
+ "df.head()"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "ExecuteTime": {
+ "end_time": "2020-01-27T15:47:32.682065Z",
+ "start_time": "2020-01-27T15:47:32.652629Z"
+ }
+ },
+ "outputs": [],
+ "source": [
+ "df.describe()"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "### Visualize using pandas"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "ExecuteTime": {
+ "end_time": "2020-01-27T15:47:34.617042Z",
+ "start_time": "2020-01-27T15:47:34.282605Z"
+ }
+ },
+ "outputs": [],
+ "source": [
+ "ds.mean(dim=\"location\").to_dataframe().plot()"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "### Visualize using seaborn"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "ExecuteTime": {
+ "end_time": "2020-01-27T15:47:37.643175Z",
+ "start_time": "2020-01-27T15:47:37.202479Z"
+ }
+ },
+ "outputs": [],
+ "source": [
+ "sns.pairplot(df.reset_index(), vars=ds.data_vars)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## Probability of freeze by calendar month"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "ExecuteTime": {
+ "end_time": "2020-01-27T15:48:11.241224Z",
+ "start_time": "2020-01-27T15:48:11.211156Z"
+ }
+ },
+ "outputs": [],
+ "source": [
+ "freeze = (ds[\"tmin\"] <= 0).groupby(\"time.month\").mean(\"time\")\n",
+ "freeze"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "ExecuteTime": {
+ "end_time": "2020-01-27T15:48:13.131247Z",
+ "start_time": "2020-01-27T15:48:12.924985Z"
+ }
+ },
+ "outputs": [],
+ "source": [
+ "freeze.to_pandas().plot()"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## Monthly averaging"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "ExecuteTime": {
+ "end_time": "2020-01-27T15:48:08.498259Z",
+ "start_time": "2020-01-27T15:48:08.210890Z"
+ }
+ },
+ "outputs": [],
+ "source": [
+ "monthly_avg = ds.resample(time=\"1MS\").mean()\n",
+ "monthly_avg.sel(location=\"IA\").to_dataframe().plot(style=\"s-\")"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "Note that ``MS`` here refers to Month-Start; ``M`` labels Month-End (the last day of the month)."
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## Calculate monthly anomalies"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "In climatology, \"anomalies\" refer to the difference between observations and\n",
+ "typical weather for a particular season. Unlike observations, anomalies should\n",
+ "not show any seasonal cycle."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "ExecuteTime": {
+ "end_time": "2020-01-27T15:49:34.855086Z",
+ "start_time": "2020-01-27T15:49:34.406439Z"
+ }
+ },
+ "outputs": [],
+ "source": [
+ "climatology = ds.groupby(\"time.month\").mean(\"time\")\n",
+ "anomalies = ds.groupby(\"time.month\") - climatology\n",
+ "anomalies.mean(\"location\").to_dataframe()[[\"tmin\", \"tmax\"]].plot()"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## Calculate standardized monthly anomalies"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "You can create standardized anomalies where the difference between the\n",
+ "observations and the climatological monthly mean is\n",
+ "divided by the climatological standard deviation."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "ExecuteTime": {
+ "end_time": "2020-01-27T15:50:09.144586Z",
+ "start_time": "2020-01-27T15:50:08.734682Z"
+ }
+ },
+ "outputs": [],
+ "source": [
+ "climatology_mean = ds.groupby(\"time.month\").mean(\"time\")\n",
+ "climatology_std = ds.groupby(\"time.month\").std(\"time\")\n",
+ "stand_anomalies = xr.apply_ufunc(\n",
+ " lambda x, m, s: (x - m) / s,\n",
+ " ds.groupby(\"time.month\"),\n",
+ " climatology_mean,\n",
+ " climatology_std,\n",
+ ")\n",
+ "\n",
+ "stand_anomalies.mean(\"location\").to_dataframe()[[\"tmin\", \"tmax\"]].plot()"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## Fill missing values with climatology"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "ExecuteTime": {
+ "end_time": "2020-01-27T15:50:46.192491Z",
+ "start_time": "2020-01-27T15:50:46.174554Z"
+ }
+ },
+ "source": [
+ "The ``fillna`` method on grouped objects lets you easily fill missing values by group:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "ExecuteTime": {
+ "end_time": "2020-01-27T15:51:40.279299Z",
+ "start_time": "2020-01-27T15:51:40.220342Z"
+ }
+ },
+ "outputs": [],
+ "source": [
+ "# throw away the first half of every month\n",
+ "some_missing = ds.tmin.sel(time=ds[\"time.day\"] > 15).reindex_like(ds)\n",
+ "filled = some_missing.groupby(\"time.month\").fillna(climatology.tmin)\n",
+ "both = xr.Dataset({\"some_missing\": some_missing, \"filled\": filled})\n",
+ "both"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "ExecuteTime": {
+ "end_time": "2020-01-27T15:52:11.815769Z",
+ "start_time": "2020-01-27T15:52:11.770825Z"
+ }
+ },
+ "outputs": [],
+ "source": [
+ "df = both.sel(time=\"2000\").mean(\"location\").reset_coords(drop=True).to_dataframe()\n",
+ "df.head()"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "ExecuteTime": {
+ "end_time": "2020-01-27T15:52:14.867866Z",
+ "start_time": "2020-01-27T15:52:14.449684Z"
+ }
+ },
+ "outputs": [],
+ "source": [
+ "df[[\"filled\", \"some_missing\"]].plot()"
+ ]
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "Python 3",
+ "language": "python",
+ "name": "python3"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 3
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython3",
+ "version": "3.7.3"
+ },
+ "toc": {
+ "base_numbering": 1,
+ "nav_menu": {},
+ "number_sections": true,
+ "sideBar": true,
+ "skip_h1_title": false,
+ "title_cell": "Table of Contents",
+ "title_sidebar": "Contents",
+ "toc_cell": true,
+ "toc_position": {},
+ "toc_section_display": true,
+ "toc_window_display": false
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 2
+}
diff --git a/doc/examples/weather-data.rst b/doc/examples/weather-data.rst
deleted file mode 100644
index 5a019e637c4..00000000000
--- a/doc/examples/weather-data.rst
+++ /dev/null
@@ -1,138 +0,0 @@
-.. _toy weather data:
-
-Toy weather data
-================
-
-Here is an example of how to easily manipulate a toy weather dataset using
-xarray and other recommended Python libraries:
-
-.. contents::
- :local:
- :depth: 1
-
-Shared setup:
-
-.. literalinclude:: _code/weather_data_setup.py
-
-.. ipython:: python
- :suppress:
-
- fpath = "examples/_code/weather_data_setup.py"
- with open(fpath) as f:
- code = compile(f.read(), fpath, 'exec')
- exec(code)
-
-
-Examine a dataset with pandas_ and seaborn_
--------------------------------------------
-
-.. _pandas: http://pandas.pydata.org
-.. _seaborn: http://stanford.edu/~mwaskom/software/seaborn
-
-.. ipython:: python
-
- ds
-
- df = ds.to_dataframe()
-
- df.head()
-
- df.describe()
-
- @savefig examples_tmin_tmax_plot.png
- ds.mean(dim='location').to_dataframe().plot()
-
-
-.. ipython:: python
-
- @savefig examples_pairplot.png
- sns.pairplot(df.reset_index(), vars=ds.data_vars)
-
-.. _average by month:
-
-Probability of freeze by calendar month
----------------------------------------
-
-.. ipython:: python
-
- freeze = (ds['tmin'] <= 0).groupby('time.month').mean('time')
- freeze
-
- @savefig examples_freeze_prob.png
- freeze.to_pandas().plot()
-
-.. _monthly average:
-
-Monthly averaging
------------------
-
-.. ipython:: python
-
- monthly_avg = ds.resample(time='1MS').mean()
-
- @savefig examples_tmin_tmax_plot_mean.png
- monthly_avg.sel(location='IA').to_dataframe().plot(style='s-')
-
-Note that ``MS`` here refers to Month-Start; ``M`` labels Month-End (the last
-day of the month).
-
-.. _monthly anomalies:
-
-Calculate monthly anomalies
----------------------------
-
-In climatology, "anomalies" refer to the difference between observations and
-typical weather for a particular season. Unlike observations, anomalies should
-not show any seasonal cycle.
-
-.. ipython:: python
-
- climatology = ds.groupby('time.month').mean('time')
- anomalies = ds.groupby('time.month') - climatology
-
- @savefig examples_anomalies_plot.png
- anomalies.mean('location').to_dataframe()[['tmin', 'tmax']].plot()
-
-.. _standardized monthly anomalies:
-
-Calculate standardized monthly anomalies
-----------------------------------------
-
-You can create standardized anomalies where the difference between the
-observations and the climatological monthly mean is
-divided by the climatological standard deviation.
-
-.. ipython:: python
-
- climatology_mean = ds.groupby('time.month').mean('time')
- climatology_std = ds.groupby('time.month').std('time')
- stand_anomalies = xr.apply_ufunc(
- lambda x, m, s: (x - m) / s,
- ds.groupby('time.month'),
- climatology_mean, climatology_std)
-
- @savefig examples_standardized_anomalies_plot.png
- stand_anomalies.mean('location').to_dataframe()[['tmin', 'tmax']].plot()
-
-.. _fill with climatology:
-
-Fill missing values with climatology
-------------------------------------
-
-The :py:func:`~xarray.Dataset.fillna` method on grouped objects lets you easily
-fill missing values by group:
-
-.. ipython:: python
- :okwarning:
-
- # throw away the first half of every month
- some_missing = ds.tmin.sel(time=ds['time.day'] > 15).reindex_like(ds)
- filled = some_missing.groupby('time.month').fillna(climatology.tmin)
-
- both = xr.Dataset({'some_missing': some_missing, 'filled': filled})
- both
-
- df = both.sel(time='2000').mean('location').reset_coords(drop=True).to_dataframe()
-
- @savefig examples_filled.png
- df[['filled', 'some_missing']].plot()
diff --git a/doc/installing.rst b/doc/installing.rst
index 5c39f9a3c49..1635c06d5db 100644
--- a/doc/installing.rst
+++ b/doc/installing.rst
@@ -7,6 +7,7 @@ Required dependencies
---------------------
- Python (3.6 or later)
+- setuptools
- `numpy `__ (1.15 or later)
- `pandas `__ (0.25 or later)
diff --git a/doc/time-series.rst b/doc/time-series.rst
index 1cb535ea886..d838dbbd4cd 100644
--- a/doc/time-series.rst
+++ b/doc/time-series.rst
@@ -222,4 +222,4 @@ Data that has indices outside of the given ``tolerance`` are set to ``NaN``.
For more examples of using grouped operations on a time dimension, see
-:ref:`toy weather data`.
+:doc:`examples/weather-data`.
diff --git a/doc/whats-new.rst b/doc/whats-new.rst
index 4aad6ad3701..7fa70d0b67a 100644
--- a/doc/whats-new.rst
+++ b/doc/whats-new.rst
@@ -13,15 +13,44 @@ What's New
import xarray as xr
np.random.seed(123456)
+.. _whats-new.0.15.1:
+
+
+v0.15.1 (unreleased)
+---------------------
+
+Breaking changes
+~~~~~~~~~~~~~~~~
+
+New Features
+~~~~~~~~~~~~
+
+Bug fixes
+~~~~~~~~~
+
+Documentation
+~~~~~~~~~~~~~
+
+Internal Changes
+~~~~~~~~~~~~~~~~
+
.. _whats-new.0.15.0:
-v0.15.0 (unreleased)
---------------------
+v0.15.0 (30 Jan 2020)
+---------------------
+
+This release brings many improvements to xarray's documentation: our examples are now binderized notebooks (`click here `_)
+and we have new example notebooks from our SciPy 2019 sprint (many thanks to our contributors!).
+
+This release also features many API improvements such as a new
+:py:class:`~core.accessor_dt.TimedeltaAccessor` and support for :py:class:`CFTimeIndex` in
+:py:meth:`~DataArray.interpolate_na`); as well as many bug fixes.
Breaking changes
~~~~~~~~~~~~~~~~
- Bumped minimum tested versions for dependencies:
+
- numpy 1.15
- pandas 0.25
- dask 2.2
@@ -30,14 +59,13 @@ Breaking changes
- Remove ``compat`` and ``encoding`` kwargs from ``DataArray``, which
have been deprecated since 0.12. (:pull:`3650`).
- Instead, specify the encoding when writing to disk or set
- the ``encoding`` attribute directly.
- By `Maximilian Roos `_
+ Instead, specify the ``encoding`` kwarg when writing to disk or set
+ the :py:attr:`DataArray.encoding` attribute directly.
+ By `Maximilian Roos `_.
- :py:func:`xarray.dot`, :py:meth:`DataArray.dot`, and the ``@`` operator now
use ``align="inner"`` (except when ``xarray.set_options(arithmetic_join="exact")``;
:issue:`3694`) by `Mathias Hauser `_.
-
New Features
~~~~~~~~~~~~
- :py:meth:`DataArray.sel` and :py:meth:`Dataset.sel` now support :py:class:`pandas.CategoricalIndex`. (:issue:`3669`)
@@ -47,10 +75,10 @@ New Features
:py:class:`~xarray.Dataset` from a h5netcdf ``File`` that has been opened
using other means (:issue:`3618`).
By `Kai Mühlbauer `_.
-- Implement :py:func:`median` and :py:func:`nanmedian` for dask arrays. This works by rechunking
+- Implement ``median`` and ``nanmedian`` for dask arrays. This works by rechunking
to a single chunk along all reduction axes. (:issue:`2999`).
By `Deepak Cherian `_.
-- :py:func:`xarray.concat` now preserves attributes from the first Variable.
+- :py:func:`~xarray.concat` now preserves attributes from the first Variable.
(:issue:`2575`, :issue:`2060`, :issue:`1614`)
By `Deepak Cherian `_.
- :py:meth:`Dataset.quantile`, :py:meth:`DataArray.quantile` and ``GroupBy.quantile``
@@ -59,44 +87,48 @@ New Features
- Added the ``count`` reduction method to both :py:class:`~core.rolling.DatasetCoarsen`
and :py:class:`~core.rolling.DataArrayCoarsen` objects. (:pull:`3500`)
By `Deepak Cherian `_
-- Add ``meta`` kwarg to :py:func:`~xarray.apply_ufunc`; this is passed on to
- :py:meth:`dask.array.blockwise`. (:pull:`3660`) By `Deepak Cherian `_.
-- Add `attrs_file` option in :py:func:`~xarray.open_mfdataset` to choose the
+- Add ``meta`` kwarg to :py:func:`~xarray.apply_ufunc`;
+ this is passed on to :py:func:`dask.array.blockwise`. (:pull:`3660`)
+ By `Deepak Cherian `_.
+- Add ``attrs_file`` option in :py:func:`~xarray.open_mfdataset` to choose the
source file for global attributes in a multi-file dataset (:issue:`2382`,
- :pull:`3498`) by `Julien Seguinot _`.
+ :pull:`3498`). By `Julien Seguinot `_.
- :py:meth:`Dataset.swap_dims` and :py:meth:`DataArray.swap_dims`
now allow swapping to dimension names that don't exist yet. (:pull:`3636`)
By `Justus Magin `_.
-- Extend :py:class:`core.accessor_dt.DatetimeAccessor` properties
- and support `.dt` accessor for timedelta
- via :py:class:`core.accessor_dt.TimedeltaAccessor` (:pull:`3612`)
+- Extend :py:class:`~core.accessor_dt.DatetimeAccessor` properties
+ and support ``.dt`` accessor for timedeltas
+ via :py:class:`~core.accessor_dt.TimedeltaAccessor` (:pull:`3612`)
By `Anderson Banihirwe `_.
-- Support CFTimeIndex in :py:meth:`DataArray.interpolate_na`, define 1970-01-01
- as the default offset for the interpolation index for both DatetimeIndex and
- CFTimeIndex, use microseconds in the conversion from timedelta objects
- to floats to avoid overflow errors (:issue:`3641`, :pull:`3631`).
- By David Huard ``_.
+- Improvements to interpolating along time axes (:issue:`3641`, :pull:`3631`).
+ By `David Huard `_.
+
+ - Support :py:class:`CFTimeIndex` in :py:meth:`DataArray.interpolate_na`
+ - define 1970-01-01 as the default offset for the interpolation index for both
+ :py:class:`pandas.DatetimeIndex` and :py:class:`CFTimeIndex`,
+ - use microseconds in the conversion from timedelta objects to floats to avoid
+ overflow errors.
Bug fixes
~~~~~~~~~
- Applying a user-defined function that adds new dimensions using :py:func:`apply_ufunc`
and ``vectorize=True`` now works with ``dask > 2.0``. (:issue:`3574`, :pull:`3660`).
By `Deepak Cherian `_.
-- Fix :py:meth:`xarray.combine_by_coords` to allow for combining incomplete
+- Fix :py:meth:`~xarray.combine_by_coords` to allow for combining incomplete
hypercubes of Datasets (:issue:`3648`). By `Ian Bolliger
`_.
-- Fix :py:meth:`xarray.combine_by_coords` when combining cftime coordinates
+- Fix :py:func:`~xarray.combine_by_coords` when combining cftime coordinates
which span long time intervals (:issue:`3535`). By `Spencer Clark
`_.
- Fix plotting with transposed 2D non-dimensional coordinates. (:issue:`3138`, :pull:`3441`)
By `Deepak Cherian `_.
-- :py:meth:`~xarray.plot.FacetGrid.set_titles` can now replace existing row titles of a
+- :py:meth:`plot.FacetGrid.set_titles` can now replace existing row titles of a
:py:class:`~xarray.plot.FacetGrid` plot. In addition :py:class:`~xarray.plot.FacetGrid` gained
two new attributes: :py:attr:`~xarray.plot.FacetGrid.col_labels` and
- :py:attr:`~xarray.plot.FacetGrid.row_labels` contain matplotlib Text handles for both column and
+ :py:attr:`~xarray.plot.FacetGrid.row_labels` contain :py:class:`matplotlib.text.Text` handles for both column and
row labels. These can be used to manually change the labels.
By `Deepak Cherian `_.
-- Fix issue with Dask-backed datasets raising a ``KeyError`` on some computations involving ``map_blocks`` (:pull:`3598`)
+- Fix issue with Dask-backed datasets raising a ``KeyError`` on some computations involving :py:func:`map_blocks` (:pull:`3598`).
By `Tom Augspurger `_.
- Ensure :py:meth:`Dataset.quantile`, :py:meth:`DataArray.quantile` issue the correct error
when ``q`` is out of bounds (:issue:`3634`) by `Mathias Hauser `_.
@@ -108,36 +140,36 @@ Bug fixes
By `Justus Magin `_.
- :py:meth:`Dataset.rename`, :py:meth:`DataArray.rename` now check for conflicts with
MultiIndex level names.
-- :py:meth:`Dataset.merge` no longer fails when passed a `DataArray` instead of a `Dataset` object.
+- :py:meth:`Dataset.merge` no longer fails when passed a :py:class:`DataArray` instead of a :py:class:`Dataset`.
By `Tom Nicholas `_.
- Fix a regression in :py:meth:`Dataset.drop`: allow passing any
iterable when dropping variables (:issue:`3552`, :pull:`3693`)
By `Justus Magin `_.
- Fixed errors emitted by ``mypy --strict`` in modules that import xarray.
(:issue:`3695`) by `Guido Imperiale `_.
-- Fix plotting of binned coordinates on the y axis in :py:meth:`DataArray.plot`
- (line) and :py:meth:`DataArray.plot.step` plots (:issue:`#3571`,
- :pull:`3685`) by `Julien Seguinot _`.
+- Allow plotting of binned coordinates on the y axis in :py:meth:`plot.line`
+ and :py:meth:`plot.step` plots (:issue:`3571`,
+ :pull:`3685`) by `Julien Seguinot `_.
+- setuptools is now marked as a dependency of xarray
+ (:pull:`3628`) by `Richard Höchenberger `_.
Documentation
~~~~~~~~~~~~~
-- Switch doc examples to use nbsphinx and replace sphinx_gallery with
- notebook.
- (:pull:`3105`, :pull:`3106`, :pull:`3121`)
- By `Ryan Abernathey `_
-- Added example notebook demonstrating use of xarray with Regional Ocean
- Modeling System (ROMS) ocean hydrodynamic model output.
- (:pull:`3116`).
- By `Robert Hetland `_
-- Added example notebook demonstrating the visualization of ERA5 GRIB
- data. (:pull:`3199`)
+- Switch doc examples to use `nbsphinx `_ and replace
+ ``sphinx_gallery`` scripts with Jupyter notebooks. (:pull:`3105`, :pull:`3106`, :pull:`3121`)
+ By `Ryan Abernathey `_.
+- Added :doc:`example notebook ` demonstrating use of xarray with
+ Regional Ocean Modeling System (ROMS) ocean hydrodynamic model output. (:pull:`3116`)
+ By `Robert Hetland `_.
+- Added :doc:`example notebook ` demonstrating the visualization of
+ ERA5 GRIB data. (:pull:`3199`)
By `Zach Bruick `_ and
- `Stephan Siemen `_
+ `Stephan Siemen `_.
- Added examples for :py:meth:`DataArray.quantile`, :py:meth:`Dataset.quantile` and
``GroupBy.quantile``. (:pull:`3576`)
By `Justus Magin `_.
-- Add new :py:func:`apply_ufunc` example notebook demonstrating vectorization of a 1D
- function using dask and numba.
+- Add new :doc:`example notebook ` demonstrating
+  vectorization of a 1D function using :py:func:`apply_ufunc`, dask and numba.
By `Deepak Cherian `_.
- Added example for :py:func:`~xarray.map_blocks`. (:pull:`3667`)
By `Riley X. Brady `_.
@@ -153,11 +185,15 @@ Internal Changes
(:pull:`3533`) by `Guido Imperiale `_.
- Removed internal method ``Dataset._from_vars_and_coord_names``,
which was dominated by ``Dataset._construct_direct``. (:pull:`3565`)
- By `Maximilian Roos `_
+ By `Maximilian Roos `_.
- Replaced versioneer with setuptools-scm. Moved contents of setup.py to setup.cfg.
Removed pytest-runner from setup.py, as per deprecation notice on the pytest-runner
- project. (:pull:`3714`) by `Guido Imperiale `_
+ project. (:pull:`3714`) by `Guido Imperiale `_.
+- Use of isort is now enforced by CI.
+  (:pull:`3721`) by `Guido Imperiale `_.
+
+.. _whats-new.0.14.1:
v0.14.1 (19 Nov 2019)
---------------------
@@ -1749,7 +1785,7 @@ Documentation
- Added a new guide on :ref:`contributing` (:issue:`640`)
By `Joe Hamman `_.
-- Added apply_ufunc example to :ref:`toy weather data` (:issue:`1844`).
+- Added apply_ufunc example to :ref:`/examples/weather-data.ipynb#Toy-weather-data` (:issue:`1844`).
By `Liam Brannigan `_.
- New entry `Why don’t aggregations return Python scalars?` in the
:doc:`faq` (:issue:`1726`).
@@ -3657,7 +3693,7 @@ Enhancements
``fillna`` works on both ``Dataset`` and ``DataArray`` objects, and uses
index based alignment and broadcasting like standard binary operations. It
also can be applied by group, as illustrated in
- :ref:`fill with climatology`.
+ :ref:`/examples/weather-data.ipynb#Fill-missing-values-with-climatology`.
- New ``xray.Dataset.assign`` and ``xray.Dataset.assign_coords``
methods patterned off the new :py:meth:`DataFrame.assign `
method in pandas:
diff --git a/licenses/DASK_LICENSE b/licenses/DASK_LICENSE
index 893bddfb933..e98784cd600 100644
--- a/licenses/DASK_LICENSE
+++ b/licenses/DASK_LICENSE
@@ -1,4 +1,4 @@
-:py:meth:`~xarray.DataArray.isin`Copyright (c) 2014-2018, Anaconda, Inc. and contributors
+Copyright (c) 2014-2018, Anaconda, Inc. and contributors
All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
diff --git a/readthedocs.yml b/readthedocs.yml
index 9ed8d28eaf2..ad249bf8c09 100644
--- a/readthedocs.yml
+++ b/readthedocs.yml
@@ -7,7 +7,7 @@ conda:
environment: ci/requirements/doc.yml
python:
- version: 3.7
+ version: 3.8
install: []
formats: []
diff --git a/requirements.txt b/requirements.txt
new file mode 100644
index 00000000000..f73887ff5cc
--- /dev/null
+++ b/requirements.txt
@@ -0,0 +1,7 @@
+# This file is redundant with setup.cfg;
+# it exists to let GitHub build the repository dependency graph
+# https://help.github.com/en/github/visualizing-repository-data-with-graphs/listing-the-packages-that-a-repository-depends-on
+
+numpy >= 1.15
+pandas >= 0.25
+setuptools >= 41.2
diff --git a/setup.cfg b/setup.cfg
index e336f46e68c..42dc53bb882 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -4,7 +4,7 @@ author = xarray Developers
author_email = xarray@googlegroups.com
license = Apache
description = N-D labeled arrays and datasets in Python
-long_description_content_type=x-rst
+long_description_content_type=text/x-rst
long_description =
**xarray** (formerly **xray**) is an open source project and Python package
that makes working with labelled multi-dimensional arrays simple,
@@ -43,16 +43,11 @@ long_description =
powerful and concise interface. For example:
- Apply operations over dimensions by name: ``x.sum('time')``.
- - Select values by label instead of integer location:
- ``x.loc['2014-01-01']`` or ``x.sel(time='2014-01-01')``.
- - Mathematical operations (e.g., ``x - y``) vectorize across multiple
- dimensions (array broadcasting) based on dimension names, not shape.
- - Flexible split-apply-combine operations with groupby:
- ``x.groupby('time.dayofyear').mean()``.
- - Database like alignment based on coordinate labels that smoothly
- handles missing values: ``x, y = xr.align(x, y, join='outer')``.
- - Keep track of arbitrary metadata in the form of a Python dictionary:
- ``x.attrs``.
+ - Select values by label instead of integer location: ``x.loc['2014-01-01']`` or ``x.sel(time='2014-01-01')``.
+ - Mathematical operations (e.g., ``x - y``) vectorize across multiple dimensions (array broadcasting) based on dimension names, not shape.
+ - Flexible split-apply-combine operations with groupby: ``x.groupby('time.dayofyear').mean()``.
+ - Database like alignment based on coordinate labels that smoothly handles missing values: ``x, y = xr.align(x, y, join='outer')``.
+ - Keep track of arbitrary metadata in the form of a Python dictionary: ``x.attrs``.
Learn more
----------
@@ -81,7 +76,10 @@ python_requires = >=3.6
install_requires =
numpy >= 1.15
pandas >= 0.25
-setup_requires = setuptools_scm
+ setuptools >= 41.2 # For pkg_resources
+setup_requires =
+ setuptools >= 41.2
+ setuptools_scm
[options.package_data]
xarray =
diff --git a/xarray/__init__.py b/xarray/__init__.py
index 44dc66411c4..331d8ecb09a 100644
--- a/xarray/__init__.py
+++ b/xarray/__init__.py
@@ -1,3 +1,5 @@
+import pkg_resources
+
from . import testing, tutorial, ufuncs
from .backends.api import (
load_dataarray,
@@ -27,11 +29,9 @@
from .util.print_versions import show_versions
try:
- import pkg_resources
-
__version__ = pkg_resources.get_distribution("xarray").version
except Exception:
- # Local copy, not installed with setuptools, or setuptools is not available.
+ # Local copy or not installed with setuptools.
# Disable minimum version checks on downstream libraries.
__version__ = "999"
diff --git a/xarray/core/dataarray.py b/xarray/core/dataarray.py
index a658f125054..062cc6342df 100644
--- a/xarray/core/dataarray.py
+++ b/xarray/core/dataarray.py
@@ -1,3 +1,4 @@
+import datetime
import functools
import warnings
from numbers import Number
@@ -18,7 +19,6 @@
cast,
)
-import datetime
import numpy as np
import pandas as pd
diff --git a/xarray/core/dataset.py b/xarray/core/dataset.py
index c72ed6cc7d6..07bea6dac19 100644
--- a/xarray/core/dataset.py
+++ b/xarray/core/dataset.py
@@ -1,4 +1,5 @@
import copy
+import datetime
import functools
import sys
import warnings
@@ -27,7 +28,6 @@
cast,
)
-import datetime
import numpy as np
import pandas as pd
diff --git a/xarray/core/missing.py b/xarray/core/missing.py
index b20441e993c..40f010b3514 100644
--- a/xarray/core/missing.py
+++ b/xarray/core/missing.py
@@ -1,8 +1,8 @@
+import datetime as dt
import warnings
from functools import partial
from numbers import Number
from typing import Any, Callable, Dict, Hashable, Sequence, Union
-import datetime as dt
import numpy as np
import pandas as pd
diff --git a/xarray/core/nputils.py b/xarray/core/nputils.py
index dba67174fc1..cf189e471cc 100644
--- a/xarray/core/nputils.py
+++ b/xarray/core/nputils.py
@@ -2,7 +2,6 @@
import numpy as np
import pandas as pd
-
from numpy.core.multiarray import normalize_axis_index
try:
diff --git a/xarray/plot/plot.py b/xarray/plot/plot.py
index 948e7960098..80005d1cf78 100644
--- a/xarray/plot/plot.py
+++ b/xarray/plot/plot.py
@@ -380,6 +380,7 @@ def step(darray, *args, where="pre", linestyle=None, ls=None, **kwargs):
----------
where : {'pre', 'post', 'mid'}, optional, default 'pre'
Define where the steps should be placed:
+
- 'pre': The y value is continued constantly to the left from
every *x* position, i.e. the interval ``(x[i-1], x[i]]`` has the
value ``y[i]``.
@@ -387,12 +388,13 @@ def step(darray, *args, where="pre", linestyle=None, ls=None, **kwargs):
every *x* position, i.e. the interval ``[x[i], x[i+1])`` has the
value ``y[i]``.
- 'mid': Steps occur half-way between the *x* positions.
+
Note that this parameter is ignored if one coordinate consists of
:py:func:`pandas.Interval` values, e.g. as a result of
:py:func:`xarray.Dataset.groupby_bins`. In this case, the actual
boundaries of the interval are used.
- *args, **kwargs : optional
+ ``*args``, ``**kwargs`` : optional
Additional arguments following :py:func:`xarray.plot.line`
"""
if where not in {"pre", "post", "mid"}:
diff --git a/xarray/tests/test_duck_array_ops.py b/xarray/tests/test_duck_array_ops.py
index 96c883baa67..f4f11473e48 100644
--- a/xarray/tests/test_duck_array_ops.py
+++ b/xarray/tests/test_duck_array_ops.py
@@ -1,7 +1,7 @@
+import datetime as dt
import warnings
from textwrap import dedent
-import datetime as dt
import numpy as np
import pandas as pd
import pytest
@@ -17,13 +17,13 @@
gradient,
last,
mean,
- rolling_window,
- stack,
- where,
- py_timedelta_to_float,
np_timedelta64_to_float,
pd_timedelta_to_float,
+ py_timedelta_to_float,
+ rolling_window,
+ stack,
timedelta_to_numeric,
+ where,
)
from xarray.core.pycompat import dask_array_type
from xarray.testing import assert_allclose, assert_equal
@@ -753,7 +753,7 @@ def test_pd_timedelta_to_float(td, expected):
@pytest.mark.parametrize(
- "td", [dt.timedelta(days=1), np.timedelta64(1, "D"), pd.Timedelta(1, "D"), "1 day"],
+ "td", [dt.timedelta(days=1), np.timedelta64(1, "D"), pd.Timedelta(1, "D"), "1 day"]
)
def test_timedelta_to_numeric(td):
# Scalar input
diff --git a/xarray/tests/test_missing.py b/xarray/tests/test_missing.py
index 8d70d9a0fcc..35c71c2854c 100644
--- a/xarray/tests/test_missing.py
+++ b/xarray/tests/test_missing.py
@@ -14,16 +14,15 @@
)
from xarray.core.pycompat import dask_array_type
from xarray.tests import (
+ assert_allclose,
assert_array_equal,
assert_equal,
- assert_allclose,
raises_regex,
requires_bottleneck,
+ requires_cftime,
requires_dask,
requires_scipy,
- requires_cftime,
)
-
from xarray.tests.test_cftime_offsets import _CFTIME_CALENDARS