forked from pydata/xarray

Commit

Merge branch 'master' into concat-join
* master:
  More annotations in Dataset (pydata#3112)
  Hotfix for case of combining identical non-monotonic coords (pydata#3151)
  changed url for rasterio network test (pydata#3162)
dcherian committed Aug 1, 2019
2 parents 1afd9f2 + 1757dff commit 386eecd
Showing 17 changed files with 460 additions and 265 deletions.
1 change: 1 addition & 0 deletions ci/requirements/py36-hypothesis.yml
@@ -22,6 +22,7 @@ dependencies:
- seaborn
- toolz
- rasterio
- boto3
- bottleneck
- zarr
- pydap
1 change: 1 addition & 0 deletions ci/requirements/py36.yml
@@ -22,6 +22,7 @@ dependencies:
- seaborn
- toolz
- rasterio
- boto3
- bottleneck
- zarr
- pseudonetcdf>=3.0.1
1 change: 1 addition & 0 deletions ci/requirements/py37-windows.yml
@@ -22,4 +22,5 @@ dependencies:
- seaborn
- toolz
- rasterio
- boto3
- zarr
1 change: 1 addition & 0 deletions ci/requirements/py37.yml
@@ -23,6 +23,7 @@ dependencies:
- seaborn
- toolz
- rasterio
- boto3
- bottleneck
- zarr
- pseudonetcdf>=3.0.1
13 changes: 12 additions & 1 deletion doc/whats-new.rst
@@ -28,6 +28,15 @@ New functions/methods
By `Deepak Cherian <https://github.com/dcherian>`_ and `David Mertz
<http://github.com/DavidMertz>`_.

- The xarray package is now discoverable by mypy (although type hint coverage
  is not complete yet). mypy users can now remove the following lines from
  their setup.cfg::

      [mypy-xarray]
      ignore_missing_imports = True

  By `Guido Imperiale <https://github.com/crusaderky>`_.

Enhancements
~~~~~~~~~~~~

@@ -47,6 +56,9 @@ Bug fixes
- XFAIL several tests which are expected to fail on ARM systems
due to a ``datetime`` issue in NumPy (:issue:`2334`).
By `Graham Inggs <https://github.com/ginggs>`_.
- Fixed bug in ``combine_by_coords()`` causing a ``ValueError`` if the input had
  an unused dimension with coordinates that were not monotonic (:issue:`3150`).
  By `Tom Nicholas <http://github.com/TomNicholas>`_.

.. _whats-new.0.12.3:

@@ -69,7 +81,6 @@ New functions/methods
(:issue:`3026`).
By `Julia Kent <https://github.com/jukent>`_.


Enhancements
~~~~~~~~~~~~

2 changes: 1 addition & 1 deletion setup.py
@@ -104,4 +104,4 @@
tests_require=TESTS_REQUIRE,
url=URL,
packages=find_packages(),
package_data={'xarray': ['tests/data/*']})
package_data={'xarray': ['py.typed', 'tests/data/*']})
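
Shipping the py.typed marker via package_data is what makes the package PEP 561-compliant, which is what the mypy entry in whats-new above refers to. A minimal sketch of downstream code that mypy can then check without the ignore_missing_imports override (the file and function names below are hypothetical, not part of this commit):

    # check_me.py -- hypothetical downstream module; `mypy check_me.py` now
    # resolves xarray's annotations instead of needing ignore_missing_imports
    import numpy as np
    import xarray as xr

    def double(da: xr.DataArray) -> xr.DataArray:
        # xr.DataArray resolves to a real class for mypy rather than Any
        return da * 2

    print(double(xr.DataArray(np.arange(3), dims="x")))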
33 changes: 25 additions & 8 deletions xarray/core/alignment.py
@@ -3,7 +3,16 @@
import warnings
from collections import OrderedDict, defaultdict
from contextlib import suppress
from typing import Any, Mapping, Optional, Tuple
from typing import (
Any,
Dict,
Hashable,
Mapping,
Optional,
Tuple,
Union,
TYPE_CHECKING,
)

import numpy as np
import pandas as pd
@@ -13,6 +22,10 @@
from .utils import is_dict_like, is_full_slice
from .variable import IndexVariable, Variable

if TYPE_CHECKING:
from .dataarray import DataArray
from .dataset import Dataset


def _get_joiner(join):
if join == 'outer':
@@ -169,8 +182,8 @@ def deep_align(objects, join='inner', copy=True, indexes=None,
This function is not public API.
"""
from .dataarray import DataArray
from .dataset import Dataset
from .dataarray import DataArray # noqa: F811
from .dataset import Dataset # noqa: F811

if indexes is None:
indexes = {}
@@ -222,7 +235,10 @@ def is_alignable(obj):
return out


def reindex_like_indexers(target, other):
def reindex_like_indexers(
target: Union['DataArray', 'Dataset'],
other: Union['DataArray', 'Dataset'],
) -> Dict[Hashable, pd.Index]:
"""Extract indexers to align target with other.
Not public API.
@@ -236,7 +252,8 @@ def reindex_like_indexers(target, other):
Returns
-------
Dict[Any, pandas.Index] providing indexes for reindex keyword arguments.
Dict[Hashable, pandas.Index] providing indexes for reindex keyword
arguments.
Raises
------
@@ -310,7 +327,7 @@ def reindex_variables(
new_indexes : OrderedDict
Dict of indexes associated with the reindexed variables.
"""
from .dataarray import DataArray
from .dataarray import DataArray # noqa: F811

# create variables for the new dataset
reindexed = OrderedDict() # type: OrderedDict[Any, Variable]
@@ -407,8 +424,8 @@ def _get_broadcast_dims_map_common_coords(args, exclude):

def _broadcast_helper(arg, exclude, dims_map, common_coords):

from .dataarray import DataArray
from .dataset import Dataset
from .dataarray import DataArray # noqa: F811
from .dataset import Dataset # noqa: F811

def _set_dims(var):
# Add excluded dims to a copy of dims_map
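The alignment.py hunks above adopt the typing.TYPE_CHECKING pattern: DataArray and Dataset are imported only for the type checker (avoiding a circular import at run time), while the real imports stay deferred inside the functions and carry # noqa: F811 so flake8 does not flag the redefinition. A self-contained sketch of the same pattern, using pandas purely as a stand-in for the circularly-imported module:

    from typing import TYPE_CHECKING, Any

    if TYPE_CHECKING:
        # seen only by mypy; never executed, so it cannot create an import cycle
        import pandas as pd

    def as_index(values: Any) -> "pd.Index":
        import pandas as pd  # noqa: F811 -- the real, deferred import at call time
        return pd.Index(values)

    print(as_index([3, 1, 2]))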
15 changes: 7 additions & 8 deletions xarray/core/combine.py
@@ -509,14 +509,13 @@ def combine_by_coords(datasets, compat='no_conflicts', data_vars='all',
fill_value=fill_value, join=join)

# Check the overall coordinates are monotonically increasing
for dim in concatenated.dims:
if dim in concatenated:
indexes = concatenated.indexes.get(dim)
if not (indexes.is_monotonic_increasing
or indexes.is_monotonic_decreasing):
raise ValueError("Resulting object does not have monotonic"
" global indexes along dimension {}"
.format(dim))
for dim in concat_dims:
indexes = concatenated.indexes.get(dim)
if not (indexes.is_monotonic_increasing
or indexes.is_monotonic_decreasing):
raise ValueError("Resulting object does not have monotonic"
" global indexes along dimension {}"
.format(dim))
concatenated_grouped_by_data_vars.append(concatenated)

return merge(concatenated_grouped_by_data_vars, compat=compat,
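The hunk above narrows the post-concatenation monotonicity check from every dimension of the result to only the dimensions actually concatenated over, which is the hotfix referenced in the commit message and the whats-new bug-fix entry. A sketch of both sides of that behaviour (the datasets are made up for illustration; exact error wording may differ by version):

    import numpy as np
    import xarray as xr

    # identical, non-monotonic coordinate on a dimension that is NOT concatenated
    # over: with this fix it no longer trips the check (previously a ValueError)
    ds0 = xr.Dataset({"a": (("x", "y"), np.zeros((2, 3)))},
                     coords={"x": [0, 1], "y": [3, 1, 2]})
    ds1 = xr.Dataset({"a": (("x", "y"), np.ones((2, 3)))},
                     coords={"x": [2, 3], "y": [3, 1, 2]})
    print(xr.combine_by_coords([ds0, ds1]).sizes)  # -> x: 4, y: 3

    # the concatenated dimension itself must still end up monotonic: overlapping
    # x ranges produce a non-monotonic global index and raise the error above
    bad0 = xr.Dataset({"a": ("x", [0, 1])}, coords={"x": [0, 3]})
    bad1 = xr.Dataset({"a": ("x", [2, 3])}, coords={"x": [2, 5]})
    try:
        xr.combine_by_coords([bad0, bad1])
    except ValueError as err:
        print(err)  # "...monotonic global indexes along dimension x"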
34 changes: 17 additions & 17 deletions xarray/core/common.py
@@ -3,7 +3,7 @@
from textwrap import dedent
from typing import (
Any, Callable, Hashable, Iterable, Iterator, List, Mapping, MutableMapping,
Optional, Tuple, TypeVar, Union)
Tuple, TypeVar, Union)

import numpy as np
import pandas as pd
@@ -101,7 +101,7 @@ def __int__(self: Any) -> int:
def __complex__(self: Any) -> complex:
return complex(self.values)

def __array__(self: Any, dtype: Optional[DTypeLike] = None) -> np.ndarray:
def __array__(self: Any, dtype: DTypeLike = None) -> np.ndarray:
return np.asarray(self.values, dtype=dtype)

def __repr__(self) -> str:
@@ -448,7 +448,7 @@ def pipe(self, func: Union[Callable[..., T], Tuple[Callable[..., T], str]],
return func(self, *args, **kwargs)

def groupby(self, group, squeeze: bool = True,
restore_coord_dims: Optional[bool] = None):
restore_coord_dims: bool = None):
"""Returns a GroupBy object for performing grouped operations.
Parameters
@@ -501,7 +501,7 @@ def groupby(self, group, squeeze: bool = True,
def groupby_bins(self, group, bins, right: bool = True, labels=None,
precision: int = 3, include_lowest: bool = False,
squeeze: bool = True,
restore_coord_dims: Optional[bool] = None):
restore_coord_dims: bool = None):
"""Returns a GroupBy object for performing grouped operations.
Rather than using all unique values of `group`, the values are discretized
@@ -557,8 +557,8 @@ def groupby_bins(self, group, bins, right: bool = True, labels=None,
'include_lowest':
include_lowest})

def rolling(self, dim: Optional[Mapping[Hashable, int]] = None,
min_periods: Optional[int] = None, center: bool = False,
def rolling(self, dim: Mapping[Hashable, int] = None,
min_periods: int = None, center: bool = False,
**window_kwargs: int):
"""
Rolling window object.
@@ -621,7 +621,7 @@ def rolling(self, dim: Optional[Mapping[Hashable, int]] = None,

def rolling_exp(
self,
window: Optional[Mapping[Hashable, int]] = None,
window: Mapping[Hashable, int] = None,
window_type: str = 'span',
**window_kwargs
):
@@ -658,7 +658,7 @@ def rolling_exp(

return self._rolling_exp_cls(self, window, window_type)

def coarsen(self, dim: Optional[Mapping[Hashable, int]] = None,
def coarsen(self, dim: Mapping[Hashable, int] = None,
boundary: str = 'exact',
side: Union[str, Mapping[Hashable, str]] = 'left',
coord_func: str = 'mean',
@@ -721,11 +721,11 @@ def coarsen(self, dim: Optional[Mapping[Hashable, int]] = None,
self, dim, boundary=boundary, side=side,
coord_func=coord_func)

def resample(self, indexer: Optional[Mapping[Hashable, str]] = None,
skipna=None, closed: Optional[str] = None,
label: Optional[str] = None,
base: int = 0, keep_attrs: Optional[bool] = None,
loffset=None, restore_coord_dims: Optional[bool] = None,
def resample(self, indexer: Mapping[Hashable, str] = None,
skipna=None, closed: str = None,
label: str = None,
base: int = 0, keep_attrs: bool = None,
loffset=None, restore_coord_dims: bool = None,
**indexer_kwargs: str):
"""Returns a Resample object for performing resampling operations.
@@ -1003,7 +1003,7 @@ def __getitem__(self, value):
raise NotImplementedError


def full_like(other, fill_value, dtype: Optional[DTypeLike] = None):
def full_like(other, fill_value, dtype: DTypeLike = None):
"""Return a new object with the same shape and type as a given object.
Parameters
@@ -1044,7 +1044,7 @@ def full_like(other, fill_value, dtype: Optional[DTypeLike] = None):


def _full_like_variable(other, fill_value,
dtype: Optional[DTypeLike] = None):
dtype: DTypeLike = None):
"""Inner function of full_like, where other must be a variable
"""
from .variable import Variable
@@ -1061,13 +1061,13 @@ def _full_like_variable(other, fill_value,
return Variable(dims=other.dims, data=data, attrs=other.attrs)


def zeros_like(other, dtype: Optional[DTypeLike] = None):
def zeros_like(other, dtype: DTypeLike = None):
"""Shorthand for full_like(other, 0, dtype)
"""
return full_like(other, 0, dtype)


def ones_like(other, dtype: Optional[DTypeLike] = None):
def ones_like(other, dtype: DTypeLike = None):
"""Shorthand for full_like(other, 1, dtype)
"""
return full_like(other, 1, dtype)
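The common.py hunks above drop explicit Optional[...] from parameters whose default is None, relying on mypy's implicit-Optional handling for None defaults (its default behaviour at the time). A small sketch of the convention, with a made-up function name:

    from typing import Hashable, Mapping

    def rolling_sketch(dim: Mapping[Hashable, int] = None,
                       min_periods: int = None,
                       center: bool = False) -> None:
        # dim and min_periods are implicitly Optional because they default to None
        print(dim, min_periods, center)

    rolling_sketch({"x": 3})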
6 changes: 3 additions & 3 deletions xarray/core/computation.py
@@ -683,7 +683,7 @@ def apply_array_ufunc(func, *args, dask='forbidden'):
def apply_ufunc(
func: Callable,
*args: Any,
input_core_dims: Optional[Sequence[Sequence]] = None,
input_core_dims: Sequence[Sequence] = None,
output_core_dims: Optional[Sequence[Sequence]] = ((),),
exclude_dims: AbstractSet = frozenset(),
vectorize: bool = False,
@@ -693,8 +693,8 @@ def apply_ufunc(
keep_attrs: bool = False,
kwargs: Mapping = None,
dask: str = 'forbidden',
output_dtypes: Optional[Sequence] = None,
output_sizes: Optional[Mapping[Any, int]] = None
output_dtypes: Sequence = None,
output_sizes: Mapping[Any, int] = None
) -> Any:
"""Apply a vectorized function for unlabeled arrays on xarray objects.
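The apply_ufunc parameters annotated above keep their existing semantics; a minimal usage sketch of that signature (illustrative only), reducing over a core dimension:

    import numpy as np
    import xarray as xr

    da = xr.DataArray(np.arange(12.0).reshape(4, 3), dims=("time", "space"))

    # dims named in input_core_dims are moved to the last axes before `func`
    # is called, so reducing with axis=-1 collapses the `time` dimension here
    time_mean = xr.apply_ufunc(
        np.mean, da,
        input_core_dims=[["time"]],
        kwargs={"axis": -1},
    )
    print(time_mean.dims)  # ('space',)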