CLN: remove methods of ExtensionIndex that duplicate base Index (#34163)
jorisvandenbossche authored May 15, 2020
1 parent 085af07 commit c10020f
Showing 2 changed files with 2 additions and 43 deletions.
3 changes: 1 addition & 2 deletions pandas/core/base.py
@@ -1257,8 +1257,7 @@ def value_counts(
     def unique(self):
         values = self._values
 
-        if hasattr(values, "unique"):
-
+        if not isinstance(values, np.ndarray):
             result = values.unique()
             if self.dtype.kind in ["m", "M"] and isinstance(self, ABCSeries):
                 # GH#31182 Series._values returns EA, unpack for backward-compat
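The base.py change swaps a duck-typed check (hasattr(values, "unique")) for an explicit type check: any backing array that is not a plain NumPy ndarray is treated as an ExtensionArray and handled by its own unique(). A minimal sketch, not part of the commit, of the two paths the new check distinguishes (the example dtypes are only illustrative):

import pandas as pd

# An int64 Series is backed by a plain ndarray, so unique() falls through to
# the ndarray path; a categorical Series is backed by an ExtensionArray
# (Categorical), so unique() dispatches to the array's own unique().
s_np = pd.Series([1, 2, 2, 3])
s_cat = pd.Series(["a", "b", "a"], dtype="category")

print(type(s_np._values).__name__)   # ndarray
print(type(s_cat._values).__name__)  # Categorical

print(s_np.unique())   # [1 2 3] (ndarray)
print(s_cat.unique())  # ['a', 'b'] (Categorical, keeps the dtype)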
42 changes: 1 addition & 41 deletions pandas/core/indexes/extension.py
@@ -9,11 +9,7 @@
 from pandas.errors import AbstractMethodError
 from pandas.util._decorators import cache_readonly, doc
 
-from pandas.core.dtypes.common import (
-    ensure_platform_int,
-    is_dtype_equal,
-    is_object_dtype,
-)
+from pandas.core.dtypes.common import is_dtype_equal, is_object_dtype
 from pandas.core.dtypes.generic import ABCSeries
 
 from pandas.core.arrays import ExtensionArray
@@ -223,29 +219,14 @@ def __getitem__(self, key):
         deprecate_ndim_indexing(result)
         return result
 
     def __iter__(self):
         return self._data.__iter__()
 
     # ---------------------------------------------------------------------
 
     def __array__(self, dtype=None) -> np.ndarray:
         return np.asarray(self._data, dtype=dtype)
 
-    def _get_engine_target(self) -> np.ndarray:
-        # NB: _values_for_argsort happens to match the desired engine targets
-        # for all of our existing EA-backed indexes, but in general
-        # cannot be relied upon to exist.
-        return self._data._values_for_argsort()
-
-    @doc(Index.dropna)
-    def dropna(self, how="any"):
-        if how not in ("any", "all"):
-            raise ValueError(f"invalid how option: {how}")
-
-        if self.hasnans:
-            return self._shallow_copy(self._data[~self._isnan])
-        return self._shallow_copy()
-
     def repeat(self, repeats, axis=None):
         nv.validate_repeat(tuple(), dict(axis=axis))
         result = self._data.repeat(repeats, axis=axis)
@@ -259,27 +240,6 @@ def _concat_same_dtype(self, to_concat, name):
         arr = type(self._data)._concat_same_type(to_concat)
         return type(self)._simple_new(arr, name=name)
 
-    @doc(Index.take)
-    def take(self, indices, axis=0, allow_fill=True, fill_value=None, **kwargs):
-        nv.validate_take(tuple(), kwargs)
-        indices = ensure_platform_int(indices)
-
-        taken = self._assert_take_fillable(
-            self._data,
-            indices,
-            allow_fill=allow_fill,
-            fill_value=fill_value,
-            na_value=self._na_value,
-        )
-        return type(self)(taken, name=self.name)
-
-    def unique(self, level=None):
-        if level is not None:
-            self._validate_index_level(level)
-
-        result = self._data.unique()
-        return self._shallow_copy(result)
-
     def _get_unique_index(self, dropna=False):
         if self.is_unique and not dropna:
             return self
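With overrides such as take, unique, and dropna removed from ExtensionIndex, EA-backed indexes fall back to the shared Index implementations, which operate on the underlying ExtensionArray and re-wrap the result in the original index type. A small usage sketch, not taken from the commit, using CategoricalIndex as one illustrative EA-backed index:

import pandas as pd

# Illustration only: CategoricalIndex is one of the ExtensionArray-backed
# indexes that goes through ExtensionIndex.
idx = pd.CategoricalIndex(["a", "b", "a", None])

# These calls now resolve to the shared Index implementations, which work on
# the underlying ExtensionArray and re-wrap the result in the same index type.
print(idx.take([0, 1]))  # positions 0 and 1 -> CategoricalIndex(['a', 'b'], ...)
print(idx.dropna())      # missing entry removed -> ['a', 'b', 'a']
print(idx.unique())      # first-appearance order, NaN kept -> ['a', 'b', NaN]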
