diff --git a/pandas/core/accessor.py b/pandas/core/accessor.py index 961488ff12e580..050749741e7bd0 100644 --- a/pandas/core/accessor.py +++ b/pandas/core/accessor.py @@ -16,11 +16,15 @@ class DirNamesMixin(object): ['asobject', 'base', 'data', 'flags', 'itemsize', 'strides']) def _dir_deletions(self): - """ delete unwanted __dir__ for this object """ + """ + Delete unwanted __dir__ for this object. + """ return self._accessors | self._deprecations def _dir_additions(self): - """ add additional __dir__ for this object """ + """ + Add additional __dir__ for this object. + """ rv = set() for accessor in self._accessors: try: @@ -33,7 +37,7 @@ def _dir_additions(self): def __dir__(self): """ Provide method name lookup and completion - Only provide 'public' methods + Only provide 'public' methods. """ rv = set(dir(type(self))) rv = (rv - self._dir_deletions()) | self._dir_additions() @@ -42,7 +46,7 @@ def __dir__(self): class PandasDelegate(object): """ - an abstract base class for delegating methods/properties + An abstract base class for delegating methods/properties. """ def _delegate_property_get(self, name, *args, **kwargs): @@ -65,10 +69,10 @@ def _add_delegate_accessors(cls, delegate, accessors, typ, ---------- cls : the class to add the methods/properties to delegate : the class to get methods/properties & doc-strings - acccessors : string list of accessors to add + accessors : string list of accessors to add typ : 'property' or 'method' overwrite : boolean, default False - overwrite the method/property in the target class if it exists + overwrite the method/property in the target class if it exists. 
""" def _create_delegator_property(name): @@ -117,7 +121,7 @@ def delegate_names(delegate, accessors, typ, overwrite=False): ---------- delegate : object the class to get methods/properties & doc-strings - acccessors : Sequence[str] + accessors : Sequence[str] List of accessor to add typ : {'property', 'method'} overwrite : boolean, default False diff --git a/pandas/core/common.py b/pandas/core/common.py index b4de0daa13b168..8bdff9d0b1a849 100644 --- a/pandas/core/common.py +++ b/pandas/core/common.py @@ -32,7 +32,8 @@ class SettingWithCopyWarning(Warning): def flatten(l): - """Flatten an arbitrarily nested sequence. + """ + Flatten an arbitrarily nested sequence. Parameters ---------- @@ -160,12 +161,16 @@ def cast_scalar_indexer(val): def _not_none(*args): - """Returns a generator consisting of the arguments that are not None""" + """ + Returns a generator consisting of the arguments that are not None. + """ return (arg for arg in args if arg is not None) def _any_none(*args): - """Returns a boolean indicating if any argument is None""" + """ + Returns a boolean indicating if any argument is None. + """ for arg in args: if arg is None: return True @@ -173,7 +178,9 @@ def _any_none(*args): def _all_none(*args): - """Returns a boolean indicating if all arguments are None""" + """ + Returns a boolean indicating if all arguments are None. + """ for arg in args: if arg is not None: return False @@ -181,7 +188,9 @@ def _all_none(*args): def _any_not_none(*args): - """Returns a boolean indicating if any argument is not None""" + """ + Returns a boolean indicating if any argument is not None. + """ for arg in args: if arg is not None: return True @@ -189,7 +198,9 @@ def _any_not_none(*args): def _all_not_none(*args): - """Returns a boolean indicating if all arguments are not None""" + """ + Returns a boolean indicating if all arguments are not None. 
+ """ for arg in args: if arg is None: return False @@ -197,7 +208,9 @@ def _all_not_none(*args): def count_not_none(*args): - """Returns the count of arguments that are not None""" + """ + Returns the count of arguments that are not None. + """ return sum(x is not None for x in args) @@ -277,7 +290,9 @@ def maybe_make_list(obj): def is_null_slice(obj): - """ we have a null slice """ + """ + We have a null slice. + """ return (isinstance(obj, slice) and obj.start is None and obj.stop is None and obj.step is None) @@ -291,7 +306,9 @@ def is_true_slices(l): # TODO: used only once in indexing; belongs elsewhere? def is_full_slice(obj, l): - """ we have a full length slice """ + """ + We have a full length slice. + """ return (isinstance(obj, slice) and obj.start == 0 and obj.stop == l and obj.step is None) @@ -316,7 +333,7 @@ def get_callable_name(obj): def apply_if_callable(maybe_callable, obj, **kwargs): """ Evaluate possibly callable input using obj and kwargs if it is callable, - otherwise return as it is + otherwise return as it is. Parameters ---------- @@ -333,7 +350,8 @@ def apply_if_callable(maybe_callable, obj, **kwargs): def dict_compat(d): """ - Helper function to convert datetimelike-keyed dicts to Timestamp-keyed dict + Helper function to convert datetimelike-keyed dicts + to Timestamp-keyed dict. Parameters ---------- diff --git a/pandas/core/internals/blocks.py b/pandas/core/internals/blocks.py index 36144c31dfef93..43798d64d11722 100644 --- a/pandas/core/internals/blocks.py +++ b/pandas/core/internals/blocks.py @@ -87,7 +87,8 @@ def __init__(self, values, placement, ndim=None): '{mgr}'.format(val=len(self.values), mgr=len(self.mgr_locs))) def _check_ndim(self, values, ndim): - """ndim inference and validation. + """ + ndim inference and validation. Infers ndim from 'values' if not provided to __init__. 
Validates that values.ndim and ndim are consistent if and only if diff --git a/pandas/core/missing.py b/pandas/core/missing.py index 15538b8196684e..cc7bdf95200d15 100644 --- a/pandas/core/missing.py +++ b/pandas/core/missing.py @@ -1,5 +1,5 @@ """ -Routines for filling missing data +Routines for filling missing data. """ from distutils.version import LooseVersion import operator @@ -116,7 +116,7 @@ def interpolate_1d(xvalues, yvalues, method='linear', limit=None, xvalues and yvalues will each be 1-d arrays of the same length. Bounds_error is currently hardcoded to False since non-scipy ones don't - take it as an argumnet. + take it as an argument. """ # Treat the original, non-scipy methods first. @@ -244,9 +244,9 @@ def interpolate_1d(xvalues, yvalues, method='linear', limit=None, def _interpolate_scipy_wrapper(x, y, new_x, method, fill_value=None, bounds_error=False, order=None, **kwargs): """ - passed off to scipy.interpolate.interp1d. method is scipy's kind. + Passed off to scipy.interpolate.interp1d. method is scipy's kind. Returns an array interpolated at new_x. Add any new methods to - the list in _clean_interp_method + the list in _clean_interp_method. """ try: from scipy import interpolate @@ -314,7 +314,7 @@ def _interpolate_scipy_wrapper(x, y, new_x, method, fill_value=None, def _from_derivatives(xi, yi, x, order=None, der=0, extrapolate=False): """ - Convenience function for interpolate.BPoly.from_derivatives + Convenience function for interpolate.BPoly.from_derivatives. Construct a piecewise polynomial in the Bernstein basis, compatible with the specified values and derivatives at breakpoints. @@ -325,7 +325,7 @@ def _from_derivatives(xi, yi, x, order=None, der=0, extrapolate=False): sorted 1D array of x-coordinates yi : array_like or list of array-likes yi[i][j] is the j-th derivative known at xi[i] - orders : None or int or array_like of ints. Default: None. + order : None or int or array_like of ints. Default: None. 
Specifies the degree of local polynomials. If not None, some derivatives are ignored. der : int or list @@ -344,8 +344,7 @@ def _from_derivatives(xi, yi, x, order=None, der=0, extrapolate=False): Returns ------- y : scalar or array_like - The result, of length R or length M or M by R, - + The result, of length R or length M or M by R. """ import scipy from scipy import interpolate @@ -418,8 +417,9 @@ def _akima_interpolate(xi, yi, x, der=0, axis=0): def interpolate_2d(values, method='pad', axis=0, limit=None, fill_value=None, dtype=None): - """ perform an actual interpolation of values, values will be make 2-d if - needed fills inplace, returns the result + """ + Perform an actual interpolation of values, values will be made 2-d if + needed, fills inplace, returns the result. """ transf = (lambda x: x) if axis == 0 else (lambda x: x.T) @@ -533,13 +533,13 @@ def clean_reindex_fill_method(method): def fill_zeros(result, x, y, name, fill): """ - if this is a reversed op, then flip x,y + If this is a reversed op, then flip x,y - if we have an integer value (or array in y) + If we have an integer value (or array in y) and we have 0's, fill them with the fill, - return the result + return the result. - mask the nan's from x + Mask the nan's from x. """ if fill is None or is_float_dtype(result): return result diff --git a/pandas/core/resample.py b/pandas/core/resample.py index 7723827ff478a4..a7204fcd9dd20b 100644 --- a/pandas/core/resample.py +++ b/pandas/core/resample.py @@ -36,7 +36,6 @@ class Resampler(_GroupBy): - """ Class for resampling datetimelike data, a groupby-like operation. See aggregate, transform, and apply functions on this object. @@ -107,7 +106,7 @@ def __iter__(self): Returns ------- Generator yielding sequence of (name, subsetted object) - for each group + for each group. See Also -------- @@ -286,8 +285,8 @@ def transform(self, arg, *args, **kwargs): Parameters ---------- - func : function - To apply to each group. 
Should return a Series with the same index + arg : function + To apply to each group. Should return a Series with the same index. Returns ------- @@ -423,7 +422,7 @@ def pad(self, limit=None): Returns ------- - an upsampled Series + An upsampled Series. See Also -------- diff --git a/pandas/core/sparse/frame.py b/pandas/core/sparse/frame.py index 586193fe11850a..e0af11d13774c8 100644 --- a/pandas/core/sparse/frame.py +++ b/pandas/core/sparse/frame.py @@ -194,7 +194,9 @@ def sp_maker(x): return to_manager(sdict, columns, index) def _init_matrix(self, data, index, columns, dtype=None): - """ Init self from ndarray or list of lists """ + """ + Init self from ndarray or list of lists. + """ data = prep_ndarray(data, copy=False) index, columns = self._prep_index(data, index, columns) data = {idx: data[:, i] for i, idx in enumerate(columns)} @@ -202,7 +204,9 @@ def _init_matrix(self, data, index, columns, dtype=None): def _init_spmatrix(self, data, index, columns, dtype=None, fill_value=None): - """ Init self from scipy.sparse matrix """ + """ + Init self from scipy.sparse matrix. 
+ """ index, columns = self._prep_index(data, index, columns) data = data.tocoo() N = len(index) @@ -302,7 +306,9 @@ def __getstate__(self): _default_kind=self._default_kind) def _unpickle_sparse_frame_compat(self, state): - """ original pickle format """ + """ + Original pickle format + """ series, cols, idx, fv, kind = state if not isinstance(cols, Index): # pragma: no cover @@ -338,7 +344,9 @@ def to_dense(self): return DataFrame(data, index=self.index, columns=self.columns) def _apply_columns(self, func): - """ get new SparseDataFrame applying func to each columns """ + """ + Get new SparseDataFrame applying func to each columns + """ new_data = {col: func(series) for col, series in compat.iteritems(self)} diff --git a/pandas/core/sparse/scipy_sparse.py b/pandas/core/sparse/scipy_sparse.py index 2d0ce2d5e5951b..8bb79d753e5f9c 100644 --- a/pandas/core/sparse/scipy_sparse.py +++ b/pandas/core/sparse/scipy_sparse.py @@ -90,7 +90,8 @@ def _get_index_subset_to_coord_dict(index, subset, sort_labels=False): def _sparse_series_to_coo(ss, row_levels=(0, ), column_levels=(1, ), sort_labels=False): - """ Convert a SparseSeries to a scipy.sparse.coo_matrix using index + """ + Convert a SparseSeries to a scipy.sparse.coo_matrix using index levels row_levels, column_levels as the row and column labels respectively. Returns the sparse_matrix, row and column labels. """ @@ -116,7 +117,8 @@ def _sparse_series_to_coo(ss, row_levels=(0, ), column_levels=(1, ), def _coo_to_sparse_series(A, dense_index=False): - """ Convert a scipy.sparse.coo_matrix to a SparseSeries. + """ + Convert a scipy.sparse.coo_matrix to a SparseSeries. Use the defaults given in the SparseSeries constructor. """ s = Series(A.data, MultiIndex.from_arrays((A.row, A.col)))