Skip to content

Commit

Permalink
Fix quotes position in pandas.core, typos and misspelled parameters. (p…
Browse files Browse the repository at this point in the history
  • Loading branch information
alanderex authored and Pingviinituutti committed Feb 28, 2019
1 parent e52341e commit 28468d8
Show file tree
Hide file tree
Showing 7 changed files with 76 additions and 44 deletions.
18 changes: 11 additions & 7 deletions pandas/core/accessor.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,11 +16,15 @@ class DirNamesMixin(object):
['asobject', 'base', 'data', 'flags', 'itemsize', 'strides'])

def _dir_deletions(self):
""" delete unwanted __dir__ for this object """
"""
Delete unwanted __dir__ for this object.
"""
return self._accessors | self._deprecations

def _dir_additions(self):
""" add additional __dir__ for this object """
"""
Add additional __dir__ for this object.
"""
rv = set()
for accessor in self._accessors:
try:
Expand All @@ -33,7 +37,7 @@ def _dir_additions(self):
def __dir__(self):
"""
Provide method name lookup and completion.
Only provide 'public' methods
Only provide 'public' methods.
"""
rv = set(dir(type(self)))
rv = (rv - self._dir_deletions()) | self._dir_additions()
Expand All @@ -42,7 +46,7 @@ def __dir__(self):

class PandasDelegate(object):
"""
an abstract base class for delegating methods/properties
An abstract base class for delegating methods/properties.
"""

def _delegate_property_get(self, name, *args, **kwargs):
Expand All @@ -65,10 +69,10 @@ def _add_delegate_accessors(cls, delegate, accessors, typ,
----------
cls : the class to add the methods/properties to
delegate : the class to get methods/properties & doc-strings
acccessors : string list of accessors to add
accessors : string list of accessors to add
typ : 'property' or 'method'
overwrite : boolean, default False
overwrite the method/property in the target class if it exists
overwrite the method/property in the target class if it exists.
"""

def _create_delegator_property(name):
Expand Down Expand Up @@ -117,7 +121,7 @@ def delegate_names(delegate, accessors, typ, overwrite=False):
----------
delegate : object
the class to get methods/properties & doc-strings
acccessors : Sequence[str]
accessors : Sequence[str]
List of accessor to add
typ : {'property', 'method'}
overwrite : boolean, default False
Expand Down
40 changes: 29 additions & 11 deletions pandas/core/common.py
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,8 @@ class SettingWithCopyWarning(Warning):


def flatten(l):
"""Flatten an arbitrarily nested sequence.
"""
Flatten an arbitrarily nested sequence.
Parameters
----------
Expand Down Expand Up @@ -160,44 +161,56 @@ def cast_scalar_indexer(val):


def _not_none(*args):
"""Returns a generator consisting of the arguments that are not None"""
"""
Returns a generator consisting of the arguments that are not None.
"""
return (arg for arg in args if arg is not None)


def _any_none(*args):
"""Returns a boolean indicating if any argument is None"""
"""
Returns a boolean indicating if any argument is None.
"""
for arg in args:
if arg is None:
return True
return False


def _all_none(*args):
"""Returns a boolean indicating if all arguments are None"""
"""
Returns a boolean indicating if all arguments are None.
"""
for arg in args:
if arg is not None:
return False
return True


def _any_not_none(*args):
"""Returns a boolean indicating if any argument is not None"""
"""
Returns a boolean indicating if any argument is not None.
"""
for arg in args:
if arg is not None:
return True
return False


def _all_not_none(*args):
"""Returns a boolean indicating if all arguments are not None"""
"""
Returns a boolean indicating if all arguments are not None.
"""
for arg in args:
if arg is None:
return False
return True


def count_not_none(*args):
"""Returns the count of arguments that are not None"""
"""
Returns the count of arguments that are not None.
"""
return sum(x is not None for x in args)


Expand Down Expand Up @@ -277,7 +290,9 @@ def maybe_make_list(obj):


def is_null_slice(obj):
""" we have a null slice """
"""
We have a null slice.
"""
return (isinstance(obj, slice) and obj.start is None and
obj.stop is None and obj.step is None)

Expand All @@ -291,7 +306,9 @@ def is_true_slices(l):

# TODO: used only once in indexing; belongs elsewhere?
def is_full_slice(obj, l):
""" we have a full length slice """
"""
We have a full length slice.
"""
return (isinstance(obj, slice) and obj.start == 0 and obj.stop == l and
obj.step is None)

Expand All @@ -316,7 +333,7 @@ def get_callable_name(obj):
def apply_if_callable(maybe_callable, obj, **kwargs):
"""
Evaluate possibly callable input using obj and kwargs if it is callable,
otherwise return as it is
otherwise return as it is.
Parameters
----------
Expand All @@ -333,7 +350,8 @@ def apply_if_callable(maybe_callable, obj, **kwargs):

def dict_compat(d):
"""
Helper function to convert datetimelike-keyed dicts to Timestamp-keyed dict
Helper function to convert datetimelike-keyed dicts
to Timestamp-keyed dict.
Parameters
----------
Expand Down
3 changes: 2 additions & 1 deletion pandas/core/internals/blocks.py
Original file line number Diff line number Diff line change
Expand Up @@ -87,7 +87,8 @@ def __init__(self, values, placement, ndim=None):
'{mgr}'.format(val=len(self.values), mgr=len(self.mgr_locs)))

def _check_ndim(self, values, ndim):
"""ndim inference and validation.
"""
ndim inference and validation.
Infers ndim from 'values' if not provided to __init__.
Validates that values.ndim and ndim are consistent if and only if
Expand Down
28 changes: 14 additions & 14 deletions pandas/core/missing.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
"""
Routines for filling missing data
Routines for filling missing data.
"""
from distutils.version import LooseVersion
import operator
Expand Down Expand Up @@ -116,7 +116,7 @@ def interpolate_1d(xvalues, yvalues, method='linear', limit=None,
xvalues and yvalues will each be 1-d arrays of the same length.
Bounds_error is currently hardcoded to False since non-scipy ones don't
take it as an argumnet.
take it as an argument.
"""
# Treat the original, non-scipy methods first.

Expand Down Expand Up @@ -244,9 +244,9 @@ def interpolate_1d(xvalues, yvalues, method='linear', limit=None,
def _interpolate_scipy_wrapper(x, y, new_x, method, fill_value=None,
bounds_error=False, order=None, **kwargs):
"""
passed off to scipy.interpolate.interp1d. method is scipy's kind.
Passed off to scipy.interpolate.interp1d. method is scipy's kind.
Returns an array interpolated at new_x. Add any new methods to
the list in _clean_interp_method
the list in _clean_interp_method.
"""
try:
from scipy import interpolate
Expand Down Expand Up @@ -314,7 +314,7 @@ def _interpolate_scipy_wrapper(x, y, new_x, method, fill_value=None,

def _from_derivatives(xi, yi, x, order=None, der=0, extrapolate=False):
"""
Convenience function for interpolate.BPoly.from_derivatives
Convenience function for interpolate.BPoly.from_derivatives.
Construct a piecewise polynomial in the Bernstein basis, compatible
with the specified values and derivatives at breakpoints.
Expand All @@ -325,7 +325,7 @@ def _from_derivatives(xi, yi, x, order=None, der=0, extrapolate=False):
sorted 1D array of x-coordinates
yi : array_like or list of array-likes
yi[i][j] is the j-th derivative known at xi[i]
orders : None or int or array_like of ints. Default: None.
order : None or int or array_like of ints, default None
Specifies the degree of local polynomials. If not None, some
derivatives are ignored.
der : int or list
Expand All @@ -344,8 +344,7 @@ def _from_derivatives(xi, yi, x, order=None, der=0, extrapolate=False):
Returns
-------
y : scalar or array_like
The result, of length R or length M or M by R,
The result, of length R or length M or M by R.
"""
import scipy
from scipy import interpolate
Expand Down Expand Up @@ -418,8 +417,9 @@ def _akima_interpolate(xi, yi, x, der=0, axis=0):

def interpolate_2d(values, method='pad', axis=0, limit=None, fill_value=None,
dtype=None):
""" perform an actual interpolation of values, values will be make 2-d if
needed fills inplace, returns the result
"""
Perform an actual interpolation of values; values will be made 2-d if
needed, fills inplace, and returns the result.
"""

transf = (lambda x: x) if axis == 0 else (lambda x: x.T)
Expand Down Expand Up @@ -533,13 +533,13 @@ def clean_reindex_fill_method(method):

def fill_zeros(result, x, y, name, fill):
"""
if this is a reversed op, then flip x,y
If this is a reversed op, then flip x,y
if we have an integer value (or array in y)
If we have an integer value (or array in y)
and we have 0's, fill them with the fill,
return the result
return the result.
mask the nan's from x
Mask the nan's from x.
"""
if fill is None or is_float_dtype(result):
return result
Expand Down
9 changes: 4 additions & 5 deletions pandas/core/resample.py
Original file line number Diff line number Diff line change
Expand Up @@ -36,7 +36,6 @@


class Resampler(_GroupBy):

"""
Class for resampling datetimelike data, a groupby-like operation.
See aggregate, transform, and apply functions on this object.
Expand Down Expand Up @@ -107,7 +106,7 @@ def __iter__(self):
Returns
-------
Generator yielding sequence of (name, subsetted object)
for each group
for each group.
See Also
--------
Expand Down Expand Up @@ -286,8 +285,8 @@ def transform(self, arg, *args, **kwargs):
Parameters
----------
func : function
To apply to each group. Should return a Series with the same index
arg : function
To apply to each group. Should return a Series with the same index.
Returns
-------
Expand Down Expand Up @@ -423,7 +422,7 @@ def pad(self, limit=None):
Returns
-------
an upsampled Series
An upsampled Series.
See Also
--------
Expand Down
16 changes: 12 additions & 4 deletions pandas/core/sparse/frame.py
Original file line number Diff line number Diff line change
Expand Up @@ -194,15 +194,19 @@ def sp_maker(x):
return to_manager(sdict, columns, index)

def _init_matrix(self, data, index, columns, dtype=None):
""" Init self from ndarray or list of lists """
"""
Init self from ndarray or list of lists.
"""
data = prep_ndarray(data, copy=False)
index, columns = self._prep_index(data, index, columns)
data = {idx: data[:, i] for i, idx in enumerate(columns)}
return self._init_dict(data, index, columns, dtype)

def _init_spmatrix(self, data, index, columns, dtype=None,
fill_value=None):
""" Init self from scipy.sparse matrix """
"""
Init self from scipy.sparse matrix.
"""
index, columns = self._prep_index(data, index, columns)
data = data.tocoo()
N = len(index)
Expand Down Expand Up @@ -302,7 +306,9 @@ def __getstate__(self):
_default_kind=self._default_kind)

def _unpickle_sparse_frame_compat(self, state):
""" original pickle format """
"""
Original pickle format.
series, cols, idx, fv, kind = state

if not isinstance(cols, Index): # pragma: no cover
Expand Down Expand Up @@ -338,7 +344,9 @@ def to_dense(self):
return DataFrame(data, index=self.index, columns=self.columns)

def _apply_columns(self, func):
""" get new SparseDataFrame applying func to each columns """
"""
Get new SparseDataFrame applying func to each columns
"""

new_data = {col: func(series)
for col, series in compat.iteritems(self)}
Expand Down
6 changes: 4 additions & 2 deletions pandas/core/sparse/scipy_sparse.py
Original file line number Diff line number Diff line change
Expand Up @@ -90,7 +90,8 @@ def _get_index_subset_to_coord_dict(index, subset, sort_labels=False):

def _sparse_series_to_coo(ss, row_levels=(0, ), column_levels=(1, ),
sort_labels=False):
""" Convert a SparseSeries to a scipy.sparse.coo_matrix using index
"""
Convert a SparseSeries to a scipy.sparse.coo_matrix using index
levels row_levels, column_levels as the row and column
labels respectively. Returns the sparse_matrix, row and column labels.
"""
Expand All @@ -116,7 +117,8 @@ def _sparse_series_to_coo(ss, row_levels=(0, ), column_levels=(1, ),


def _coo_to_sparse_series(A, dense_index=False):
""" Convert a scipy.sparse.coo_matrix to a SparseSeries.
"""
Convert a scipy.sparse.coo_matrix to a SparseSeries.
Use the defaults given in the SparseSeries constructor.
"""
s = Series(A.data, MultiIndex.from_arrays((A.row, A.col)))
Expand Down

0 comments on commit 28468d8

Please sign in to comment.