
[BEAM-11516] Upgrade to pylint 2.11.1, fix warnings #15612
Merged · 10 commits · Oct 4, 2021
2 changes: 1 addition & 1 deletion .pre-commit-config.yaml
@@ -36,7 +36,7 @@ repos:
- repo: https://github.com/pycqa/pylint
# this rev is a release tag in the repo above and corresponds with a pylint
# version. make sure this matches the version of pylint in tox.ini.
- rev: pylint-2.4.3
+ rev: v2.11.1
hooks:
- id: pylint
args: ["--rcfile=sdks/python/.pylintrc"]
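The comment pinning this rev to the pylint version in tox.ini is enforced only by convention. A consistency check is easy to script; the sketch below is illustrative only, and the file paths and the pylint== pin format are assumptions about the repo layout, not an existing Beam script.

import re

def pylint_versions_match():
    # Hypothetical sanity check: compare the pre-commit rev with the
    # pylint pin in tox.ini. Paths and pin formats are assumed.
    with open('.pre-commit-config.yaml', encoding='utf-8') as f:
        rev = re.search(r'rev:\s*(?:pylint-|v)?(\d+\.\d+\.\d+)', f.read())
    with open('sdks/python/tox.ini', encoding='utf-8') as f:
        pin = re.search(r'pylint==(\d+\.\d+\.\d+)', f.read())
    return bool(rev and pin and rev.group(1) == pin.group(1))

if __name__ == '__main__':
    assert pylint_versions_match(), 'pylint versions out of sync'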
4 changes: 2 additions & 2 deletions learning/katas/python/log_elements.py
@@ -22,7 +22,7 @@ class LogElements(beam.PTransform):
class _LoggingFn(beam.DoFn):

def __init__(self, prefix='', with_timestamp=False, with_window=False):
- super(LogElements._LoggingFn, self).__init__()
+ super().__init__()
self.prefix = prefix
self.with_timestamp = with_timestamp
self.with_window = with_window
@@ -43,7 +43,7 @@ def process(self, element, timestamp=beam.DoFn.TimestampParam,

def __init__(self, label=None, prefix='',
with_timestamp=False, with_window=False):
- super(LogElements, self).__init__(label)
+ super().__init__(label)
self.prefix = prefix
self.with_timestamp = with_timestamp
self.with_window = with_window
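This is the first of many conversions in this PR from the two-argument super(Class, self) form to Python 3's zero-argument super(), which resolves the class and instance from the calling frame. The two spellings are equivalent at runtime; a minimal standalone illustration (invented class names):

class Base:
    def __init__(self, label=None):
        self.label = label

class Child(Base):
    def __init__(self, label=None):
        # Same effect as super(Child, self).__init__(label), but the
        # explicit class name cannot go stale if Child is renamed.
        super().__init__(label)

assert Child('x').label == 'x'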
6 changes: 6 additions & 0 deletions sdks/python/.pylintrc
@@ -80,16 +80,20 @@ bad-names=
[MESSAGES CONTROL]
disable =
abstract-method,
+ abstract-class-instantiated,
arguments-differ,
+ arguments-renamed,
attribute-defined-outside-init,
bad-builtin,
bad-super-call,
bad-continuation,
broad-except,
comparison-with-callable,
consider-using-enumerate,
+ consider-using-f-string,
consider-using-in,
consider-using-sys-exit,
+ consider-using-with,
cyclic-import,
design,
fixme,
@@ -122,6 +126,7 @@ disable =
not-callable,
pointless-statement,
protected-access,
+ raise-missing-from, #TODO(BEAM-12991) Enable and fix warnings
raising-format-tuple,
raising-non-exception,
redefined-builtin,
@@ -143,6 +148,7 @@ disable =
unnecessary-pass,
unneeded-not,
unsubscriptable-object,
+ unspecified-encoding, #TODO(BEAM-12992) Enable explicit encoding
unused-argument,
unused-wildcard-import,
useless-object-inheritance,
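Most of the new entries silence checks introduced or broadened in pylint 2.11; the two tagged with TODOs are meant to be re-enabled once the codebase is fixed. Roughly what those fixes would look like (illustrative snippets, not Beam code):

# raise-missing-from (BEAM-12991): chain the original exception so the
# traceback records the cause instead of losing it.
def parse_port(raw):
    try:
        return int(raw)
    except ValueError as exc:
        raise RuntimeError(f'bad port: {raw!r}') from exc

# unspecified-encoding (BEAM-12992): name the encoding explicitly
# instead of inheriting the platform default.
def read_config(path):
    with open(path, encoding='utf-8') as f:
        return f.read()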
5 changes: 2 additions & 3 deletions sdks/python/apache_beam/coders/coder_impl.py
@@ -747,7 +747,7 @@ def decode_from_stream(self, in_, nested):
def decode_from_stream(self, in_, nested):
# type: (create_InputStream, bool) -> IntervalWindow
if not TYPE_CHECKING:
- global IntervalWindow
+ global IntervalWindow  # pylint: disable=global-variable-not-assigned
if IntervalWindow is None:
from apache_beam.transforms.window import IntervalWindow
# instantiating with None is not part of the public interface
@@ -1390,8 +1390,7 @@ class ParamWindowedValueCoderImpl(WindowedValueCoderImpl):
and pane info values during decoding when reconstructing the windowed
value."""
def __init__(self, value_coder, window_coder, payload):
- super(ParamWindowedValueCoderImpl,
- self).__init__(value_coder, TimestampCoderImpl(), window_coder)
+ super().__init__(value_coder, TimestampCoderImpl(), window_coder)
self._timestamp, self._windows, self._pane_info = self._from_proto(
payload, window_coder)

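The inline disable is needed because pylint 2.11 reports global-variable-not-assigned when a global name's only rebinding is an import inside the function. The underlying pattern is a lazy import used to break an import cycle; a simplified sketch (not the full TYPE_CHECKING dance in coder_impl.py):

IntervalWindow = None  # imported on first use to avoid a circular import

def make_window(start, end):
    # The import below does rebind the module-level name, but pylint
    # does not count it as an assignment, hence the disable.
    global IntervalWindow  # pylint: disable=global-variable-not-assigned
    if IntervalWindow is None:
        from apache_beam.transforms.window import IntervalWindow
    return IntervalWindow(start, end)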
12 changes: 6 additions & 6 deletions sdks/python/apache_beam/coders/coders.py
@@ -730,7 +730,7 @@ def is_deterministic(self):
return False

def as_cloud_object(self, coders_context=None, is_pair_like=True):
- value = super(_PickleCoderBase, self).as_cloud_object(coders_context)
+ value = super().as_cloud_object(coders_context)
# We currently use this coder in places where we cannot infer the coder to
# use for the value type in a more granular way. In places where the
# service expects a pair, it checks for the "is_pair_like" key, in which
@@ -767,7 +767,7 @@ def __hash__(self):
class _MemoizingPickleCoder(_PickleCoderBase):
"""Coder using Python's pickle functionality with memoization."""
def __init__(self, cache_size=16):
- super(_MemoizingPickleCoder, self).__init__()
+ super().__init__()
self.cache_size = cache_size

def _create_impl(self):
@@ -867,7 +867,7 @@ def to_type_hint(self):
return Any

def as_cloud_object(self, coders_context=None, is_pair_like=True):
- value = super(FastCoder, self).as_cloud_object(coders_context)
+ value = super().as_cloud_object(coders_context)
# We currently use this coder in places where we cannot infer the coder to
# use for the value type in a more granular way. In places where the
# service expects a pair, it checks for the "is_pair_like" key, in which
@@ -1088,7 +1088,7 @@ def as_cloud_object(self, coders_context=None):
],
}

- return super(TupleCoder, self).as_cloud_object(coders_context)
+ return super().as_cloud_object(coders_context)

def _get_component_coders(self):
# type: () -> Tuple[Coder, ...]
@@ -1250,7 +1250,7 @@ class GlobalWindowCoder(SingletonCoder):
"""Coder for global windows."""
def __init__(self):
from apache_beam.transforms import window
- super(GlobalWindowCoder, self).__init__(window.GlobalWindow())
+ super().__init__(window.GlobalWindow())

def as_cloud_object(self, coders_context=None):
return {
@@ -1357,7 +1357,7 @@ def __hash__(self):
class ParamWindowedValueCoder(WindowedValueCoder):
"""A coder used for parameterized windowed values."""
def __init__(self, payload, components):
- super(ParamWindowedValueCoder, self).__init__(components[0], components[1])
+ super().__init__(components[0], components[1])
self.payload = payload

def _create_impl(self):
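All six rewrites in this file are the same mechanical super() shortening. Worth noting is the shape the as_cloud_object methods share: delegate to the base class, then augment the returned dict. Schematically (invented names, not the actual Beam coder API):

class BaseCoder:
    def as_cloud_object(self, coders_context=None):
        return {'@type': 'base'}

class PairLikeCoder(BaseCoder):
    def as_cloud_object(self, coders_context=None, is_pair_like=True):
        # Fetch the base representation, then add pair-like metadata.
        value = super().as_cloud_object(coders_context)
        if is_pair_like:
            value['is_pair_like'] = True
        return value

assert PairLikeCoder().as_cloud_object()['is_pair_like']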
2 changes: 1 addition & 1 deletion sdks/python/apache_beam/coders/coders_test.py
@@ -121,7 +121,7 @@ class AvroTestCoder(coders.AvroGenericCoder):
"""

def __init__(self):
- super(AvroTestCoder, self).__init__(self.SCHEMA)
+ super().__init__(self.SCHEMA)


class AvroTestRecord(AvroRecord):
13 changes: 6 additions & 7 deletions sdks/python/apache_beam/coders/coders_test_common.py
@@ -128,12 +128,12 @@ class CodersTest(unittest.TestCase):
False,
]
test_values = test_values_deterministic + [
- dict(),
+ {},
{
'a': 'b'
},
{
- 0: dict(), 1: len
+ 0: {}, 1: len
},
set(),
{'a', 'b'},
@@ -223,13 +223,12 @@ def test_deterministic_coder(self):
tuple(self.test_values_deterministic))

with self.assertRaises(TypeError):
- self.check_coder(deterministic_coder, dict())
+ self.check_coder(deterministic_coder, {})
with self.assertRaises(TypeError):
- self.check_coder(deterministic_coder, [1, dict()])
+ self.check_coder(deterministic_coder, [1, {}])

self.check_coder(
- coders.TupleCoder((deterministic_coder, coder)), (1, dict()),
- ('a', [dict()]))
+ coders.TupleCoder((deterministic_coder, coder)), (1, {}), ('a', [{}]))

self.check_coder(deterministic_coder, test_message.MessageA(field1='value'))

@@ -260,7 +259,7 @@ def test_deterministic_coder(self):
with self.assertRaises(TypeError):
self.check_coder(deterministic_coder, DefinesGetState(1))
with self.assertRaises(TypeError):
- self.check_coder(deterministic_coder, DefinesGetAndSetState(dict()))
+ self.check_coder(deterministic_coder, DefinesGetAndSetState({}))

def test_dill_coder(self):
cell_value = (lambda x: lambda: x)(0).__closure__[0]
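The dict() to {} substitutions throughout this file are a small but unambiguous win: the literal needs neither a global name lookup nor a call, so it is marginally faster and reads the same. A quick check:

import timeit

# The literal skips looking up the global name 'dict' and calling it,
# so it is consistently (if marginally) faster on CPython.
assert timeit.timeit('{}') < timeit.timeit('dict()')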
1 change: 1 addition & 0 deletions sdks/python/apache_beam/coders/row_coder_test.py
@@ -185,6 +185,7 @@ def test_overflows(self):
)

# Encode max+1/min-1 ints to make sure they DO throw an error
+ # pylint: disable=cell-var-from-loop
for case in overflow:
self.assertRaises(OverflowError, lambda: c.encode(case))

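cell-var-from-loop flags closures that capture a loop variable, a real bug class; here each lambda is called immediately by assertRaises before the loop advances, so the capture is harmless and the disable is the right call. The pitfall the check exists for, in isolation:

# Each lambda closes over the variable i itself, not its value at
# creation time, so all of them observe the final value.
fns = [lambda: i for i in range(3)]
print([f() for f in fns])  # [2, 2, 2]

# Binding the current value as a default argument captures it.
fns = [lambda i=i: i for i in range(3)]
print([f() for f in fns])  # [0, 1, 2]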
2 changes: 1 addition & 1 deletion sdks/python/apache_beam/coders/slow_stream.py
@@ -92,7 +92,7 @@ class ByteCountingOutputStream(OutputStream):
A pure Python implementation of stream.ByteCountingOutputStream."""
def __init__(self):
# Note that we don't actually use any of the data initialized by our super.
- super(ByteCountingOutputStream, self).__init__()
+ super().__init__()
self.count = 0

def write(self, byte_array, nested=False):
8 changes: 4 additions & 4 deletions sdks/python/apache_beam/coders/standard_coders_test.py
@@ -151,13 +151,13 @@ class StandardCodersTest(unittest.TestCase):
window_parser: windowed_value.create(
value_parser(x['value']),
x['timestamp'] * 1000,
- tuple([window_parser(w) for w in x['windows']])),
+ tuple(window_parser(w) for w in x['windows'])),
'beam:coder:param_windowed_value:v1': lambda x,
value_parser,
window_parser: windowed_value.create(
value_parser(x['value']),
x['timestamp'] * 1000,
- tuple([window_parser(w) for w in x['windows']]),
+ tuple(window_parser(w) for w in x['windows']),
PaneInfo(
x['pane']['is_first'],
x['pane']['is_last'],
@@ -170,7 +170,7 @@ class StandardCodersTest(unittest.TestCase):
user_key=value_parser(x['userKey']),
dynamic_timer_tag=x['dynamicTimerTag'],
clear_bit=x['clearBit'],
- windows=tuple([window_parser(w) for w in x['windows']]),
+ windows=tuple(window_parser(w) for w in x['windows']),
fire_timestamp=None,
hold_timestamp=None,
paneinfo=None) if x['clearBit'] else userstate.Timer(
@@ -179,7 +179,7 @@
clear_bit=x['clearBit'],
fire_timestamp=Timestamp(micros=x['fireTimestamp'] * 1000),
hold_timestamp=Timestamp(micros=x['holdTimestamp'] * 1000),
- windows=tuple([window_parser(w) for w in x['windows']]),
+ windows=tuple(window_parser(w) for w in x['windows']),
paneinfo=PaneInfo(
x['pane']['is_first'],
x['pane']['is_last'],
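The tuple([...]) to tuple(...) rewrites here (and later in doctests.py and frame_base.py) drop the intermediate list; the generator feeds elements straight into the tuple constructor. Same result, one less allocation:

windows = ['w1', 'w2', 'w3']

# The list comprehension builds a list that tuple() copies and then
# discards; the generator version streams into the tuple directly.
from_list = tuple([w.upper() for w in windows])
from_gen = tuple(w.upper() for w in windows)
assert from_list == from_gen == ('W1', 'W2', 'W3')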
17 changes: 7 additions & 10 deletions sdks/python/apache_beam/dataframe/doctests.py
@@ -68,7 +68,7 @@ def __init__(self, pandas_obj, test_env):

def __call__(self, *args, **kwargs):
result = self._pandas_obj(*args, **kwargs)
- if type(result) in DeferredBase._pandas_type_map.keys():
+ if type(result) in DeferredBase._pandas_type_map:
placeholder = expressions.PlaceholderExpression(result.iloc[0:0])
self._test_env._inputs[placeholder] = result
return DeferredBase.wrap(placeholder)
@@ -322,8 +322,7 @@ def check_output(self, want, got, optionflags):

self.reset()
want, got = self.fix(want, got)
- return super(_DeferrredDataframeOutputChecker,
- self).check_output(want, got, optionflags)
+ return super().check_output(want, got, optionflags)

def output_difference(self, example, got, optionflags):
want, got = self.fix(example.want, got)
@@ -335,8 +334,7 @@ def output_difference(self, example, got, optionflags):
example.lineno,
example.indent,
example.options)
- return super(_DeferrredDataframeOutputChecker,
- self).output_difference(example, got, optionflags)
+ return super().output_difference(example, got, optionflags)


class BeamDataframeDoctestRunner(doctest.DocTestRunner):
@@ -374,7 +372,7 @@ def to_callable(cond):
for test,
examples in (skip or {}).items()
}
- super(BeamDataframeDoctestRunner, self).__init__(
+ super().__init__(
checker=_DeferrredDataframeOutputChecker(self._test_env, use_beam),
**kwargs)
self.success = 0
@@ -412,7 +410,7 @@ def run(self, test, **kwargs):
# Don't fail doctests that raise this error.
example.exc_msg = '|'.join(allowed_exceptions)
with self._test_env.context():
- result = super(BeamDataframeDoctestRunner, self).run(test, **kwargs)
+ result = super().run(test, **kwargs)
# Can't add attributes to builtin result.
result = AugmentedTestResults(result.failed, result.attempted)
result.summary = self.summary()
@@ -444,14 +442,13 @@ def extract_concise_reason(got, expected_exc):
# use the wrong previous value.
del test.globs[var]

- return super(BeamDataframeDoctestRunner,
- self).report_success(out, test, example, got)
+ return super().report_success(out, test, example, got)

def fake_pandas_module(self):
return self._test_env.fake_pandas_module()

def summarize(self):
- super(BeamDataframeDoctestRunner, self).summarize()
+ super().summarize()
self.summary().summarize()

def summary(self):
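Besides the super() cleanups, this file drops a redundant .keys() call: membership tests work directly against a dict, which is what pylint's consider-iterating-dictionary asks for. In isolation:

type_map = {int: 'integer', str: 'string'}

# The dict itself supports membership tests; .keys() only adds an
# intermediate view object.
assert int in type_map          # preferred
assert int in type_map.keys()   # flagged by consider-iterating-dictionary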
1 change: 1 addition & 0 deletions sdks/python/apache_beam/dataframe/doctests_test.py
@@ -234,6 +234,7 @@ def test_failed_assignment(self):

def test_rst_ipython(self):
try:
+ # pylint: disable=unused-import
import IPython
except ImportError:
raise unittest.SkipTest('IPython not available')
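The try/except probe is the standard way to skip a test when an optional dependency is absent; the import is deliberately unused, so it gets a disable rather than a "fix". The bare pattern, mirroring the code above:

import unittest

def require_ipython():
    try:
        # The import only probes availability, hence the disable.
        import IPython  # pylint: disable=unused-import
    except ImportError:
        raise unittest.SkipTest('IPython not available')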
10 changes: 5 additions & 5 deletions sdks/python/apache_beam/dataframe/expressions.py
@@ -69,7 +69,7 @@ def is_scalar(expr):

if expr not in self._bindings:
if is_scalar(expr) or not expr.args():
- result = super(PartitioningSession, self).evaluate(expr)
+ result = super().evaluate(expr)
else:
scaler_args = [arg for arg in expr.args() if is_scalar(arg)]

@@ -260,7 +260,7 @@ def __init__(
proxy: A proxy object with the type expected to be bound to this
expression. Used for type checking at pipeline construction time.
"""
- super(PlaceholderExpression, self).__init__('placeholder', proxy)
+ super().__init__('placeholder', proxy)
self._reference = reference

def placeholders(self):
@@ -296,7 +296,7 @@ def __init__(
"""
if proxy is None:
proxy = value
- super(ConstantExpression, self).__init__('constant', proxy)
+ super().__init__('constant', proxy)
self._value = value

def placeholders(self):
@@ -357,7 +357,7 @@ def __init__(
args = tuple(args)
if proxy is None:
proxy = func(*(arg.proxy() for arg in args))
- super(ComputedExpression, self).__init__(name, proxy, _id)
+ super().__init__(name, proxy, _id)
self._func = func
self._args = args
self._requires_partition_by = requires_partition_by
@@ -409,5 +409,5 @@ def allow_non_parallel_operations(allow=True):

class NonParallelOperation(Exception):
def __init__(self, msg):
- super(NonParallelOperation, self).__init__(self, msg)
+ super().__init__(self, msg)
self.msg = msg
4 changes: 2 additions & 2 deletions sdks/python/apache_beam/dataframe/frame_base.py
@@ -64,7 +64,7 @@ def get(ix):
requires_partition_by=partitionings.Arbitrary(),
preserves_partition_by=partitionings.Singleton())

- return tuple([cls.wrap(get(ix)) for ix in range(len(expr.proxy()))])
+ return tuple(cls.wrap(get(ix)) for ix in range(len(expr.proxy())))
elif proxy_type in cls._pandas_type_map:
wrapper_type = cls._pandas_type_map[proxy_type]
else:
@@ -641,4 +641,4 @@ def __init__(self, msg, reason=None):
if 'url' in reason_data:
msg = f"{msg}\nFor more information see {reason_data['url']}."

- super(WontImplementError, self).__init__(msg)
+ super().__init__(msg)
2 changes: 1 addition & 1 deletion sdks/python/apache_beam/dataframe/frames.py
@@ -3463,7 +3463,7 @@ def __init__(self, expr, kwargs,
:param grouping_indexes: list of index names (or index level numbers) to be
grouped.
:param kwargs: Keywords args passed to the original groupby(..) call."""
- super(DeferredGroupBy, self).__init__(expr)
+ super().__init__(expr)
self._ungrouped = ungrouped
self._ungrouped_with_index = ungrouped_with_index
self._projection = projection
2 changes: 1 addition & 1 deletion sdks/python/apache_beam/dataframe/io.py
@@ -353,7 +353,7 @@ def __init__(self, args, kwargs, read_chunk_size=_DEFAULT_BYTES_CHUNKSIZE):
'for splittable csv reads.')
if kwargs.get('skipfooter', 0):
raise ValueError('Splittablility incompatible with skipping footers.')
- super(_CsvSplitter, self).__init__(
+ super().__init__(
_maybe_encode(kwargs.get('lineterminator', b'\n')),
_DEFAULT_BYTES_CHUNKSIZE)
self._kwargs = kwargs
2 changes: 1 addition & 1 deletion sdks/python/apache_beam/dataframe/io_test.py
@@ -230,7 +230,7 @@ def test_truncating_filehandle(self):
min(len(s) for s in splits), len(numbers) * 0.9**20 * 0.1)

@parameterized.expand([
- ('defaults', dict()),
+ ('defaults', {}),
('header', dict(header=1)),
('multi_header', dict(header=[0, 1])),
('multi_header', dict(header=[0, 1, 4])),