From 472f0ff8518c8df94f083df77cdae8fa84418149 Mon Sep 17 00:00:00 2001
From: Benjamin Gonzalez <74670721+benWize@users.noreply.github.com>
Date: Mon, 4 Oct 2021 10:21:57 -0500
Subject: [PATCH] [BEAM-11516] Upgrade to pylint 2.11.1, fix warnings (#15612)

* [BEAM-11516] Upgrade pylint 2.11.1, fix warnings
* [BEAM-11516] Fix typo
* [BEAM-11516] Fix error inferring __class__
* [BEAM-11516] Rollback operations super style
* [BEAM-11516] Fix style
* [BEAM-11516] Rollback raise-missing-from fixed warnings
* [BEAM-11516] Remove unused pylint disable comments, add TODOs to enable some warnings
* [BEAM-11516] Remove from error leftover
* [BEAM-11516] Fix pylint warnings
---
 .pre-commit-config.yaml | 2 +-
 learning/katas/python/log_elements.py | 4 +-
 sdks/python/.pylintrc | 6 +++
 sdks/python/apache_beam/coders/coder_impl.py | 5 +--
 sdks/python/apache_beam/coders/coders.py | 12 ++---
 sdks/python/apache_beam/coders/coders_test.py | 2 +-
 .../apache_beam/coders/coders_test_common.py | 13 +++---
 .../apache_beam/coders/row_coder_test.py | 1 +
 sdks/python/apache_beam/coders/slow_stream.py | 2 +-
 .../coders/standard_coders_test.py | 8 ++--
 sdks/python/apache_beam/dataframe/doctests.py | 17 +++----
 .../apache_beam/dataframe/doctests_test.py | 1 +
 .../apache_beam/dataframe/expressions.py | 10 ++---
 .../apache_beam/dataframe/frame_base.py | 4 +-
 sdks/python/apache_beam/dataframe/frames.py | 2 +-
 sdks/python/apache_beam/dataframe/io.py | 2 +-
 sdks/python/apache_beam/dataframe/io_test.py | 2 +-
 .../dataframe/pandas_top_level_functions.py | 2 +-
 .../apache_beam/dataframe/partitionings.py | 1 -
 .../dataframe/partitionings_test.py | 2 +-
 .../apache_beam/dataframe/transforms.py | 2 +-
 .../apache_beam/examples/avro_bitcoin.py | 2 +-
 .../examples/complete/autocomplete.py | 2 +-
 .../examples/complete/game/game_stats.py | 6 +--
 .../complete/game/hourly_team_score.py | 8 ++--
 .../examples/complete/game/leader_board.py | 10 ++---
 .../examples/complete/game/user_score.py | 4 +-
 .../complete/top_wikipedia_sessions.py | 2 +-
 .../examples/cookbook/bigtableio_it_test.py | 2 +-
 .../apache_beam/examples/fastavro_it_test.py | 4 +-
 .../examples/flink/flink_streaming_impulse.py | 2 +-
 .../apache_beam/examples/snippets/snippets.py | 4 +-
 .../examples/snippets/snippets_test.py | 2 +-
 .../examples/streaming_wordcount.py | 2 +-
 .../examples/streaming_wordcount_debugging.py | 2 +-
 .../examples/windowed_wordcount.py | 2 +-
 .../examples/wordcount_debugging.py | 2 +-
 .../apache_beam/examples/wordcount_it_test.py | 3 +-
 .../examples/wordcount_with_metrics.py | 2 +-
 sdks/python/apache_beam/internal/gcp/auth.py | 3 +-
 .../apache_beam/internal/metrics/metric.py | 6 +--
 sdks/python/apache_beam/io/avroio.py | 10 ++---
 sdks/python/apache_beam/io/avroio_test.py | 6 +--
 .../python/apache_beam/io/aws/s3filesystem.py | 2 +-
 sdks/python/apache_beam/io/concat_source.py | 4 +-
 .../io/external/generate_sequence.py | 2 +-
 .../io/external/xlang_kafkaio_it_test.py | 2 +-
 .../io/external/xlang_kinesisio_it_test.py | 2 +-
 .../io/external/xlang_parquetio_test.py | 2 +-
 .../io/external/xlang_snowflakeio_it_test.py | 2 +-
 .../apache_beam/io/filebasedsink_test.py | 4 +-
 sdks/python/apache_beam/io/fileio_test.py | 4 +-
 sdks/python/apache_beam/io/filesystem.py | 2 +-
 sdks/python/apache_beam/io/filesystem_test.py | 4 +-
 sdks/python/apache_beam/io/filesystemio.py | 2 +-
 sdks/python/apache_beam/io/gcp/__init__.py | 2 +-
 .../io/gcp/bigquery_read_perf_test.py | 2 +-
 .../io/gcp/bigquery_write_perf_test.py | 2 +-
 sdks/python/apache_beam/io/gcp/bigtableio.py | 6 +--
.../io/gcp/datastore/v1new/datastoreio.py | 10 ++--- .../io/gcp/datastore/v1new/query_splitter.py | 4 +- .../datastore/v1new/rampup_throttling_fn.py | 2 +- .../io/gcp/experimental/spannerio.py | 2 +- .../experimental/spannerio_read_perf_test.py | 2 +- .../experimental/spannerio_write_perf_test.py | 2 +- .../apache_beam/io/gcp/gcsfilesystem.py | 1 + sdks/python/apache_beam/io/gcp/gcsio.py | 2 +- .../io/gcp/gcsio_integration_test.py | 4 +- .../clients/bigquery/bigquery_v2_client.py | 18 ++++---- .../clients/storage/storage_v1_client.py | 20 ++++----- sdks/python/apache_beam/io/gcp/pubsub.py | 8 ++-- .../apache_beam/io/gcp/pubsub_io_perf_test.py | 8 ++-- sdks/python/apache_beam/io/gcp/spanner.py | 2 +- .../io/gcp/tests/bigquery_matcher.py | 6 +-- .../io/gcp/tests/xlang_spannerio_it_test.py | 2 +- .../python/apache_beam/io/hadoopfilesystem.py | 2 +- sdks/python/apache_beam/io/iobase.py | 10 ++--- sdks/python/apache_beam/io/iobase_test.py | 10 ++--- sdks/python/apache_beam/io/jdbc.py | 4 +- sdks/python/apache_beam/io/kafka.py | 4 +- sdks/python/apache_beam/io/kinesis.py | 4 +- sdks/python/apache_beam/io/mongodbio.py | 5 +-- sdks/python/apache_beam/io/parquetio.py | 16 +++---- .../apache_beam/io/parquetio_it_test.py | 2 +- sdks/python/apache_beam/io/range_trackers.py | 2 +- .../apache_beam/io/restriction_trackers.py | 2 +- .../apache_beam/io/source_test_utils_test.py | 2 +- sdks/python/apache_beam/io/textio.py | 19 ++++---- sdks/python/apache_beam/io/textio_test.py | 4 +- sdks/python/apache_beam/io/tfrecordio.py | 10 ++--- sdks/python/apache_beam/metrics/cells.py | 6 +-- sdks/python/apache_beam/metrics/execution.py | 4 +- sdks/python/apache_beam/metrics/metric.py | 6 +-- .../apache_beam/metrics/monitoring_infos.py | 4 +- .../apache_beam/ml/gcp/videointelligenceml.py | 8 ++-- sdks/python/apache_beam/ml/gcp/visionml.py | 6 +-- .../apache_beam/options/pipeline_options.py | 4 +- sdks/python/apache_beam/pipeline.py | 12 ++--- sdks/python/apache_beam/pipeline_test.py | 4 +- sdks/python/apache_beam/pvalue.py | 4 +- sdks/python/apache_beam/runners/common.py | 6 +-- .../runners/dataflow/dataflow_metrics.py | 4 +- .../runners/dataflow/dataflow_runner.py | 4 +- .../runners/dataflow/dataflow_runner_test.py | 2 +- .../runners/dataflow/internal/apiclient.py | 7 ++- .../dataflow/internal/apiclient_test.py | 6 +-- .../clients/dataflow/dataflow_v1b3_client.py | 44 +++++++++---------- .../runners/dataflow/native_io/iobase.py | 2 +- .../runners/dataflow/ptransform_overrides.py | 4 +- .../runners/dataflow/test_dataflow_runner.py | 3 +- ...consumer_tracking_pipeline_visitor_test.py | 10 +++-- .../runners/direct/direct_runner.py | 8 ++-- .../runners/direct/direct_userstate.py | 12 ++--- .../runners/direct/evaluation_context.py | 2 +- .../apache_beam/runners/direct/executor.py | 8 ++-- .../runners/direct/sdf_direct_runner_test.py | 2 +- .../runners/direct/test_direct_runner.py | 2 +- .../runners/direct/transform_evaluator.py | 32 +++++++------- .../runners/interactive/augmented_pipeline.py | 4 +- .../interactive/background_caching_job.py | 2 +- .../runners/interactive/cache_manager.py | 2 +- .../display/interactive_pipeline_graph.py | 2 +- .../display/pcoll_visualization_test.py | 2 +- .../display/pipeline_graph_test.py | 2 +- .../interactive/interactive_beam_test.py | 10 ++--- .../interactive_environment_test.py | 4 +- .../runners/interactive/interactive_runner.py | 4 +- .../interactive/interactive_runner_test.py | 2 +- .../interactive_environment_inspector_test.py | 10 ++--- 
.../runners/interactive/pipeline_fragment.py | 2 +- .../interactive/pipeline_fragment_test.py | 8 ++-- .../interactive/pipeline_instrument_test.py | 16 +++---- .../testing/integration/notebook_executor.py | 2 +- .../testing/integration/screen_diff.py | 4 +- .../integration/tests/screen_diff_test.py | 5 ++- .../interactive/testing/pipeline_assertion.py | 5 +-- .../apache_beam/runners/interactive/utils.py | 4 +- .../portability/abstract_job_service.py | 2 +- .../runners/portability/artifact_service.py | 1 + .../runners/portability/flink_runner.py | 4 +- .../runners/portability/flink_runner_test.py | 21 +++++---- .../portability/flink_uber_jar_job_server.py | 6 +-- .../portability/fn_api_runner/fn_runner.py | 10 ++--- .../fn_api_runner/fn_runner_test.py | 9 ++-- .../fn_api_runner/trigger_manager.py | 2 +- .../fn_api_runner/worker_handlers.py | 25 +++++------ .../runners/portability/job_server.py | 2 +- .../runners/portability/local_job_service.py | 9 ++-- .../runners/portability/portable_runner.py | 2 +- .../portability/portable_runner_test.py | 12 ++--- .../runners/portability/samza_runner_test.py | 5 +-- .../runners/portability/spark_runner.py | 4 +- .../runners/portability/spark_runner_test.py | 5 +-- .../portability/spark_uber_jar_job_server.py | 6 +-- .../spark_uber_jar_job_server_test.py | 1 + sdks/python/apache_beam/runners/runner.py | 2 +- .../runners/worker/bundle_processor.py | 15 +++---- .../apache_beam/runners/worker/data_plane.py | 13 +++--- .../apache_beam/runners/worker/log_handler.py | 4 +- .../apache_beam/runners/worker/logger.py | 4 +- .../apache_beam/runners/worker/opcounters.py | 4 +- .../apache_beam/runners/worker/operations.py | 1 + .../runners/worker/sideinputs_test.py | 2 +- .../runners/worker/statesampler.py | 14 +++--- .../nexmark/queries/winning_bids.py | 2 +- .../load_tests/co_group_by_key_test.py | 2 +- .../testing/load_tests/combine_test.py | 2 +- .../testing/load_tests/group_by_key_test.py | 2 +- .../load_tests/load_test_metrics_utils.py | 10 ++--- .../load_tests/microbenchmarks_test.py | 2 +- .../testing/load_tests/pardo_test.py | 2 +- .../testing/load_tests/sideinput_test.py | 2 +- .../testing/metric_result_matchers.py | 4 +- .../testing/metric_result_matchers_test.py | 2 +- .../apache_beam/testing/test_pipeline.py | 4 +- .../python/apache_beam/testing/test_stream.py | 2 +- sdks/python/apache_beam/testing/test_utils.py | 6 +-- sdks/python/apache_beam/testing/util.py | 6 +-- .../tools/fn_api_runner_microbenchmark.py | 2 +- .../tools/teststream_microbenchmark.py | 2 +- .../combinefn_lifecycle_pipeline.py | 2 +- .../apache_beam/transforms/combiners.py | 19 ++++---- sdks/python/apache_beam/transforms/core.py | 20 ++++----- .../transforms/deduplicate_test.py | 2 +- .../python/apache_beam/transforms/external.py | 6 +-- .../apache_beam/transforms/external_test.py | 4 +- .../transforms/periodicsequence.py | 4 +- .../apache_beam/transforms/ptransform.py | 25 +++++------ .../apache_beam/transforms/ptransform_test.py | 4 +- sdks/python/apache_beam/transforms/sql.py | 2 +- .../apache_beam/transforms/stats_test.py | 2 +- sdks/python/apache_beam/transforms/trigger.py | 6 +-- .../apache_beam/transforms/userstate.py | 2 +- sdks/python/apache_beam/transforms/util.py | 9 ++-- sdks/python/apache_beam/transforms/window.py | 2 +- .../apache_beam/transforms/window_test.py | 2 +- .../typehints/trivial_inference.py | 2 +- .../python/apache_beam/typehints/typecheck.py | 6 +-- .../python/apache_beam/typehints/typehints.py | 10 ++--- .../apache_beam/typehints/typehints_test.py | 2 +- 
sdks/python/apache_beam/utils/counters.py | 6 +-- sdks/python/apache_beam/utils/histogram.py | 2 +- sdks/python/apache_beam/utils/profiler.py | 2 +- sdks/python/apache_beam/utils/shared.py | 4 +- .../apache_beam/utils/subprocess_server.py | 6 +-- .../apache_beam/utils/thread_pool_executor.py | 2 +- sdks/python/gen_protos.py | 3 +- sdks/python/setup.py | 2 +- sdks/python/tox.ini | 4 +- .../documentation/patterns/cross-language.md | 2 +- 210 files changed, 552 insertions(+), 574 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index cef59ccc61e4..2f12a3a08240 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -36,7 +36,7 @@ repos: - repo: https://github.com/pycqa/pylint # this rev is a release tag in the repo above and corresponds with a pylint # version. make sure this matches the version of pylint in tox.ini. - rev: pylint-2.4.3 + rev: v2.11.1 hooks: - id: pylint args: ["--rcfile=sdks/python/.pylintrc"] diff --git a/learning/katas/python/log_elements.py b/learning/katas/python/log_elements.py index 6989cee8d6ad..4477256da7d9 100644 --- a/learning/katas/python/log_elements.py +++ b/learning/katas/python/log_elements.py @@ -22,7 +22,7 @@ class LogElements(beam.PTransform): class _LoggingFn(beam.DoFn): def __init__(self, prefix='', with_timestamp=False, with_window=False): - super(LogElements._LoggingFn, self).__init__() + super().__init__() self.prefix = prefix self.with_timestamp = with_timestamp self.with_window = with_window @@ -43,7 +43,7 @@ def process(self, element, timestamp=beam.DoFn.TimestampParam, def __init__(self, label=None, prefix='', with_timestamp=False, with_window=False): - super(LogElements, self).__init__(label) + super().__init__(label) self.prefix = prefix self.with_timestamp = with_timestamp self.with_window = with_window diff --git a/sdks/python/.pylintrc b/sdks/python/.pylintrc index 1f92812c11b3..bead17d76cd7 100644 --- a/sdks/python/.pylintrc +++ b/sdks/python/.pylintrc @@ -80,7 +80,9 @@ bad-names= [MESSAGES CONTROL] disable = abstract-method, + abstract-class-instantiated, arguments-differ, + arguments-renamed, attribute-defined-outside-init, bad-builtin, bad-super-call, @@ -88,8 +90,10 @@ disable = broad-except, comparison-with-callable, consider-using-enumerate, + consider-using-f-string, consider-using-in, consider-using-sys-exit, + consider-using-with, cyclic-import, design, fixme, @@ -122,6 +126,7 @@ disable = not-callable, pointless-statement, protected-access, + raise-missing-from, #TODO(BEAM-12991) Enable and fix warnings raising-format-tuple, raising-non-exception, redefined-builtin, @@ -143,6 +148,7 @@ disable = unnecessary-pass, unneeded-not, unsubscriptable-object, + unspecified-encoding, #TODO(BEAM-12992) Enable explicit encoding unused-argument, unused-wildcard-import, useless-object-inheritance, diff --git a/sdks/python/apache_beam/coders/coder_impl.py b/sdks/python/apache_beam/coders/coder_impl.py index 618ee5544fca..668c56cdb0a3 100644 --- a/sdks/python/apache_beam/coders/coder_impl.py +++ b/sdks/python/apache_beam/coders/coder_impl.py @@ -747,7 +747,7 @@ def encode_to_stream(self, value, out, nested): def decode_from_stream(self, in_, nested): # type: (create_InputStream, bool) -> IntervalWindow if not TYPE_CHECKING: - global IntervalWindow + global IntervalWindow # pylint: disable=global-variable-not-assigned if IntervalWindow is None: from apache_beam.transforms.window import IntervalWindow # instantiating with None is not part of the public interface @@ -1390,8 +1390,7 @@ class 
ParamWindowedValueCoderImpl(WindowedValueCoderImpl): and pane info values during decoding when reconstructing the windowed value.""" def __init__(self, value_coder, window_coder, payload): - super(ParamWindowedValueCoderImpl, - self).__init__(value_coder, TimestampCoderImpl(), window_coder) + super().__init__(value_coder, TimestampCoderImpl(), window_coder) self._timestamp, self._windows, self._pane_info = self._from_proto( payload, window_coder) diff --git a/sdks/python/apache_beam/coders/coders.py b/sdks/python/apache_beam/coders/coders.py index 05f7a9d27377..1299a4aab376 100644 --- a/sdks/python/apache_beam/coders/coders.py +++ b/sdks/python/apache_beam/coders/coders.py @@ -730,7 +730,7 @@ def is_deterministic(self): return False def as_cloud_object(self, coders_context=None, is_pair_like=True): - value = super(_PickleCoderBase, self).as_cloud_object(coders_context) + value = super().as_cloud_object(coders_context) # We currently use this coder in places where we cannot infer the coder to # use for the value type in a more granular way. In places where the # service expects a pair, it checks for the "is_pair_like" key, in which @@ -767,7 +767,7 @@ def __hash__(self): class _MemoizingPickleCoder(_PickleCoderBase): """Coder using Python's pickle functionality with memoization.""" def __init__(self, cache_size=16): - super(_MemoizingPickleCoder, self).__init__() + super().__init__() self.cache_size = cache_size def _create_impl(self): @@ -867,7 +867,7 @@ def to_type_hint(self): return Any def as_cloud_object(self, coders_context=None, is_pair_like=True): - value = super(FastCoder, self).as_cloud_object(coders_context) + value = super().as_cloud_object(coders_context) # We currently use this coder in places where we cannot infer the coder to # use for the value type in a more granular way. In places where the # service expects a pair, it checks for the "is_pair_like" key, in which @@ -1088,7 +1088,7 @@ def as_cloud_object(self, coders_context=None): ], } - return super(TupleCoder, self).as_cloud_object(coders_context) + return super().as_cloud_object(coders_context) def _get_component_coders(self): # type: () -> Tuple[Coder, ...] 
@@ -1250,7 +1250,7 @@ class GlobalWindowCoder(SingletonCoder): """Coder for global windows.""" def __init__(self): from apache_beam.transforms import window - super(GlobalWindowCoder, self).__init__(window.GlobalWindow()) + super().__init__(window.GlobalWindow()) def as_cloud_object(self, coders_context=None): return { @@ -1357,7 +1357,7 @@ def __hash__(self): class ParamWindowedValueCoder(WindowedValueCoder): """A coder used for parameterized windowed values.""" def __init__(self, payload, components): - super(ParamWindowedValueCoder, self).__init__(components[0], components[1]) + super().__init__(components[0], components[1]) self.payload = payload def _create_impl(self): diff --git a/sdks/python/apache_beam/coders/coders_test.py b/sdks/python/apache_beam/coders/coders_test.py index 42fb3a3e5e8c..0eeb75dd6767 100644 --- a/sdks/python/apache_beam/coders/coders_test.py +++ b/sdks/python/apache_beam/coders/coders_test.py @@ -121,7 +121,7 @@ class AvroTestCoder(coders.AvroGenericCoder): """ def __init__(self): - super(AvroTestCoder, self).__init__(self.SCHEMA) + super().__init__(self.SCHEMA) class AvroTestRecord(AvroRecord): diff --git a/sdks/python/apache_beam/coders/coders_test_common.py b/sdks/python/apache_beam/coders/coders_test_common.py index 44ba7493e4b7..dbe24533b2ae 100644 --- a/sdks/python/apache_beam/coders/coders_test_common.py +++ b/sdks/python/apache_beam/coders/coders_test_common.py @@ -128,12 +128,12 @@ class CodersTest(unittest.TestCase): False, ] test_values = test_values_deterministic + [ - dict(), + {}, { 'a': 'b' }, { - 0: dict(), 1: len + 0: {}, 1: len }, set(), {'a', 'b'}, @@ -223,13 +223,12 @@ def test_deterministic_coder(self): tuple(self.test_values_deterministic)) with self.assertRaises(TypeError): - self.check_coder(deterministic_coder, dict()) + self.check_coder(deterministic_coder, {}) with self.assertRaises(TypeError): - self.check_coder(deterministic_coder, [1, dict()]) + self.check_coder(deterministic_coder, [1, {}]) self.check_coder( - coders.TupleCoder((deterministic_coder, coder)), (1, dict()), - ('a', [dict()])) + coders.TupleCoder((deterministic_coder, coder)), (1, {}), ('a', [{}])) self.check_coder(deterministic_coder, test_message.MessageA(field1='value')) @@ -260,7 +259,7 @@ def test_deterministic_coder(self): with self.assertRaises(TypeError): self.check_coder(deterministic_coder, DefinesGetState(1)) with self.assertRaises(TypeError): - self.check_coder(deterministic_coder, DefinesGetAndSetState(dict())) + self.check_coder(deterministic_coder, DefinesGetAndSetState({})) def test_dill_coder(self): cell_value = (lambda x: lambda: x)(0).__closure__[0] diff --git a/sdks/python/apache_beam/coders/row_coder_test.py b/sdks/python/apache_beam/coders/row_coder_test.py index b36aee78f270..ac7ef5f1e581 100644 --- a/sdks/python/apache_beam/coders/row_coder_test.py +++ b/sdks/python/apache_beam/coders/row_coder_test.py @@ -185,6 +185,7 @@ def test_overflows(self): ) # Encode max+1/min-1 ints to make sure they DO throw an error + # pylint: disable=cell-var-from-loop for case in overflow: self.assertRaises(OverflowError, lambda: c.encode(case)) diff --git a/sdks/python/apache_beam/coders/slow_stream.py b/sdks/python/apache_beam/coders/slow_stream.py index 23dc0ee24b01..cf71c3e8ac7f 100644 --- a/sdks/python/apache_beam/coders/slow_stream.py +++ b/sdks/python/apache_beam/coders/slow_stream.py @@ -92,7 +92,7 @@ class ByteCountingOutputStream(OutputStream): A pure Python implementation of stream.ByteCountingOutputStream.""" def __init__(self): # Note that we don't 
actually use any of the data initialized by our super. - super(ByteCountingOutputStream, self).__init__() + super().__init__() self.count = 0 def write(self, byte_array, nested=False): diff --git a/sdks/python/apache_beam/coders/standard_coders_test.py b/sdks/python/apache_beam/coders/standard_coders_test.py index 454939f5211c..acec22a46a55 100644 --- a/sdks/python/apache_beam/coders/standard_coders_test.py +++ b/sdks/python/apache_beam/coders/standard_coders_test.py @@ -151,13 +151,13 @@ class StandardCodersTest(unittest.TestCase): window_parser: windowed_value.create( value_parser(x['value']), x['timestamp'] * 1000, - tuple([window_parser(w) for w in x['windows']])), + tuple(window_parser(w) for w in x['windows'])), 'beam:coder:param_windowed_value:v1': lambda x, value_parser, window_parser: windowed_value.create( value_parser(x['value']), x['timestamp'] * 1000, - tuple([window_parser(w) for w in x['windows']]), + tuple(window_parser(w) for w in x['windows']), PaneInfo( x['pane']['is_first'], x['pane']['is_last'], @@ -170,7 +170,7 @@ class StandardCodersTest(unittest.TestCase): user_key=value_parser(x['userKey']), dynamic_timer_tag=x['dynamicTimerTag'], clear_bit=x['clearBit'], - windows=tuple([window_parser(w) for w in x['windows']]), + windows=tuple(window_parser(w) for w in x['windows']), fire_timestamp=None, hold_timestamp=None, paneinfo=None) if x['clearBit'] else userstate.Timer( @@ -179,7 +179,7 @@ class StandardCodersTest(unittest.TestCase): clear_bit=x['clearBit'], fire_timestamp=Timestamp(micros=x['fireTimestamp'] * 1000), hold_timestamp=Timestamp(micros=x['holdTimestamp'] * 1000), - windows=tuple([window_parser(w) for w in x['windows']]), + windows=tuple(window_parser(w) for w in x['windows']), paneinfo=PaneInfo( x['pane']['is_first'], x['pane']['is_last'], diff --git a/sdks/python/apache_beam/dataframe/doctests.py b/sdks/python/apache_beam/dataframe/doctests.py index 45171db4da1e..a7cff5af5abe 100644 --- a/sdks/python/apache_beam/dataframe/doctests.py +++ b/sdks/python/apache_beam/dataframe/doctests.py @@ -68,7 +68,7 @@ def __init__(self, pandas_obj, test_env): def __call__(self, *args, **kwargs): result = self._pandas_obj(*args, **kwargs) - if type(result) in DeferredBase._pandas_type_map.keys(): + if type(result) in DeferredBase._pandas_type_map: placeholder = expressions.PlaceholderExpression(result.iloc[0:0]) self._test_env._inputs[placeholder] = result return DeferredBase.wrap(placeholder) @@ -322,8 +322,7 @@ def check_output(self, want, got, optionflags): self.reset() want, got = self.fix(want, got) - return super(_DeferrredDataframeOutputChecker, - self).check_output(want, got, optionflags) + return super().check_output(want, got, optionflags) def output_difference(self, example, got, optionflags): want, got = self.fix(example.want, got) @@ -335,8 +334,7 @@ def output_difference(self, example, got, optionflags): example.lineno, example.indent, example.options) - return super(_DeferrredDataframeOutputChecker, - self).output_difference(example, got, optionflags) + return super().output_difference(example, got, optionflags) class BeamDataframeDoctestRunner(doctest.DocTestRunner): @@ -374,7 +372,7 @@ def to_callable(cond): for test, examples in (skip or {}).items() } - super(BeamDataframeDoctestRunner, self).__init__( + super().__init__( checker=_DeferrredDataframeOutputChecker(self._test_env, use_beam), **kwargs) self.success = 0 @@ -412,7 +410,7 @@ def run(self, test, **kwargs): # Don't fail doctests that raise this error. 
example.exc_msg = '|'.join(allowed_exceptions) with self._test_env.context(): - result = super(BeamDataframeDoctestRunner, self).run(test, **kwargs) + result = super().run(test, **kwargs) # Can't add attributes to builtin result. result = AugmentedTestResults(result.failed, result.attempted) result.summary = self.summary() @@ -444,14 +442,13 @@ def extract_concise_reason(got, expected_exc): # use the wrong previous value. del test.globs[var] - return super(BeamDataframeDoctestRunner, - self).report_success(out, test, example, got) + return super().report_success(out, test, example, got) def fake_pandas_module(self): return self._test_env.fake_pandas_module() def summarize(self): - super(BeamDataframeDoctestRunner, self).summarize() + super().summarize() self.summary().summarize() def summary(self): diff --git a/sdks/python/apache_beam/dataframe/doctests_test.py b/sdks/python/apache_beam/dataframe/doctests_test.py index 1adff65c0d0f..df24213c8716 100644 --- a/sdks/python/apache_beam/dataframe/doctests_test.py +++ b/sdks/python/apache_beam/dataframe/doctests_test.py @@ -234,6 +234,7 @@ def test_failed_assignment(self): def test_rst_ipython(self): try: + # pylint: disable=unused-import import IPython except ImportError: raise unittest.SkipTest('IPython not available') diff --git a/sdks/python/apache_beam/dataframe/expressions.py b/sdks/python/apache_beam/dataframe/expressions.py index c8960c395cb3..97997a42b3d3 100644 --- a/sdks/python/apache_beam/dataframe/expressions.py +++ b/sdks/python/apache_beam/dataframe/expressions.py @@ -69,7 +69,7 @@ def is_scalar(expr): if expr not in self._bindings: if is_scalar(expr) or not expr.args(): - result = super(PartitioningSession, self).evaluate(expr) + result = super().evaluate(expr) else: scaler_args = [arg for arg in expr.args() if is_scalar(arg)] @@ -260,7 +260,7 @@ def __init__( proxy: A proxy object with the type expected to be bound to this expression. Used for type checking at pipeline construction time. 
""" - super(PlaceholderExpression, self).__init__('placeholder', proxy) + super().__init__('placeholder', proxy) self._reference = reference def placeholders(self): @@ -296,7 +296,7 @@ def __init__( """ if proxy is None: proxy = value - super(ConstantExpression, self).__init__('constant', proxy) + super().__init__('constant', proxy) self._value = value def placeholders(self): @@ -357,7 +357,7 @@ def __init__( args = tuple(args) if proxy is None: proxy = func(*(arg.proxy() for arg in args)) - super(ComputedExpression, self).__init__(name, proxy, _id) + super().__init__(name, proxy, _id) self._func = func self._args = args self._requires_partition_by = requires_partition_by @@ -409,5 +409,5 @@ def allow_non_parallel_operations(allow=True): class NonParallelOperation(Exception): def __init__(self, msg): - super(NonParallelOperation, self).__init__(self, msg) + super().__init__(self, msg) self.msg = msg diff --git a/sdks/python/apache_beam/dataframe/frame_base.py b/sdks/python/apache_beam/dataframe/frame_base.py index 4bb9ddf807f3..b1b0b8597212 100644 --- a/sdks/python/apache_beam/dataframe/frame_base.py +++ b/sdks/python/apache_beam/dataframe/frame_base.py @@ -64,7 +64,7 @@ def get(ix): requires_partition_by=partitionings.Arbitrary(), preserves_partition_by=partitionings.Singleton()) - return tuple([cls.wrap(get(ix)) for ix in range(len(expr.proxy()))]) + return tuple(cls.wrap(get(ix)) for ix in range(len(expr.proxy()))) elif proxy_type in cls._pandas_type_map: wrapper_type = cls._pandas_type_map[proxy_type] else: @@ -641,4 +641,4 @@ def __init__(self, msg, reason=None): if 'url' in reason_data: msg = f"{msg}\nFor more information see {reason_data['url']}." - super(WontImplementError, self).__init__(msg) + super().__init__(msg) diff --git a/sdks/python/apache_beam/dataframe/frames.py b/sdks/python/apache_beam/dataframe/frames.py index 606002804025..5a0e82670982 100644 --- a/sdks/python/apache_beam/dataframe/frames.py +++ b/sdks/python/apache_beam/dataframe/frames.py @@ -3463,7 +3463,7 @@ def __init__(self, expr, kwargs, :param grouping_indexes: list of index names (or index level numbers) to be grouped. :param kwargs: Keywords args passed to the original groupby(..) 
call.""" - super(DeferredGroupBy, self).__init__(expr) + super().__init__(expr) self._ungrouped = ungrouped self._ungrouped_with_index = ungrouped_with_index self._projection = projection diff --git a/sdks/python/apache_beam/dataframe/io.py b/sdks/python/apache_beam/dataframe/io.py index 5ef8e2c43114..9cf66eea7d9a 100644 --- a/sdks/python/apache_beam/dataframe/io.py +++ b/sdks/python/apache_beam/dataframe/io.py @@ -353,7 +353,7 @@ def __init__(self, args, kwargs, read_chunk_size=_DEFAULT_BYTES_CHUNKSIZE): 'for splittable csv reads.') if kwargs.get('skipfooter', 0): raise ValueError('Splittablility incompatible with skipping footers.') - super(_CsvSplitter, self).__init__( + super().__init__( _maybe_encode(kwargs.get('lineterminator', b'\n')), _DEFAULT_BYTES_CHUNKSIZE) self._kwargs = kwargs diff --git a/sdks/python/apache_beam/dataframe/io_test.py b/sdks/python/apache_beam/dataframe/io_test.py index 060eebfc9d9f..d525b40c7eb1 100644 --- a/sdks/python/apache_beam/dataframe/io_test.py +++ b/sdks/python/apache_beam/dataframe/io_test.py @@ -230,7 +230,7 @@ def test_truncating_filehandle(self): min(len(s) for s in splits), len(numbers) * 0.9**20 * 0.1) @parameterized.expand([ - ('defaults', dict()), + ('defaults', {}), ('header', dict(header=1)), ('multi_header', dict(header=[0, 1])), ('multi_header', dict(header=[0, 1, 4])), diff --git a/sdks/python/apache_beam/dataframe/pandas_top_level_functions.py b/sdks/python/apache_beam/dataframe/pandas_top_level_functions.py index 443843e629b6..39df3f25a2e8 100644 --- a/sdks/python/apache_beam/dataframe/pandas_top_level_functions.py +++ b/sdks/python/apache_beam/dataframe/pandas_top_level_functions.py @@ -38,7 +38,7 @@ def wrapper(target, *args, **kwargs): def _maybe_wrap_constant_expr(res): - if type(res) in frame_base.DeferredBase._pandas_type_map.keys(): + if type(res) in frame_base.DeferredBase._pandas_type_map: return frame_base.DeferredBase.wrap( expressions.ConstantExpression(res, res[0:0])) else: diff --git a/sdks/python/apache_beam/dataframe/partitionings.py b/sdks/python/apache_beam/dataframe/partitionings.py index 9891e71ef4f9..bb8c994ca909 100644 --- a/sdks/python/apache_beam/dataframe/partitionings.py +++ b/sdks/python/apache_beam/dataframe/partitionings.py @@ -195,7 +195,6 @@ def shuffled(seq): random.shuffle(seq) return seq - # pylint: disable=range-builtin-not-iterating part = pd.Series(shuffled(range(len(df))), index=df.index) % num_partitions for k in range(num_partitions): yield k, df[part == k] diff --git a/sdks/python/apache_beam/dataframe/partitionings_test.py b/sdks/python/apache_beam/dataframe/partitionings_test.py index b60aa6707eea..c0fa8a9dc637 100644 --- a/sdks/python/apache_beam/dataframe/partitionings_test.py +++ b/sdks/python/apache_beam/dataframe/partitionings_test.py @@ -24,7 +24,7 @@ class PartitioningsTest(unittest.TestCase): - # pylint: disable=range-builtin-not-iterating + # pylint: disable=bad-option-value multi_index_df = pd.DataFrame({ 'shape': ['dodecahedron', 'icosahedron'] * 12, diff --git a/sdks/python/apache_beam/dataframe/transforms.py b/sdks/python/apache_beam/dataframe/transforms.py index b698448b3bbd..9b0c4fb14836 100644 --- a/sdks/python/apache_beam/dataframe/transforms.py +++ b/sdks/python/apache_beam/dataframe/transforms.py @@ -109,7 +109,7 @@ def expand(self, input_pcolls): input_dict = _flatten(input_pcolls) # type: Dict[Any, PCollection] proxies = _flatten(self._proxy) if self._proxy is not None else { tag: None - for tag in input_dict.keys() + for tag in input_dict } input_frames = { k: 
convert.to_dataframe(pc, proxies[k]) diff --git a/sdks/python/apache_beam/examples/avro_bitcoin.py b/sdks/python/apache_beam/examples/avro_bitcoin.py index df7fad657d66..9b851a8c6e38 100644 --- a/sdks/python/apache_beam/examples/avro_bitcoin.py +++ b/sdks/python/apache_beam/examples/avro_bitcoin.py @@ -43,7 +43,7 @@ class BitcoinTxnCountDoFn(beam.DoFn): """Count inputs and outputs per transaction""" def __init__(self): # TODO(BEAM-6158): Revert the workaround once we can pickle super() on py3. - # super(BitcoinTxnCountDoFn, self).__init__() + # super().__init__() beam.DoFn.__init__(self) self.txn_counter = Metrics.counter(self.__class__, 'txns') self.inputs_dist = Metrics.distribution(self.__class__, 'inputs_per_txn') diff --git a/sdks/python/apache_beam/examples/complete/autocomplete.py b/sdks/python/apache_beam/examples/complete/autocomplete.py index 3de8ff7cf423..4e4c5143b96b 100644 --- a/sdks/python/apache_beam/examples/complete/autocomplete.py +++ b/sdks/python/apache_beam/examples/complete/autocomplete.py @@ -59,7 +59,7 @@ def format_result(prefix_candidates): class TopPerPrefix(beam.PTransform): def __init__(self, count): # TODO(BEAM-6158): Revert the workaround once we can pickle super() on py3. - # super(TopPerPrefix, self).__init__() + # super().__init__() beam.PTransform.__init__(self) self._count = count diff --git a/sdks/python/apache_beam/examples/complete/game/game_stats.py b/sdks/python/apache_beam/examples/complete/game/game_stats.py index e6b6e3528422..d6f5aab3e7b9 100644 --- a/sdks/python/apache_beam/examples/complete/game/game_stats.py +++ b/sdks/python/apache_beam/examples/complete/game/game_stats.py @@ -106,7 +106,7 @@ class ParseGameEventFn(beam.DoFn): """ def __init__(self): # TODO(BEAM-6158): Revert the workaround once we can pickle super() on py3. - # super(ParseGameEventFn, self).__init__() + # super().__init__() beam.DoFn.__init__(self) self.num_parse_errors = Metrics.counter(self.__class__, 'num_parse_errors') @@ -132,7 +132,7 @@ class ExtractAndSumScore(beam.PTransform): """ def __init__(self, field): # TODO(BEAM-6158): Revert the workaround once we can pickle super() on py3. - # super(ExtractAndSumScore, self).__init__() + # super().__init__() beam.PTransform.__init__(self) self.field = field @@ -172,7 +172,7 @@ def __init__(self, table_name, dataset, schema, project): project: Name of the Cloud project containing BigQuery table. """ # TODO(BEAM-6158): Revert the workaround once we can pickle super() on py3. - # super(WriteToBigQuery, self).__init__() + # super().__init__() beam.PTransform.__init__(self) self.table_name = table_name self.dataset = dataset diff --git a/sdks/python/apache_beam/examples/complete/game/hourly_team_score.py b/sdks/python/apache_beam/examples/complete/game/hourly_team_score.py index 621a88c890a2..48a105af527d 100644 --- a/sdks/python/apache_beam/examples/complete/game/hourly_team_score.py +++ b/sdks/python/apache_beam/examples/complete/game/hourly_team_score.py @@ -106,7 +106,7 @@ class ParseGameEventFn(beam.DoFn): """ def __init__(self): # TODO(BEAM-6158): Revert the workaround once we can pickle super() on py3. - # super(ParseGameEventFn, self).__init__() + # super().__init__() beam.DoFn.__init__(self) self.num_parse_errors = Metrics.counter(self.__class__, 'num_parse_errors') @@ -132,7 +132,7 @@ class ExtractAndSumScore(beam.PTransform): """ def __init__(self, field): # TODO(BEAM-6158): Revert the workaround once we can pickle super() on py3. 
- # super(ExtractAndSumScore, self).__init__() + # super().__init__() beam.PTransform.__init__(self) self.field = field @@ -172,7 +172,7 @@ def __init__(self, table_name, dataset, schema, project): project: Name of the Cloud project containing BigQuery table. """ # TODO(BEAM-6158): Revert the workaround once we can pickle super() on py3. - # super(WriteToBigQuery, self).__init__() + # super().__init__() beam.PTransform.__init__(self) self.table_name = table_name self.dataset = dataset @@ -197,7 +197,7 @@ def expand(self, pcoll): class HourlyTeamScore(beam.PTransform): def __init__(self, start_min, stop_min, window_duration): # TODO(BEAM-6158): Revert the workaround once we can pickle super() on py3. - # super(HourlyTeamScore, self).__init__() + # super().__init__() beam.PTransform.__init__(self) self.start_timestamp = str2timestamp(start_min) self.stop_timestamp = str2timestamp(stop_min) diff --git a/sdks/python/apache_beam/examples/complete/game/leader_board.py b/sdks/python/apache_beam/examples/complete/game/leader_board.py index 5e04d6042745..308e1e1cf5c0 100644 --- a/sdks/python/apache_beam/examples/complete/game/leader_board.py +++ b/sdks/python/apache_beam/examples/complete/game/leader_board.py @@ -115,7 +115,7 @@ class ParseGameEventFn(beam.DoFn): """ def __init__(self): # TODO(BEAM-6158): Revert the workaround once we can pickle super() on py3. - # super(ParseGameEventFn, self).__init__() + # super().__init__() beam.DoFn.__init__(self) self.num_parse_errors = Metrics.counter(self.__class__, 'num_parse_errors') @@ -141,7 +141,7 @@ class ExtractAndSumScore(beam.PTransform): """ def __init__(self, field): # TODO(BEAM-6158): Revert the workaround once we can pickle super() on py3. - # super(ExtractAndSumScore, self).__init__() + # super().__init__() beam.PTransform.__init__(self) self.field = field @@ -181,7 +181,7 @@ def __init__(self, table_name, dataset, schema, project): project: Name of the Cloud project containing BigQuery table. """ # TODO(BEAM-6158): Revert the workaround once we can pickle super() on py3. - # super(WriteToBigQuery, self).__init__() + # super().__init__() beam.PTransform.__init__(self) self.table_name = table_name self.dataset = dataset @@ -211,7 +211,7 @@ class CalculateTeamScores(beam.PTransform): """ def __init__(self, team_window_duration, allowed_lateness): # TODO(BEAM-6158): Revert the workaround once we can pickle super() on py3. - # super(CalculateTeamScores, self).__init__() + # super().__init__() beam.PTransform.__init__(self) self.team_window_duration = team_window_duration * 60 self.allowed_lateness_seconds = allowed_lateness * 60 @@ -243,7 +243,7 @@ class CalculateUserScores(beam.PTransform): """ def __init__(self, allowed_lateness): # TODO(BEAM-6158): Revert the workaround once we can pickle super() on py3. - # super(CalculateUserScores, self).__init__() + # super().__init__() beam.PTransform.__init__(self) self.allowed_lateness_seconds = allowed_lateness * 60 diff --git a/sdks/python/apache_beam/examples/complete/game/user_score.py b/sdks/python/apache_beam/examples/complete/game/user_score.py index a87f2213ecc4..6a97d7e2ed3b 100644 --- a/sdks/python/apache_beam/examples/complete/game/user_score.py +++ b/sdks/python/apache_beam/examples/complete/game/user_score.py @@ -79,7 +79,7 @@ class ParseGameEventFn(beam.DoFn): """ def __init__(self): # TODO(BEAM-6158): Revert the workaround once we can pickle super() on py3. 
- # super(ParseGameEventFn, self).__init__() + # super().__init__() beam.DoFn.__init__(self) self.num_parse_errors = Metrics.counter(self.__class__, 'num_parse_errors') @@ -106,7 +106,7 @@ class ExtractAndSumScore(beam.PTransform): """ def __init__(self, field): # TODO(BEAM-6158): Revert the workaround once we can pickle super() on py3. - # super(ExtractAndSumScore, self).__init__() + # super().__init__() beam.PTransform.__init__(self) self.field = field diff --git a/sdks/python/apache_beam/examples/complete/top_wikipedia_sessions.py b/sdks/python/apache_beam/examples/complete/top_wikipedia_sessions.py index 572775871b4a..7064a5add13c 100644 --- a/sdks/python/apache_beam/examples/complete/top_wikipedia_sessions.py +++ b/sdks/python/apache_beam/examples/complete/top_wikipedia_sessions.py @@ -117,7 +117,7 @@ class ComputeTopSessions(beam.PTransform): """Computes the top user sessions for each month.""" def __init__(self, sampling_threshold): # TODO(BEAM-6158): Revert the workaround once we can pickle super() on py3. - # super(ComputeTopSessions, self).__init__() + # super().__init__() beam.PTransform.__init__(self) self.sampling_threshold = sampling_threshold diff --git a/sdks/python/apache_beam/examples/cookbook/bigtableio_it_test.py b/sdks/python/apache_beam/examples/cookbook/bigtableio_it_test.py index ffe82caba1ac..e3ea1447630f 100644 --- a/sdks/python/apache_beam/examples/cookbook/bigtableio_it_test.py +++ b/sdks/python/apache_beam/examples/cookbook/bigtableio_it_test.py @@ -68,7 +68,7 @@ class GenerateTestRows(beam.PTransform): """ def __init__(self, number, project_id=None, instance_id=None, table_id=None): # TODO(BEAM-6158): Revert the workaround once we can pickle super() on py3. - # super(WriteToBigTable, self).__init__() + # super().__init__() beam.PTransform.__init__(self) self.number = number self.rand = random.choice(string.ascii_letters + string.digits) diff --git a/sdks/python/apache_beam/examples/fastavro_it_test.py b/sdks/python/apache_beam/examples/fastavro_it_test.py index c9bb9881a13e..f25db8ee3faf 100644 --- a/sdks/python/apache_beam/examples/fastavro_it_test.py +++ b/sdks/python/apache_beam/examples/fastavro_it_test.py @@ -109,11 +109,11 @@ def test_avro_it(self): batch_size = self.test_pipeline.get_option('batch-size') batch_size = int(batch_size) if batch_size else 10000 - # pylint: disable=range-builtin-not-iterating + # pylint: disable=bad-option-value batches = range(int(num_records / batch_size)) def batch_indices(start): - # pylint: disable=range-builtin-not-iterating + # pylint: disable=bad-option-value return range(start * batch_size, (start + 1) * batch_size) # A `PCollection` with `num_records` avro records diff --git a/sdks/python/apache_beam/examples/flink/flink_streaming_impulse.py b/sdks/python/apache_beam/examples/flink/flink_streaming_impulse.py index badf40126fd5..fdd209a8efd9 100644 --- a/sdks/python/apache_beam/examples/flink/flink_streaming_impulse.py +++ b/sdks/python/apache_beam/examples/flink/flink_streaming_impulse.py @@ -27,9 +27,9 @@ import sys import apache_beam as beam -import apache_beam.transforms.window as window from apache_beam.io.flink.flink_streaming_impulse_source import FlinkStreamingImpulseSource from apache_beam.options.pipeline_options import PipelineOptions +from apache_beam.transforms import window from apache_beam.transforms.trigger import AccumulationMode from apache_beam.transforms.trigger import AfterProcessingTime from apache_beam.transforms.trigger import Repeatedly diff --git 
a/sdks/python/apache_beam/examples/snippets/snippets.py b/sdks/python/apache_beam/examples/snippets/snippets.py index b665e85d96cf..936d06ebd233 100644 --- a/sdks/python/apache_beam/examples/snippets/snippets.py +++ b/sdks/python/apache_beam/examples/snippets/snippets.py @@ -801,7 +801,7 @@ class _CountingSource(CountingSource): # [START model_custom_source_new_ptransform] class ReadFromCountingSource(PTransform): def __init__(self, count): - super(ReadFromCountingSource, self).__init__() + super().__init__() self._count = count def expand(self, pcoll): @@ -923,7 +923,7 @@ def close(self): class WriteToKVSink(PTransform): def __init__(self, simplekv, url, final_table_name): self._simplekv = simplekv - super(WriteToKVSink, self).__init__() + super().__init__() self._url = url self._final_table_name = final_table_name diff --git a/sdks/python/apache_beam/examples/snippets/snippets_test.py b/sdks/python/apache_beam/examples/snippets/snippets_test.py index 8f215a336177..940e9feadfaf 100644 --- a/sdks/python/apache_beam/examples/snippets/snippets_test.py +++ b/sdks/python/apache_beam/examples/snippets/snippets_test.py @@ -35,7 +35,6 @@ import parameterized import apache_beam as beam -import apache_beam.transforms.combiners as combiners from apache_beam import WindowInto from apache_beam import coders from apache_beam import pvalue @@ -51,6 +50,7 @@ from apache_beam.testing.test_stream import TestStream from apache_beam.testing.util import assert_that from apache_beam.testing.util import equal_to +from apache_beam.transforms import combiners from apache_beam.transforms.trigger import AccumulationMode from apache_beam.transforms.trigger import AfterAny from apache_beam.transforms.trigger import AfterCount diff --git a/sdks/python/apache_beam/examples/streaming_wordcount.py b/sdks/python/apache_beam/examples/streaming_wordcount.py index d276cfc26d54..9ae763df81a5 100644 --- a/sdks/python/apache_beam/examples/streaming_wordcount.py +++ b/sdks/python/apache_beam/examples/streaming_wordcount.py @@ -24,11 +24,11 @@ import logging import apache_beam as beam -import apache_beam.transforms.window as window from apache_beam.examples.wordcount_with_metrics import WordExtractingDoFn from apache_beam.options.pipeline_options import PipelineOptions from apache_beam.options.pipeline_options import SetupOptions from apache_beam.options.pipeline_options import StandardOptions +from apache_beam.transforms import window def run(argv=None, save_main_session=True): diff --git a/sdks/python/apache_beam/examples/streaming_wordcount_debugging.py b/sdks/python/apache_beam/examples/streaming_wordcount_debugging.py index 2df87f4aa045..f64e6fe1f772 100644 --- a/sdks/python/apache_beam/examples/streaming_wordcount_debugging.py +++ b/sdks/python/apache_beam/examples/streaming_wordcount_debugging.py @@ -40,13 +40,13 @@ import time import apache_beam as beam -import apache_beam.transforms.window as window from apache_beam.examples.wordcount import WordExtractingDoFn from apache_beam.options.pipeline_options import PipelineOptions from apache_beam.options.pipeline_options import SetupOptions from apache_beam.options.pipeline_options import StandardOptions from apache_beam.testing.util import assert_that from apache_beam.testing.util import equal_to_per_window +from apache_beam.transforms import window from apache_beam.transforms.core import ParDo diff --git a/sdks/python/apache_beam/examples/windowed_wordcount.py b/sdks/python/apache_beam/examples/windowed_wordcount.py index 861a14792148..7889f61b9a62 100644 --- 
a/sdks/python/apache_beam/examples/windowed_wordcount.py +++ b/sdks/python/apache_beam/examples/windowed_wordcount.py @@ -27,7 +27,7 @@ import logging import apache_beam as beam -import apache_beam.transforms.window as window +from apache_beam.transforms import window TABLE_SCHEMA = ( 'word:STRING, count:INTEGER, ' diff --git a/sdks/python/apache_beam/examples/wordcount_debugging.py b/sdks/python/apache_beam/examples/wordcount_debugging.py index d4195df43a8d..404c123161ea 100644 --- a/sdks/python/apache_beam/examples/wordcount_debugging.py +++ b/sdks/python/apache_beam/examples/wordcount_debugging.py @@ -60,7 +60,7 @@ class FilterTextFn(beam.DoFn): """A DoFn that filters for a specific key based on a regular expression.""" def __init__(self, pattern): # TODO(BEAM-6158): Revert the workaround once we can pickle super() on py3. - # super(FilterTextFn, self).__init__() + # super().__init__() beam.DoFn.__init__(self) self.pattern = pattern # A custom metric can track values in your pipeline as it runs. Those diff --git a/sdks/python/apache_beam/examples/wordcount_it_test.py b/sdks/python/apache_beam/examples/wordcount_it_test.py index 8ee49c706555..be8bbbfed8a6 100644 --- a/sdks/python/apache_beam/examples/wordcount_it_test.py +++ b/sdks/python/apache_beam/examples/wordcount_it_test.py @@ -97,8 +97,7 @@ def _run_wordcount_it(self, run_wordcount, **opts): # Register clean up before pipeline execution self.addCleanup(delete_files, [test_output + '*']) - publish_to_bq = bool( - test_pipeline.get_option('publish_to_big_query') or False) + publish_to_bq = bool(test_pipeline.get_option('publish_to_big_query')) # Start measure time for performance test start_time = time.time() diff --git a/sdks/python/apache_beam/examples/wordcount_with_metrics.py b/sdks/python/apache_beam/examples/wordcount_with_metrics.py index bf61476ca3d2..8e1dd057fc41 100644 --- a/sdks/python/apache_beam/examples/wordcount_with_metrics.py +++ b/sdks/python/apache_beam/examples/wordcount_with_metrics.py @@ -36,7 +36,7 @@ class WordExtractingDoFn(beam.DoFn): """Parse each line of input text into words.""" def __init__(self): # TODO(BEAM-6158): Revert the workaround once we can pickle super() on py3. 
- # super(WordExtractingDoFn, self).__init__() + # super().__init__() beam.DoFn.__init__(self) self.words_counter = Metrics.counter(self.__class__, 'words') self.word_lengths_counter = Metrics.counter(self.__class__, 'word_lengths') diff --git a/sdks/python/apache_beam/internal/gcp/auth.py b/sdks/python/apache_beam/internal/gcp/auth.py index b3dbe320ad69..ec8a4cd6737a 100644 --- a/sdks/python/apache_beam/internal/gcp/auth.py +++ b/sdks/python/apache_beam/internal/gcp/auth.py @@ -52,8 +52,7 @@ class _GceAssertionCredentials(GceAssertionCredentials): @retry.with_exponential_backoff( retry_filter=retry.retry_on_server_errors_and_timeout_filter) def _do_refresh_request(self, http_request): - return super(_GceAssertionCredentials, - self)._do_refresh_request(http_request) + return super()._do_refresh_request(http_request) def set_running_in_gce(worker_executing_project): diff --git a/sdks/python/apache_beam/internal/metrics/metric.py b/sdks/python/apache_beam/internal/metrics/metric.py index c149fe1ebebd..b76e895167a9 100644 --- a/sdks/python/apache_beam/internal/metrics/metric.py +++ b/sdks/python/apache_beam/internal/metrics/metric.py @@ -105,7 +105,7 @@ class DelegatingHistogram(Histogram): """Metrics Histogram that Delegates functionality to MetricsEnvironment.""" def __init__(self, metric_name, bucket_type, logger): # type: (MetricName, BucketType, Optional[MetricLogger]) -> None - super(Metrics.DelegatingHistogram, self).__init__(metric_name) + super().__init__(metric_name) self.metric_name = metric_name self.cell_type = HistogramCellFactory(bucket_type) self.logger = logger @@ -125,7 +125,7 @@ class MetricLogger(object): """ def __init__(self): # type: () -> None - self._metric = dict() # type: Dict[MetricName, MetricCell] + self._metric = {} # type: Dict[MetricName, MetricCell] self._lock = threading.Lock() self._last_logging_millis = int(time.time() * 1000) self.minimum_logging_frequency_msec = 180000 @@ -158,7 +158,7 @@ def log_metrics(self, reset_after_logging=False): logging_metric_info.append('%s: %s' % (name, cell.get_cumulative())) _LOGGER.info('\n'.join(logging_metric_info)) if reset_after_logging: - self._metric = dict() + self._metric = {} self._last_logging_millis = current_millis finally: self._lock.release() diff --git a/sdks/python/apache_beam/io/avroio.py b/sdks/python/apache_beam/io/avroio.py index 45a619c19bbb..e861d533ad67 100644 --- a/sdks/python/apache_beam/io/avroio.py +++ b/sdks/python/apache_beam/io/avroio.py @@ -143,7 +143,7 @@ def __init__( use_fastavro (bool); when set, use the `fastavro` library for IO, which is significantly faster, and is now the default. """ - super(ReadFromAvro, self).__init__() + super().__init__() self._source = _create_avro_source( file_pattern, min_bundle_size, @@ -578,7 +578,7 @@ def __init__( num_shards, shard_name_template, mime_type): - super(_BaseAvroSink, self).__init__( + super().__init__( file_path_prefix, file_name_suffix=file_name_suffix, num_shards=num_shards, @@ -592,7 +592,7 @@ def __init__( self._codec = codec def display_data(self): - res = super(_BaseAvroSink, self).display_data() + res = super().display_data() res['codec'] = str(self._codec) res['schema'] = str(self._schema) return res @@ -601,7 +601,7 @@ def display_data(self): class _AvroSink(_BaseAvroSink): """A sink for avro files using Avro. 
""" def open(self, temp_path): - file_handle = super(_AvroSink, self).open(temp_path) + file_handle = super().open(temp_path) return avro.datafile.DataFileWriter( file_handle, avro.io.DatumWriter(), self._schema, self._codec) @@ -612,7 +612,7 @@ def write_record(self, writer, value): class _FastAvroSink(_BaseAvroSink): """A sink for avro files using FastAvro. """ def open(self, temp_path): - file_handle = super(_FastAvroSink, self).open(temp_path) + file_handle = super().open(temp_path) return Writer(file_handle, self._schema, self._codec) def write_record(self, writer, value): diff --git a/sdks/python/apache_beam/io/avroio_test.py b/sdks/python/apache_beam/io/avroio_test.py index 2cb5c5cfae8c..dcd1cf76d4c8 100644 --- a/sdks/python/apache_beam/io/avroio_test.py +++ b/sdks/python/apache_beam/io/avroio_test.py @@ -75,7 +75,7 @@ class AvroBase(object): _temp_files = [] # type: List[str] def __init__(self, methodName='runTest'): - super(AvroBase, self).__init__(methodName) + super().__init__(methodName) self.RECORDS = RECORDS self.SCHEMA_STRING = ''' {"namespace": "example.avro", @@ -447,7 +447,7 @@ def test_sink_transform_snappy(self): 'See: BEAM-6522.') class TestAvro(AvroBase, unittest.TestCase): def __init__(self, methodName='runTest'): - super(TestAvro, self).__init__(methodName) + super().__init__(methodName) self.use_fastavro = False self.SCHEMA = Parse(self.SCHEMA_STRING) @@ -477,7 +477,7 @@ def _write_data( class TestFastAvro(AvroBase, unittest.TestCase): def __init__(self, methodName='runTest'): - super(TestFastAvro, self).__init__(methodName) + super().__init__(methodName) self.use_fastavro = True self.SCHEMA = parse_schema(json.loads(self.SCHEMA_STRING)) diff --git a/sdks/python/apache_beam/io/aws/s3filesystem.py b/sdks/python/apache_beam/io/aws/s3filesystem.py index fa26badd60cb..8a5e94e3fc76 100644 --- a/sdks/python/apache_beam/io/aws/s3filesystem.py +++ b/sdks/python/apache_beam/io/aws/s3filesystem.py @@ -42,7 +42,7 @@ def __init__(self, pipeline_options): Connection configuration is done by passing pipeline options. See :class:`~apache_beam.options.pipeline_options.S3Options`. """ - super(S3FileSystem, self).__init__(pipeline_options) + super().__init__(pipeline_options) self._options = pipeline_options @classmethod diff --git a/sdks/python/apache_beam/io/concat_source.py b/sdks/python/apache_beam/io/concat_source.py index 3872ccbe01ae..35ae6fe27317 100644 --- a/sdks/python/apache_beam/io/concat_source.py +++ b/sdks/python/apache_beam/io/concat_source.py @@ -91,7 +91,7 @@ def default_output_coder(self): # to produce the same coder. 
return self._source_bundles[0].source.default_output_coder() else: - return super(ConcatSource, self).default_output_coder() + return super().default_output_coder() class ConcatRangeTracker(iobase.RangeTracker): @@ -106,7 +106,7 @@ def __init__(self, start, end, source_bundles): end: end position, a tuple of (source_index, source_position) source_bundles: the list of source bundles in the ConcatSource """ - super(ConcatRangeTracker, self).__init__() + super().__init__() self._start = start self._end = end self._source_bundles = source_bundles diff --git a/sdks/python/apache_beam/io/external/generate_sequence.py b/sdks/python/apache_beam/io/external/generate_sequence.py index e7f56e751383..1f94a8d1d9e9 100644 --- a/sdks/python/apache_beam/io/external/generate_sequence.py +++ b/sdks/python/apache_beam/io/external/generate_sequence.py @@ -54,7 +54,7 @@ def __init__( elements_per_period=None, max_read_time=None, expansion_service=None): - super(GenerateSequence, self).__init__( + super().__init__( self.URN, ImplicitSchemaPayloadBuilder({ 'start': start, diff --git a/sdks/python/apache_beam/io/external/xlang_kafkaio_it_test.py b/sdks/python/apache_beam/io/external/xlang_kafkaio_it_test.py index 6da4567dc7bb..7f75e2bf3de1 100644 --- a/sdks/python/apache_beam/io/external/xlang_kafkaio_it_test.py +++ b/sdks/python/apache_beam/io/external/xlang_kafkaio_it_test.py @@ -73,7 +73,7 @@ def __init__(self, bootstrap_servers, topic, expansion_service=None): def build_write_pipeline(self, pipeline): _ = ( pipeline - | 'Generate' >> beam.Create(range(NUM_RECORDS)) # pylint: disable=range-builtin-not-iterating + | 'Generate' >> beam.Create(range(NUM_RECORDS)) # pylint: disable=bad-option-value | 'MakeKV' >> beam.Map(lambda x: (b'', str(x).encode())).with_output_types( typing.Tuple[bytes, bytes]) diff --git a/sdks/python/apache_beam/io/external/xlang_kinesisio_it_test.py b/sdks/python/apache_beam/io/external/xlang_kinesisio_it_test.py index 01ff279528bc..2817ea9f93b3 100644 --- a/sdks/python/apache_beam/io/external/xlang_kinesisio_it_test.py +++ b/sdks/python/apache_beam/io/external/xlang_kinesisio_it_test.py @@ -104,7 +104,7 @@ def run_kinesis_write(self): _ = ( p | 'Impulse' >> beam.Impulse() - | 'Generate' >> beam.FlatMap(lambda x: range(NUM_RECORDS)) # pylint: disable=range-builtin-not-iterating + | 'Generate' >> beam.FlatMap(lambda x: range(NUM_RECORDS)) # pylint: disable=bad-option-value | 'Map to bytes' >> beam.Map(lambda x: RECORD + str(x).encode()).with_output_types(bytes) | 'WriteToKinesis' >> WriteToKinesis( diff --git a/sdks/python/apache_beam/io/external/xlang_parquetio_test.py b/sdks/python/apache_beam/io/external/xlang_parquetio_test.py index e6c06be2d270..10470c1f5cff 100644 --- a/sdks/python/apache_beam/io/external/xlang_parquetio_test.py +++ b/sdks/python/apache_beam/io/external/xlang_parquetio_test.py @@ -78,7 +78,7 @@ class AvroTestCoder(coders.AvroGenericCoder): """ def __init__(self): - super(AvroTestCoder, self).__init__(self.SCHEMA) + super().__init__(self.SCHEMA) coders.registry.register_coder(AvroRecord, AvroTestCoder) diff --git a/sdks/python/apache_beam/io/external/xlang_snowflakeio_it_test.py b/sdks/python/apache_beam/io/external/xlang_snowflakeio_it_test.py index 052ea3156228..f78175a8696d 100644 --- a/sdks/python/apache_beam/io/external/xlang_snowflakeio_it_test.py +++ b/sdks/python/apache_beam/io/external/xlang_snowflakeio_it_test.py @@ -109,7 +109,7 @@ def user_data_mapper(test_row): _ = ( p | 'Impulse' >> beam.Impulse() - | 'Generate' >> beam.FlatMap(lambda x: 
range(NUM_RECORDS)) # pylint: disable=range-builtin-not-iterating + | 'Generate' >> beam.FlatMap(lambda x: range(NUM_RECORDS)) # pylint: disable=bad-option-value | 'Map to TestRow' >> beam.Map( lambda num: TestRow( num, num % 2 == 0, b"test" + str(num).encode())) diff --git a/sdks/python/apache_beam/io/filebasedsink_test.py b/sdks/python/apache_beam/io/filebasedsink_test.py index c75958ce744e..121bc479200f 100644 --- a/sdks/python/apache_beam/io/filebasedsink_test.py +++ b/sdks/python/apache_beam/io/filebasedsink_test.py @@ -81,7 +81,7 @@ def _create_temp_file(self, name='', suffix='', dir=None, content=None): class MyFileBasedSink(filebasedsink.FileBasedSink): def open(self, temp_path): # TODO: Fix main session pickling. - # file_handle = super(MyFileBasedSink, self).open(temp_path) + # file_handle = super().open(temp_path) file_handle = filebasedsink.FileBasedSink.open(self, temp_path) file_handle.write(b'[start]') return file_handle @@ -94,7 +94,7 @@ def write_encoded_record(self, file_handle, encoded_value): def close(self, file_handle): file_handle.write(b'[end]') # TODO: Fix main session pickling. - # file_handle = super(MyFileBasedSink, self).close(file_handle) + # file_handle = super().close(file_handle) file_handle = filebasedsink.FileBasedSink.close(self, file_handle) diff --git a/sdks/python/apache_beam/io/fileio_test.py b/sdks/python/apache_beam/io/fileio_test.py index 90086c967364..89fb2c8a8ac1 100644 --- a/sdks/python/apache_beam/io/fileio_test.py +++ b/sdks/python/apache_beam/io/fileio_test.py @@ -102,7 +102,7 @@ def test_match_files_one_directory_failure1(self): '%s%s' % (self._new_tempdir(), os.sep) ] - files = list() + files = [] files.append(self._create_temp_file(dir=directories[0])) files.append(self._create_temp_file(dir=directories[0])) @@ -122,7 +122,7 @@ def test_match_files_one_directory_failure2(self): '%s%s' % (self._new_tempdir(), os.sep) ] - files = list() + files = [] files.append(self._create_temp_file(dir=directories[0])) files.append(self._create_temp_file(dir=directories[0])) diff --git a/sdks/python/apache_beam/io/filesystem.py b/sdks/python/apache_beam/io/filesystem.py index 687ab043b1dc..92d55929309f 100644 --- a/sdks/python/apache_beam/io/filesystem.py +++ b/sdks/python/apache_beam/io/filesystem.py @@ -468,7 +468,7 @@ def __init__(self, msg, exception_details=None): the current state of the system. 
""" message = "%s with exceptions %s" % (msg, exception_details) - super(BeamIOError, self).__init__(message) + super().__init__(message) self.exception_details = exception_details diff --git a/sdks/python/apache_beam/io/filesystem_test.py b/sdks/python/apache_beam/io/filesystem_test.py index 83453a96d247..a2463352de39 100644 --- a/sdks/python/apache_beam/io/filesystem_test.py +++ b/sdks/python/apache_beam/io/filesystem_test.py @@ -42,7 +42,7 @@ class TestingFileSystem(FileSystem): def __init__(self, pipeline_options, has_dirs=False): - super(TestingFileSystem, self).__init__(pipeline_options) + super().__init__(pipeline_options) self._has_dirs = has_dirs self._files = {} @@ -499,7 +499,7 @@ def generate_random_line(): return b''.join(byte_list) def create_test_file(compression_type, lines): - filenames = list() + filenames = [] file_name = self._create_temp_file() if compression_type == CompressionTypes.BZIP2: compress_factory = bz2.BZ2File diff --git a/sdks/python/apache_beam/io/filesystemio.py b/sdks/python/apache_beam/io/filesystemio.py index 70f39daf661c..571d1f2d2699 100644 --- a/sdks/python/apache_beam/io/filesystemio.py +++ b/sdks/python/apache_beam/io/filesystemio.py @@ -214,7 +214,7 @@ def close(self): if not self.closed: self._uploader.finish() - super(UploaderStream, self).close() + super().close() def writable(self): return True diff --git a/sdks/python/apache_beam/io/gcp/__init__.py b/sdks/python/apache_beam/io/gcp/__init__.py index 87dc1c30ee18..f88a0117aa46 100644 --- a/sdks/python/apache_beam/io/gcp/__init__.py +++ b/sdks/python/apache_beam/io/gcp/__init__.py @@ -21,8 +21,8 @@ try: # pylint: disable=wrong-import-order, wrong-import-position # pylint: disable=ungrouped-imports - import apitools.base.py.transfer as transfer import email.generator as email_generator + from apitools.base.py import transfer class _WrapperNamespace(object): class BytesGenerator(email_generator.BytesGenerator): diff --git a/sdks/python/apache_beam/io/gcp/bigquery_read_perf_test.py b/sdks/python/apache_beam/io/gcp/bigquery_read_perf_test.py index 957028cb1e53..0b4cfe2ecbae 100644 --- a/sdks/python/apache_beam/io/gcp/bigquery_read_perf_test.py +++ b/sdks/python/apache_beam/io/gcp/bigquery_read_perf_test.py @@ -80,7 +80,7 @@ class BigQueryReadPerfTest(LoadTest): def __init__(self): - super(BigQueryReadPerfTest, self).__init__() + super().__init__() self.input_dataset = self.pipeline.get_option('input_dataset') self.input_table = self.pipeline.get_option('input_table') self._check_for_input_data() diff --git a/sdks/python/apache_beam/io/gcp/bigquery_write_perf_test.py b/sdks/python/apache_beam/io/gcp/bigquery_write_perf_test.py index 3936923c22e3..1aafb1b60a85 100644 --- a/sdks/python/apache_beam/io/gcp/bigquery_write_perf_test.py +++ b/sdks/python/apache_beam/io/gcp/bigquery_write_perf_test.py @@ -68,7 +68,7 @@ class BigQueryWritePerfTest(LoadTest): def __init__(self): - super(BigQueryWritePerfTest, self).__init__() + super().__init__() self.output_dataset = self.pipeline.get_option('output_dataset') self.output_table = self.pipeline.get_option('output_table') diff --git a/sdks/python/apache_beam/io/gcp/bigtableio.py b/sdks/python/apache_beam/io/gcp/bigtableio.py index 0ae1f7c37f19..f41f802f733d 100644 --- a/sdks/python/apache_beam/io/gcp/bigtableio.py +++ b/sdks/python/apache_beam/io/gcp/bigtableio.py @@ -58,7 +58,7 @@ class _MutationsBatcher(MutationsBatcher): def __init__( self, table, flush_count=FLUSH_COUNT, max_row_bytes=MAX_ROW_BYTES): - super(_MutationsBatcher, self).__init__(table, 
flush_count, max_row_bytes) + super().__init__(table, flush_count, max_row_bytes) self.rows = [] def set_flush_callback(self, callback_fn): @@ -95,7 +95,7 @@ def __init__(self, project_id, instance_id, table_id): instance_id(str): GCP Instance to write the Rows table_id(str): GCP Table to write the `DirectRows` """ - super(_BigTableWriteFn, self).__init__() + super().__init__() self.beam_options = { 'project_id': project_id, 'instance_id': instance_id, @@ -192,7 +192,7 @@ def __init__(self, project_id=None, instance_id=None, table_id=None): instance_id(str): GCP Instance to write the Rows table_id(str): GCP Table to write the `DirectRows` """ - super(WriteToBigTable, self).__init__() + super().__init__() self.beam_options = { 'project_id': project_id, 'instance_id': instance_id, diff --git a/sdks/python/apache_beam/io/gcp/datastore/v1new/datastoreio.py b/sdks/python/apache_beam/io/gcp/datastore/v1new/datastoreio.py index 34539ece7fa6..4ac2803619d8 100644 --- a/sdks/python/apache_beam/io/gcp/datastore/v1new/datastoreio.py +++ b/sdks/python/apache_beam/io/gcp/datastore/v1new/datastoreio.py @@ -114,7 +114,7 @@ def __init__(self, query, num_splits=0): used to fetch entities. num_splits: (:class:`int`) (optional) Number of splits for the query. """ - super(ReadFromDatastore, self).__init__() + super().__init__() if not query.project: raise ValueError("query.project cannot be empty") @@ -168,7 +168,7 @@ def display_data(self): class _SplitQueryFn(DoFn): """A `DoFn` that splits a given query into multiple sub-queries.""" def __init__(self, num_splits): - super(ReadFromDatastore._SplitQueryFn, self).__init__() + super().__init__() self._num_splits = num_splits def process(self, query, *args, **kwargs): @@ -529,8 +529,7 @@ def __init__( estimate appropriate limits during ramp-up throttling. """ mutate_fn = WriteToDatastore._DatastoreWriteFn(project) - super(WriteToDatastore, - self).__init__(mutate_fn, throttle_rampup, hint_num_workers) + super().__init__(mutate_fn, throttle_rampup, hint_num_workers) class _DatastoreWriteFn(_Mutate.DatastoreMutateFn): def element_to_client_batch_item(self, element): @@ -583,8 +582,7 @@ def __init__( estimate appropriate limits during ramp-up throttling. 
""" mutate_fn = DeleteFromDatastore._DatastoreDeleteFn(project) - super(DeleteFromDatastore, - self).__init__(mutate_fn, throttle_rampup, hint_num_workers) + super().__init__(mutate_fn, throttle_rampup, hint_num_workers) class _DatastoreDeleteFn(_Mutate.DatastoreMutateFn): def element_to_client_batch_item(self, element): diff --git a/sdks/python/apache_beam/io/gcp/datastore/v1new/query_splitter.py b/sdks/python/apache_beam/io/gcp/datastore/v1new/query_splitter.py index 8ac5b931e7a2..842579dfb40f 100644 --- a/sdks/python/apache_beam/io/gcp/datastore/v1new/query_splitter.py +++ b/sdks/python/apache_beam/io/gcp/datastore/v1new/query_splitter.py @@ -146,7 +146,7 @@ def __init__(self, id_or_name): def __lt__(self, other): if not isinstance(other, IdOrName): - return super(IdOrName, self).__lt__(other) + return super().__lt__(other) if self.id is not None: if other.id is None: @@ -161,7 +161,7 @@ def __lt__(self, other): def __eq__(self, other): if not isinstance(other, IdOrName): - return super(IdOrName, self).__eq__(other) + return super().__eq__(other) return self.id == other.id and self.name == other.name def __hash__(self): diff --git a/sdks/python/apache_beam/io/gcp/datastore/v1new/rampup_throttling_fn.py b/sdks/python/apache_beam/io/gcp/datastore/v1new/rampup_throttling_fn.py index bf54401e4957..034e2307b459 100644 --- a/sdks/python/apache_beam/io/gcp/datastore/v1new/rampup_throttling_fn.py +++ b/sdks/python/apache_beam/io/gcp/datastore/v1new/rampup_throttling_fn.py @@ -51,7 +51,7 @@ def __init__(self, num_workers, *unused_args, **unused_kwargs): num_workers: A hint for the expected number of workers, used to derive the local rate limit. """ - super(RampupThrottlingFn, self).__init__(*unused_args, **unused_kwargs) + super().__init__(*unused_args, **unused_kwargs) self._num_workers = num_workers self._successful_ops = util.MovingSum(window_ms=1000, bucket_ms=1000) self._first_instant = datetime.datetime.now() diff --git a/sdks/python/apache_beam/io/gcp/experimental/spannerio.py b/sdks/python/apache_beam/io/gcp/experimental/spannerio.py index d50b3a880c3e..fdd08a8b64c7 100644 --- a/sdks/python/apache_beam/io/gcp/experimental/spannerio.py +++ b/sdks/python/apache_beam/io/gcp/experimental/spannerio.py @@ -669,7 +669,7 @@ def expand(self, pbegin): return p def display_data(self): - res = dict() + res = {} sql = [] table = [] if self._read_operations is not None: diff --git a/sdks/python/apache_beam/io/gcp/experimental/spannerio_read_perf_test.py b/sdks/python/apache_beam/io/gcp/experimental/spannerio_read_perf_test.py index ae7c1ea8d765..18f6c29593e7 100644 --- a/sdks/python/apache_beam/io/gcp/experimental/spannerio_read_perf_test.py +++ b/sdks/python/apache_beam/io/gcp/experimental/spannerio_read_perf_test.py @@ -80,7 +80,7 @@ class SpannerReadPerfTest(LoadTest): def __init__(self): - super(SpannerReadPerfTest, self).__init__() + super().__init__() self.project = self.pipeline.get_option('project') self.spanner_instance = self.pipeline.get_option('spanner_instance') self.spanner_database = self.pipeline.get_option('spanner_database') diff --git a/sdks/python/apache_beam/io/gcp/experimental/spannerio_write_perf_test.py b/sdks/python/apache_beam/io/gcp/experimental/spannerio_write_perf_test.py index 707db81713cd..c61608ff6743 100644 --- a/sdks/python/apache_beam/io/gcp/experimental/spannerio_write_perf_test.py +++ b/sdks/python/apache_beam/io/gcp/experimental/spannerio_write_perf_test.py @@ -76,7 +76,7 @@ class SpannerWritePerfTest(LoadTest): TEST_DATABASE = None def __init__(self): - 
super(SpannerWritePerfTest, self).__init__() + super().__init__() self.project = self.pipeline.get_option('project') self.spanner_instance = self.pipeline.get_option('spanner_instance') self.spanner_database = self.pipeline.get_option('spanner_database') diff --git a/sdks/python/apache_beam/io/gcp/gcsfilesystem.py b/sdks/python/apache_beam/io/gcp/gcsfilesystem.py index 0b20718c009d..e53ceef70c9f 100644 --- a/sdks/python/apache_beam/io/gcp/gcsfilesystem.py +++ b/sdks/python/apache_beam/io/gcp/gcsfilesystem.py @@ -328,6 +328,7 @@ def _delete_path(path): match_result = self.match([path_to_use])[0] statuses = gcsio.GcsIO().delete_batch( [m.path for m in match_result.metadata_list]) + # pylint: disable=used-before-assignment failures = [e for (_, e) in statuses if e is not None] if failures: raise failures[0] diff --git a/sdks/python/apache_beam/io/gcp/gcsio.py b/sdks/python/apache_beam/io/gcp/gcsio.py index b9fb15f655eb..1b312842dc15 100644 --- a/sdks/python/apache_beam/io/gcp/gcsio.py +++ b/sdks/python/apache_beam/io/gcp/gcsio.py @@ -53,9 +53,9 @@ try: # pylint: disable=wrong-import-order, wrong-import-position # pylint: disable=ungrouped-imports - import apitools.base.py.transfer as transfer from apitools.base.py.batch import BatchApiRequest from apitools.base.py.exceptions import HttpError + from apitools.base.py import transfer from apache_beam.internal.gcp import auth from apache_beam.io.gcp.internal.clients import storage except ImportError: diff --git a/sdks/python/apache_beam/io/gcp/gcsio_integration_test.py b/sdks/python/apache_beam/io/gcp/gcsio_integration_test.py index b06e374c1e8e..e13d993b0e80 100644 --- a/sdks/python/apache_beam/io/gcp/gcsio_integration_test.py +++ b/sdks/python/apache_beam/io/gcp/gcsio_integration_test.py @@ -141,7 +141,7 @@ def test_copy_rewrite_token(self): max_bytes_rewritten_per_call=50 * 1024 * 1024, src=self.INPUT_FILE_LARGE) # Verify that there was a multi-part rewrite. - self.assertTrue(any([not r.done for r in rewrite_responses])) + self.assertTrue(any(not r.done for r in rewrite_responses)) def _test_copy_batch( self, @@ -195,7 +195,7 @@ def test_copy_batch_rewrite_token(self): max_bytes_rewritten_per_call=50 * 1024 * 1024, src=self.INPUT_FILE_LARGE) # Verify that there was a multi-part rewrite. 
- self.assertTrue(any([not r.done for r in rewrite_responses])) + self.assertTrue(any(not r.done for r in rewrite_responses)) if __name__ == '__main__': diff --git a/sdks/python/apache_beam/io/gcp/internal/clients/bigquery/bigquery_v2_client.py b/sdks/python/apache_beam/io/gcp/internal/clients/bigquery/bigquery_v2_client.py index b695843cc821..146872f20c68 100644 --- a/sdks/python/apache_beam/io/gcp/internal/clients/bigquery/bigquery_v2_client.py +++ b/sdks/python/apache_beam/io/gcp/internal/clients/bigquery/bigquery_v2_client.py @@ -48,7 +48,7 @@ def __init__(self, url='', credentials=None, additional_http_headers=None, response_encoding=None): """Create a new bigquery handle.""" url = url or self.BASE_URL - super(BigqueryV2, self).__init__( + super().__init__( url, credentials=credentials, get_credentials=get_credentials, http=http, model=model, log_request=log_request, log_response=log_response, @@ -71,7 +71,7 @@ class DatasetsService(base_api.BaseApiService): _NAME = 'datasets' def __init__(self, client): - super(BigqueryV2.DatasetsService, self).__init__(client) + super().__init__(client) self._upload_configs = { } @@ -237,7 +237,7 @@ class JobsService(base_api.BaseApiService): _NAME = 'jobs' def __init__(self, client): - super(BigqueryV2.JobsService, self).__init__(client) + super().__init__(client) self._upload_configs = { 'Insert': base_api.ApiUploadInfo( accept=['*/*'], @@ -415,7 +415,7 @@ class ModelsService(base_api.BaseApiService): _NAME = 'models' def __init__(self, client): - super(BigqueryV2.ModelsService, self).__init__(client) + super().__init__(client) self._upload_configs = { } @@ -533,7 +533,7 @@ class ProjectsService(base_api.BaseApiService): _NAME = 'projects' def __init__(self, client): - super(BigqueryV2.ProjectsService, self).__init__(client) + super().__init__(client) self._upload_configs = { } @@ -595,7 +595,7 @@ class RoutinesService(base_api.BaseApiService): _NAME = 'routines' def __init__(self, client): - super(BigqueryV2.RoutinesService, self).__init__(client) + super().__init__(client) self._upload_configs = { } @@ -740,7 +740,7 @@ class RowAccessPoliciesService(base_api.BaseApiService): _NAME = 'rowAccessPolicies' def __init__(self, client): - super(BigqueryV2.RowAccessPoliciesService, self).__init__(client) + super().__init__(client) self._upload_configs = { } @@ -777,7 +777,7 @@ class TabledataService(base_api.BaseApiService): _NAME = 'tabledata' def __init__(self, client): - super(BigqueryV2.TabledataService, self).__init__(client) + super().__init__(client) self._upload_configs = { } @@ -839,7 +839,7 @@ class TablesService(base_api.BaseApiService): _NAME = 'tables' def __init__(self, client): - super(BigqueryV2.TablesService, self).__init__(client) + super().__init__(client) self._upload_configs = { } diff --git a/sdks/python/apache_beam/io/gcp/internal/clients/storage/storage_v1_client.py b/sdks/python/apache_beam/io/gcp/internal/clients/storage/storage_v1_client.py index 9a6e426e9f02..cd840018ef22 100644 --- a/sdks/python/apache_beam/io/gcp/internal/clients/storage/storage_v1_client.py +++ b/sdks/python/apache_beam/io/gcp/internal/clients/storage/storage_v1_client.py @@ -48,7 +48,7 @@ def __init__(self, url='', credentials=None, additional_http_headers=None, response_encoding=None): """Create a new storage handle.""" url = url or self.BASE_URL - super(StorageV1, self).__init__( + super().__init__( url, credentials=credentials, get_credentials=get_credentials, http=http, model=model, log_request=log_request, log_response=log_response, num_retries=20, 
@@ -73,7 +73,7 @@ class BucketAccessControlsService(base_api.BaseApiService): _NAME = u'bucketAccessControls' def __init__(self, client): - super(StorageV1.BucketAccessControlsService, self).__init__(client) + super().__init__(client) self._upload_configs = { } @@ -239,7 +239,7 @@ class BucketsService(base_api.BaseApiService): _NAME = u'buckets' def __init__(self, client): - super(StorageV1.BucketsService, self).__init__(client) + super().__init__(client) self._upload_configs = { } @@ -509,7 +509,7 @@ class ChannelsService(base_api.BaseApiService): _NAME = u'channels' def __init__(self, client): - super(StorageV1.ChannelsService, self).__init__(client) + super().__init__(client) self._upload_configs = { } @@ -545,7 +545,7 @@ class DefaultObjectAccessControlsService(base_api.BaseApiService): _NAME = u'defaultObjectAccessControls' def __init__(self, client): - super(StorageV1.DefaultObjectAccessControlsService, self).__init__(client) + super().__init__(client) self._upload_configs = { } @@ -711,7 +711,7 @@ class NotificationsService(base_api.BaseApiService): _NAME = u'notifications' def __init__(self, client): - super(StorageV1.NotificationsService, self).__init__(client) + super().__init__(client) self._upload_configs = { } @@ -825,7 +825,7 @@ class ObjectAccessControlsService(base_api.BaseApiService): _NAME = u'objectAccessControls' def __init__(self, client): - super(StorageV1.ObjectAccessControlsService, self).__init__(client) + super().__init__(client) self._upload_configs = { } @@ -991,7 +991,7 @@ class ObjectsService(base_api.BaseApiService): _NAME = u'objects' def __init__(self, client): - super(StorageV1.ObjectsService, self).__init__(client) + super().__init__(client) self._upload_configs = { 'Insert': base_api.ApiUploadInfo( accept=['*/*'], @@ -1354,7 +1354,7 @@ class ProjectsServiceAccountService(base_api.BaseApiService): _NAME = u'projects_serviceAccount' def __init__(self, client): - super(StorageV1.ProjectsServiceAccountService, self).__init__(client) + super().__init__(client) self._upload_configs = { } @@ -1390,6 +1390,6 @@ class ProjectsService(base_api.BaseApiService): _NAME = u'projects' def __init__(self, client): - super(StorageV1.ProjectsService, self).__init__(client) + super().__init__(client) self._upload_configs = { } diff --git a/sdks/python/apache_beam/io/gcp/pubsub.py b/sdks/python/apache_beam/io/gcp/pubsub.py index 39312bf5b673..0048f04846fd 100644 --- a/sdks/python/apache_beam/io/gcp/pubsub.py +++ b/sdks/python/apache_beam/io/gcp/pubsub.py @@ -219,7 +219,7 @@ def __init__( timestamp is optional, and digits beyond the first three (i.e., time units smaller than milliseconds) may be ignored. """ - super(ReadFromPubSub, self).__init__() + super().__init__() self.with_attributes = with_attributes self._source = _PubSubSource( topic=topic, @@ -250,7 +250,7 @@ def ReadStringsFromPubSub(topic=None, subscription=None, id_label=None): class _ReadStringsFromPubSub(PTransform): """This class is deprecated. Use ``ReadFromPubSub`` instead.""" def __init__(self, topic=None, subscription=None, id_label=None): - super(_ReadStringsFromPubSub, self).__init__() + super().__init__() self.topic = topic self.subscription = subscription self.id_label = id_label @@ -278,7 +278,7 @@ def __init__(self, topic): Attributes: topic: Cloud Pub/Sub topic in the form "/topics/<project>/<topic>".
""" - super(_WriteStringsToPubSub, self).__init__() + super().__init__() self.topic = topic def expand(self, pcoll): @@ -315,7 +315,7 @@ def __init__( timestamp_attribute: If set, will set an attribute for each Cloud Pub/Sub message with the given name and the message's publish time as the value. """ - super(WriteToPubSub, self).__init__() + super().__init__() self.with_attributes = with_attributes self.id_label = id_label self.timestamp_attribute = timestamp_attribute diff --git a/sdks/python/apache_beam/io/gcp/pubsub_io_perf_test.py b/sdks/python/apache_beam/io/gcp/pubsub_io_perf_test.py index 674f4e48abc9..8cefab350b0d 100644 --- a/sdks/python/apache_beam/io/gcp/pubsub_io_perf_test.py +++ b/sdks/python/apache_beam/io/gcp/pubsub_io_perf_test.py @@ -111,7 +111,7 @@ def _setup_pubsub(self): class PubsubWritePerfTest(PubsubIOPerfTest): def __init__(self): - super(PubsubWritePerfTest, self).__init__(WRITE_METRICS_NAMESPACE) + super().__init__(WRITE_METRICS_NAMESPACE) self._setup_env() self._setup_pubsub() self._setup_pipeline() @@ -144,7 +144,7 @@ def _setup_pipeline(self): self.pipeline = TestPipeline(options=options) def _setup_pubsub(self): - super(PubsubWritePerfTest, self)._setup_pubsub() + super()._setup_pubsub() _ = self.pub_client.create_topic(self.topic_name) _ = self.sub_client.create_subscription( @@ -155,7 +155,7 @@ def _setup_pubsub(self): class PubsubReadPerfTest(PubsubIOPerfTest): def __init__(self): - super(PubsubReadPerfTest, self).__init__(READ_METRICS_NAMESPACE) + super().__init__(READ_METRICS_NAMESPACE) self._setup_env() self._setup_pubsub() self._setup_pipeline() @@ -183,7 +183,7 @@ def test(self): | 'Write to Pubsub' >> beam.io.WriteToPubSub(self.matcher_topic_name)) def _setup_pubsub(self): - super(PubsubReadPerfTest, self)._setup_pubsub() + super()._setup_pubsub() _ = self.pub_client.create_topic(self.matcher_topic_name) _ = self.sub_client.create_subscription( diff --git a/sdks/python/apache_beam/io/gcp/spanner.py b/sdks/python/apache_beam/io/gcp/spanner.py index 60fae0405853..a5d3d1466224 100644 --- a/sdks/python/apache_beam/io/gcp/spanner.py +++ b/sdks/python/apache_beam/io/gcp/spanner.py @@ -246,7 +246,7 @@ def __init__( assert timestamp_bound_mode is TimestampBoundMode.MIN_READ_TIMESTAMP\ or timestamp_bound_mode is TimestampBoundMode.READ_TIMESTAMP - super(ReadFromSpanner, self).__init__( + super().__init__( self.URN, NamedTupleBasedPayloadBuilder( ReadFromSpannerSchema( diff --git a/sdks/python/apache_beam/io/gcp/tests/bigquery_matcher.py b/sdks/python/apache_beam/io/gcp/tests/bigquery_matcher.py index f30baade5a28..4504ba43b2c1 100644 --- a/sdks/python/apache_beam/io/gcp/tests/bigquery_matcher.py +++ b/sdks/python/apache_beam/io/gcp/tests/bigquery_matcher.py @@ -148,8 +148,7 @@ def __init__(self, project, query, data): query: The query (string) to perform. data: List of tuples with the expected data. 
""" - super(BigqueryFullResultMatcher, - self).__init__(project, query, 'unused_checksum') + super().__init__(project, query, 'unused_checksum') self.expected_data = data self.actual_data = None @@ -191,8 +190,7 @@ class BigqueryFullResultStreamingMatcher(BigqueryFullResultMatcher): DEFAULT_TIMEOUT = 5 * 60 def __init__(self, project, query, data, timeout=DEFAULT_TIMEOUT): - super(BigqueryFullResultStreamingMatcher, - self).__init__(project, query, data) + super().__init__(project, query, data) self.timeout = timeout def _get_query_result(self): diff --git a/sdks/python/apache_beam/io/gcp/tests/xlang_spannerio_it_test.py b/sdks/python/apache_beam/io/gcp/tests/xlang_spannerio_it_test.py index eb0c71cbfa80..5d701052965b 100644 --- a/sdks/python/apache_beam/io/gcp/tests/xlang_spannerio_it_test.py +++ b/sdks/python/apache_beam/io/gcp/tests/xlang_spannerio_it_test.py @@ -227,7 +227,7 @@ def run_write_pipeline( _ = ( p | 'Impulse' >> beam.Impulse() - | 'Generate' >> beam.FlatMap(lambda x: range(num_rows)) # pylint: disable=range-builtin-not-iterating + | 'Generate' >> beam.FlatMap(lambda x: range(num_rows)) # pylint: disable=bad-option-value | 'Map to row' >> beam.Map(to_row_fn).with_output_types(row_type) | 'Write to Spanner' >> spanner_transform( instance_id=self.instance_id, diff --git a/sdks/python/apache_beam/io/hadoopfilesystem.py b/sdks/python/apache_beam/io/hadoopfilesystem.py index 041b9c54cb0d..046908d930eb 100644 --- a/sdks/python/apache_beam/io/hadoopfilesystem.py +++ b/sdks/python/apache_beam/io/hadoopfilesystem.py @@ -105,7 +105,7 @@ def __init__(self, pipeline_options): Connection configuration is done by passing pipeline options. See :class:`~apache_beam.options.pipeline_options.HadoopFileSystemOptions`. """ - super(HadoopFileSystem, self).__init__(pipeline_options) + super().__init__(pipeline_options) logging.getLogger('hdfs.client').setLevel(logging.WARN) if pipeline_options is None: raise ValueError('pipeline_options is not set') diff --git a/sdks/python/apache_beam/io/iobase.py b/sdks/python/apache_beam/io/iobase.py index 8e81da31fb5b..c0d34d866f6c 100644 --- a/sdks/python/apache_beam/io/iobase.py +++ b/sdks/python/apache_beam/io/iobase.py @@ -874,7 +874,7 @@ def __init__(self, source): Args: source: Data source to read from. """ - super(Read, self).__init__() + super().__init__() self.source = source @staticmethod @@ -1048,7 +1048,7 @@ def __init__(self, sink): Args: sink: Data sink to write to. 
""" - super(Write, self).__init__() + super().__init__() self.sink = sink def display_data(self): @@ -1083,7 +1083,7 @@ def to_runner_api_parameter( timestamp_attribute=self.sink.timestamp_attribute) return (common_urns.composites.PUBSUB_WRITE.urn, payload) else: - return super(Write, self).to_runner_api_parameter(context) + return super().to_runner_api_parameter(context) @staticmethod @ptransform.PTransform.register_urn( @@ -1117,7 +1117,7 @@ class WriteImpl(ptransform.PTransform): """Implements the writing of custom sinks.""" def __init__(self, sink): # type: (Sink) -> None - super(WriteImpl, self).__init__() + super().__init__() self.sink = sink def expand(self, pcoll): @@ -1647,7 +1647,7 @@ class SDFBoundedSourceReader(PTransform): """ def __init__(self, data_to_display=None): self._data_to_display = data_to_display or {} - super(SDFBoundedSourceReader, self).__init__() + super().__init__() def _create_sdf_bounded_source_dofn(self): class SDFBoundedSourceDoFn(core.DoFn): diff --git a/sdks/python/apache_beam/io/iobase_test.py b/sdks/python/apache_beam/io/iobase_test.py index bde05664e380..eb9617cfae34 100644 --- a/sdks/python/apache_beam/io/iobase_test.py +++ b/sdks/python/apache_beam/io/iobase_test.py @@ -80,10 +80,9 @@ def test_simple_source_split(self): split_bundles = list( self.sdf_restriction_provider.split(element, restriction)) self.assertTrue( - all([ + all( isinstance(bundle._source_bundle, SourceBundle) - for bundle in split_bundles - ])) + for bundle in split_bundles)) splits = ([( bundle._source_bundle.start_position, @@ -101,10 +100,9 @@ def test_concat_source_split(self): sdf_concat_restriction_provider.split( initial_concat_source, restriction)) self.assertTrue( - all([ + all( isinstance(bundle._source_bundle, SourceBundle) - for bundle in split_bundles - ])) + for bundle in split_bundles)) splits = ([( bundle._source_bundle.start_position, bundle._source_bundle.stop_position) for bundle in split_bundles]) diff --git a/sdks/python/apache_beam/io/jdbc.py b/sdks/python/apache_beam/io/jdbc.py index 060a4d8d177a..afd39e019433 100644 --- a/sdks/python/apache_beam/io/jdbc.py +++ b/sdks/python/apache_beam/io/jdbc.py @@ -183,7 +183,7 @@ def __init__( :param expansion_service: The address (host:port) of the ExpansionService. """ - super(WriteToJdbc, self).__init__( + super().__init__( self.URN, NamedTupleBasedPayloadBuilder( JdbcConfigSchema( @@ -269,7 +269,7 @@ def __init__( passed as list of strings :param expansion_service: The address (host:port) of the ExpansionService. """ - super(ReadFromJdbc, self).__init__( + super().__init__( self.URN, NamedTupleBasedPayloadBuilder( JdbcConfigSchema( diff --git a/sdks/python/apache_beam/io/kafka.py b/sdks/python/apache_beam/io/kafka.py index 3e28cfe59839..8d58dc3a95a2 100644 --- a/sdks/python/apache_beam/io/kafka.py +++ b/sdks/python/apache_beam/io/kafka.py @@ -171,7 +171,7 @@ def __init__( 'timestamp_policy should be one of ' '[ProcessingTime, CreateTime, LogAppendTime]') - super(ReadFromKafka, self).__init__( + super().__init__( self.URN_WITH_METADATA if with_metadata else self.URN_WITHOUT_METADATA, NamedTupleBasedPayloadBuilder( ReadFromKafkaSchema( @@ -234,7 +234,7 @@ def __init__( Default: 'org.apache.kafka.common.serialization.ByteArraySerializer'. :param expansion_service: The address (host:port) of the ExpansionService. 
""" - super(WriteToKafka, self).__init__( + super().__init__( self.URN, NamedTupleBasedPayloadBuilder( WriteToKafkaSchema( diff --git a/sdks/python/apache_beam/io/kinesis.py b/sdks/python/apache_beam/io/kinesis.py index 4a70f76d83d1..aca0dc13cbaf 100644 --- a/sdks/python/apache_beam/io/kinesis.py +++ b/sdks/python/apache_beam/io/kinesis.py @@ -153,7 +153,7 @@ def __init__( Example: {'CollectionMaxCount': '1000', 'ConnectTimeout': '10000'} :param expansion_service: The address (host:port) of the ExpansionService. """ - super(WriteToKinesis, self).__init__( + super().__init__( self.URN, NamedTupleBasedPayloadBuilder( WriteToKinesisSchema( @@ -277,7 +277,7 @@ def __init__( ): logging.warning('Provided timestamp emplaced not in the past.') - super(ReadDataFromKinesis, self).__init__( + super().__init__( self.URN, NamedTupleBasedPayloadBuilder( ReadFromKinesisSchema( diff --git a/sdks/python/apache_beam/io/mongodbio.py b/sdks/python/apache_beam/io/mongodbio.py index c6f7d97f4705..f6c1bdf445f3 100644 --- a/sdks/python/apache_beam/io/mongodbio.py +++ b/sdks/python/apache_beam/io/mongodbio.py @@ -299,8 +299,7 @@ def split( # for desired bundle size, if desired chunk size smaller than 1mb, use # MongoDB default split size of 1mb. - if desired_bundle_size_in_mb < 1: - desired_bundle_size_in_mb = 1 + desired_bundle_size_in_mb = max(desired_bundle_size_in_mb, 1) is_initial_split = start_position is None and stop_position is None start_position, stop_position = self._replace_none_positions( @@ -765,7 +764,7 @@ def _flush(self): self.batch = [] def display_data(self): - res = super(_WriteMongoFn, self).display_data() + res = super().display_data() res["database"] = self.db res["collection"] = self.coll res["batch_size"] = self.batch_size diff --git a/sdks/python/apache_beam/io/parquetio.py b/sdks/python/apache_beam/io/parquetio.py index 188aab7ce83d..872140d5d7f2 100644 --- a/sdks/python/apache_beam/io/parquetio.py +++ b/sdks/python/apache_beam/io/parquetio.py @@ -124,7 +124,7 @@ def __init__( 'a.b', 'a.c', and 'a.d.e' """ - super(ReadFromParquetBatched, self).__init__() + super().__init__() self._source = _create_parquet_source( file_pattern, min_bundle_size, @@ -190,7 +190,7 @@ def __init__( A column name may be a prefix of a nested field, e.g. 'a' will select 'a.b', 'a.c', and 'a.d.e' """ - super(ReadFromParquet, self).__init__() + super().__init__() self._source = _create_parquet_source( file_pattern, min_bundle_size, @@ -237,7 +237,7 @@ def __init__( name and the value being the actual data. If False, it only returns the data. """ - super(ReadAllFromParquetBatched, self).__init__() + super().__init__() source_from_file = partial( _create_parquet_source, min_bundle_size=min_bundle_size, @@ -305,7 +305,7 @@ class _ParquetSource(filebasedsource.FileBasedSource): """A source for reading Parquet files. """ def __init__(self, file_pattern, min_bundle_size, validate, columns): - super(_ParquetSource, self).__init__( + super().__init__( file_pattern=file_pattern, min_bundle_size=min_bundle_size, validate=validate) @@ -447,7 +447,7 @@ def __init__( Returns: A WriteToParquet transform usable for writing. 
""" - super(WriteToParquet, self).__init__() + super().__init__() self._sink = \ _create_parquet_sink( file_path_prefix, @@ -509,7 +509,7 @@ def __init__( num_shards, shard_name_template, mime_type): - super(_ParquetSink, self).__init__( + super().__init__( file_path_prefix, file_name_suffix=file_name_suffix, num_shards=num_shards, @@ -535,7 +535,7 @@ def __init__( self._file_handle = None def open(self, temp_path): - self._file_handle = super(_ParquetSink, self).open(temp_path) + self._file_handle = super().open(temp_path) return pq.ParquetWriter( self._file_handle, self._schema, @@ -565,7 +565,7 @@ def close(self, writer): self._file_handle = None def display_data(self): - res = super(_ParquetSink, self).display_data() + res = super().display_data() res['codec'] = str(self._codec) res['schema'] = str(self._schema) res['row_group_buffer_size'] = str(self._row_group_buffer_size) diff --git a/sdks/python/apache_beam/io/parquetio_it_test.py b/sdks/python/apache_beam/io/parquetio_it_test.py index 0d3cd0d65313..052b54f3ebfb 100644 --- a/sdks/python/apache_beam/io/parquetio_it_test.py +++ b/sdks/python/apache_beam/io/parquetio_it_test.py @@ -118,7 +118,7 @@ def _generate_data(self, p, output_prefix, init_size, data_size): class ProducerFn(DoFn): def __init__(self, number): - super(ProducerFn, self).__init__() + super().__init__() self._number = number self._string_index = 0 self._number_index = 0 diff --git a/sdks/python/apache_beam/io/range_trackers.py b/sdks/python/apache_beam/io/range_trackers.py index 33b15d5ecfa2..adadc1a7ae41 100644 --- a/sdks/python/apache_beam/io/range_trackers.py +++ b/sdks/python/apache_beam/io/range_trackers.py @@ -48,7 +48,7 @@ class OffsetRangeTracker(iobase.RangeTracker): OFFSET_INFINITY = float('inf') def __init__(self, start, end): - super(OffsetRangeTracker, self).__init__() + super().__init__() if start is None: raise ValueError('Start offset must not be \'None\'') diff --git a/sdks/python/apache_beam/io/restriction_trackers.py b/sdks/python/apache_beam/io/restriction_trackers.py index f2b3e1d03d2f..06b06fa1ed34 100644 --- a/sdks/python/apache_beam/io/restriction_trackers.py +++ b/sdks/python/apache_beam/io/restriction_trackers.py @@ -170,6 +170,6 @@ def try_split(self, fraction_of_remainder): # stubs in the baseclass. def __getattribute__(self, name): if name.startswith('_') or name in ('try_split', ): - return super(UnsplittableRestrictionTracker, self).__getattribute__(name) + return super().__getattribute__(name) else: return getattr(self._underling_tracker, name) diff --git a/sdks/python/apache_beam/io/source_test_utils_test.py b/sdks/python/apache_beam/io/source_test_utils_test.py index 6d3f2e3a4a85..081a6fcb60ca 100644 --- a/sdks/python/apache_beam/io/source_test_utils_test.py +++ b/sdks/python/apache_beam/io/source_test_utils_test.py @@ -21,7 +21,7 @@ import tempfile import unittest -import apache_beam.io.source_test_utils as source_test_utils +from apache_beam.io import source_test_utils from apache_beam.io.filebasedsource_test import LineSource diff --git a/sdks/python/apache_beam/io/textio.py b/sdks/python/apache_beam/io/textio.py index 277b993aabe7..bcec43903c12 100644 --- a/sdks/python/apache_beam/io/textio.py +++ b/sdks/python/apache_beam/io/textio.py @@ -118,7 +118,7 @@ def __init__(self, Please refer to documentation in class `ReadFromText` for the rest of the arguments. 
""" - super(_TextSource, self).__init__( + super().__init__( file_pattern, min_bundle_size, compression_type=compression_type, @@ -139,7 +139,7 @@ def __init__(self, self._header_matcher, self._header_processor = header_processor_fns def display_data(self): - parent_dd = super(_TextSource, self).display_data() + parent_dd = super().display_data() parent_dd['strip_newline'] = DisplayDataItem( self._strip_trailing_newlines, label='Strip Trailing New Lines') parent_dd['buffer_size'] = DisplayDataItem( @@ -327,8 +327,7 @@ def _read_record(self, file_to_read, read_buffer): class _TextSourceWithFilename(_TextSource): def read_records(self, file_name, range_tracker): - records = super(_TextSourceWithFilename, - self).read_records(file_name, range_tracker) + records = super().read_records(file_name, range_tracker) for record in records: yield (file_name, record) @@ -383,7 +382,7 @@ def __init__(self, Returns: A _TextSink object usable for writing. """ - super(_TextSink, self).__init__( + super().__init__( file_path_prefix, file_name_suffix=file_name_suffix, num_shards=num_shards, @@ -396,7 +395,7 @@ def __init__(self, self._footer = footer def open(self, temp_path): - file_handle = super(_TextSink, self).open(temp_path) + file_handle = super().open(temp_path) if self._header is not None: file_handle.write(coders.ToBytesCoder().encode(self._header)) if self._append_trailing_newlines: @@ -408,10 +407,10 @@ def close(self, file_handle): file_handle.write(coders.ToBytesCoder().encode(self._footer)) if self._append_trailing_newlines: file_handle.write(b'\n') - super(_TextSink, self).close(file_handle) + super().close(file_handle) def display_data(self): - dd_parent = super(_TextSink, self).display_data() + dd_parent = super().display_data() dd_parent['append_newline'] = DisplayDataItem( self._append_trailing_newlines, label='Append Trailing New Lines') return dd_parent @@ -493,7 +492,7 @@ def __init__( name and the value being the actual data. If False, it only returns the data. """ - super(ReadAllFromText, self).__init__(**kwargs) + super().__init__(**kwargs) source_from_file = partial( _create_text_source, min_bundle_size=min_bundle_size, @@ -564,7 +563,7 @@ def __init__( coder (~apache_beam.coders.coders.Coder): Coder used to decode each line. 
""" - super(ReadFromText, self).__init__(**kwargs) + super().__init__(**kwargs) self._source = self._source_class( file_pattern, min_bundle_size, diff --git a/sdks/python/apache_beam/io/textio_test.py b/sdks/python/apache_beam/io/textio_test.py index 6be437054780..3818d36fc051 100644 --- a/sdks/python/apache_beam/io/textio_test.py +++ b/sdks/python/apache_beam/io/textio_test.py @@ -29,10 +29,10 @@ import zlib import apache_beam as beam -import apache_beam.io.source_test_utils as source_test_utils from apache_beam import coders from apache_beam.io import ReadAllFromText from apache_beam.io import iobase +from apache_beam.io import source_test_utils from apache_beam.io.filesystem import CompressionTypes from apache_beam.io.textio import _TextSink as TextSink from apache_beam.io.textio import _TextSource as TextSource @@ -1018,7 +1018,7 @@ def test_read_after_splitting_skip_header(self): class TextSinkTest(unittest.TestCase): def setUp(self): - super(TextSinkTest, self).setUp() + super().setUp() self.lines = [b'Line %d' % d for d in range(100)] self.tempdir = tempfile.mkdtemp() self.path = self._create_temp_file() diff --git a/sdks/python/apache_beam/io/tfrecordio.py b/sdks/python/apache_beam/io/tfrecordio.py index e699756360e0..d3bb0f8acf3f 100644 --- a/sdks/python/apache_beam/io/tfrecordio.py +++ b/sdks/python/apache_beam/io/tfrecordio.py @@ -168,7 +168,7 @@ class _TFRecordSource(FileBasedSource): """ def __init__(self, file_pattern, coder, compression_type, validate): """Initialize a TFRecordSource. See ReadFromTFRecord for details.""" - super(_TFRecordSource, self).__init__( + super().__init__( file_pattern=file_pattern, compression_type=compression_type, splittable=False, @@ -218,7 +218,7 @@ def __init__( name and the value being the actual data. If False, it only returns the data. """ - super(ReadAllFromTFRecord, self).__init__() + super().__init__() source_from_file = partial( _create_tfrecordio_source, compression_type=compression_type, @@ -259,7 +259,7 @@ def __init__( Returns: A ReadFromTFRecord transform object. """ - super(ReadFromTFRecord, self).__init__() + super().__init__() self._source = _TFRecordSource( file_pattern, coder, compression_type, validate) @@ -283,7 +283,7 @@ def __init__( compression_type): """Initialize a TFRecordSink. See WriteToTFRecord for details.""" - super(_TFRecordSink, self).__init__( + super().__init__( file_path_prefix=file_path_prefix, coder=coder, file_name_suffix=file_name_suffix, @@ -330,7 +330,7 @@ def __init__( Returns: A WriteToTFRecord transform object. """ - super(WriteToTFRecord, self).__init__() + super().__init__() self._sink = _TFRecordSink( file_path_prefix, coder, diff --git a/sdks/python/apache_beam/metrics/cells.py b/sdks/python/apache_beam/metrics/cells.py index 0c6b8f3dc21a..640298978f15 100644 --- a/sdks/python/apache_beam/metrics/cells.py +++ b/sdks/python/apache_beam/metrics/cells.py @@ -106,7 +106,7 @@ class CounterCell(MetricCell): This class is thread safe. """ def __init__(self, *args): - super(CounterCell, self).__init__(*args) + super().__init__(*args) self.value = CounterAggregator.identity_element() def reset(self): @@ -170,7 +170,7 @@ class DistributionCell(MetricCell): This class is thread safe. """ def __init__(self, *args): - super(DistributionCell, self).__init__(*args) + super().__init__(*args) self.data = DistributionAggregator.identity_element() def reset(self): @@ -229,7 +229,7 @@ class GaugeCell(MetricCell): This class is thread safe. 
""" def __init__(self, *args): - super(GaugeCell, self).__init__(*args) + super().__init__(*args) self.data = GaugeAggregator.identity_element() def reset(self): diff --git a/sdks/python/apache_beam/metrics/execution.py b/sdks/python/apache_beam/metrics/execution.py index 940086044976..0b404de87296 100644 --- a/sdks/python/apache_beam/metrics/execution.py +++ b/sdks/python/apache_beam/metrics/execution.py @@ -78,7 +78,7 @@ def __init__(self, step, metric, labels=None): """ self.step = step self.metric = metric - self.labels = labels if labels else dict() + self.labels = labels if labels else {} def __eq__(self, other): return ( @@ -239,7 +239,7 @@ class MetricsContainer(object): def __init__(self, step_name): self.step_name = step_name self.lock = threading.Lock() - self.metrics = dict() # type: Dict[_TypedMetricName, MetricCell] + self.metrics = {} # type: Dict[_TypedMetricName, MetricCell] def get_counter(self, metric_name): # type: (MetricName) -> CounterCell diff --git a/sdks/python/apache_beam/metrics/metric.py b/sdks/python/apache_beam/metrics/metric.py index f4896e9decf1..fca1fd01b04c 100644 --- a/sdks/python/apache_beam/metrics/metric.py +++ b/sdks/python/apache_beam/metrics/metric.py @@ -122,7 +122,7 @@ class DelegatingCounter(Counter): """Metrics Counter that Delegates functionality to MetricsEnvironment.""" def __init__(self, metric_name, process_wide=False): # type: (MetricName, bool) -> None - super(Metrics.DelegatingCounter, self).__init__(metric_name) + super().__init__(metric_name) self.inc = MetricUpdater( # type: ignore[assignment] cells.CounterCell, metric_name, @@ -133,14 +133,14 @@ class DelegatingDistribution(Distribution): """Metrics Distribution Delegates functionality to MetricsEnvironment.""" def __init__(self, metric_name): # type: (MetricName) -> None - super(Metrics.DelegatingDistribution, self).__init__(metric_name) + super().__init__(metric_name) self.update = MetricUpdater(cells.DistributionCell, metric_name) # type: ignore[assignment] class DelegatingGauge(Gauge): """Metrics Gauge that Delegates functionality to MetricsEnvironment.""" def __init__(self, metric_name): # type: (MetricName) -> None - super(Metrics.DelegatingGauge, self).__init__(metric_name) + super().__init__(metric_name) self.set = MetricUpdater(cells.GaugeCell, metric_name) # type: ignore[assignment] diff --git a/sdks/python/apache_beam/metrics/monitoring_infos.py b/sdks/python/apache_beam/metrics/monitoring_infos.py index 2c909598f094..ba3100c84f31 100644 --- a/sdks/python/apache_beam/metrics/monitoring_infos.py +++ b/sdks/python/apache_beam/metrics/monitoring_infos.py @@ -189,7 +189,7 @@ def int64_counter(urn, metric, ptransform=None, pcollection=None, labels=None): ptransform: The ptransform id used as a label. pcollection: The pcollection id used as a label. """ - labels = labels or dict() + labels = labels or {} labels.update(create_labels(ptransform=ptransform, pcollection=pcollection)) if isinstance(metric, int): metric = coders.VarIntCoder().encode(metric) @@ -292,7 +292,7 @@ def create_monitoring_info(urn, type_urn, payload, labels=None): labels: The label dictionary to use in the MonitoringInfo. 
""" return metrics_pb2.MonitoringInfo( - urn=urn, type=type_urn, labels=labels or dict(), payload=payload) + urn=urn, type=type_urn, labels=labels or {}, payload=payload) def is_counter(monitoring_info_proto): diff --git a/sdks/python/apache_beam/ml/gcp/videointelligenceml.py b/sdks/python/apache_beam/ml/gcp/videointelligenceml.py index bc0aa0845923..fb0d7f045dde 100644 --- a/sdks/python/apache_beam/ml/gcp/videointelligenceml.py +++ b/sdks/python/apache_beam/ml/gcp/videointelligenceml.py @@ -95,7 +95,7 @@ def __init__( videointelligenceml.AnnotateVideo(features, context_side_input=beam.pvalue.AsDict(context_side_input))) """ - super(AnnotateVideo, self).__init__() + super().__init__() self.features = features self.location_id = location_id self.metadata = metadata @@ -120,7 +120,7 @@ class _VideoAnnotateFn(DoFn): (``google.cloud.videointelligence_v1.types.AnnotateVideoResponse``). """ def __init__(self, features, location_id, metadata, timeout): - super(_VideoAnnotateFn, self).__init__() + super().__init__() self._client = None self.features = features self.location_id = location_id @@ -186,7 +186,7 @@ def __init__(self, features, location_id=None, metadata=None, timeout=120): The time in seconds to wait for the response from the Video Intelligence API """ - super(AnnotateVideoWithContext, self).__init__( + super().__init__( features=features, location_id=location_id, metadata=metadata, @@ -210,7 +210,7 @@ class _VideoAnnotateFnWithContext(_VideoAnnotateFn): (``google.cloud.videointelligence_v1.types.AnnotateVideoResponse``). """ def __init__(self, features, location_id, metadata, timeout): - super(_VideoAnnotateFnWithContext, self).__init__( + super().__init__( features=features, location_id=location_id, metadata=metadata, diff --git a/sdks/python/apache_beam/ml/gcp/visionml.py b/sdks/python/apache_beam/ml/gcp/visionml.py index 0fb45ce72fdc..3e556b903c44 100644 --- a/sdks/python/apache_beam/ml/gcp/visionml.py +++ b/sdks/python/apache_beam/ml/gcp/visionml.py @@ -122,7 +122,7 @@ def __init__( metadata: (Optional[Sequence[Tuple[str, str]]]): Optional. Additional metadata that is provided to the method. """ - super(AnnotateImage, self).__init__() + super().__init__() self.features = features self.retry = retry self.timeout = timeout @@ -219,7 +219,7 @@ def __init__( metadata: (Optional[Sequence[Tuple[str, str]]]): Optional. Additional metadata that is provided to the method. """ - super(AnnotateImageWithContext, self).__init__( + super().__init__( features=features, retry=retry, timeout=timeout, @@ -265,7 +265,7 @@ class _ImageAnnotateFn(DoFn): Returns ``google.cloud.vision.types.BatchAnnotateImagesResponse``. 
""" def __init__(self, features, retry, timeout, client_options, metadata): - super(_ImageAnnotateFn, self).__init__() + super().__init__() self._client = None self.features = features self.retry = retry diff --git a/sdks/python/apache_beam/options/pipeline_options.py b/sdks/python/apache_beam/options/pipeline_options.py index bba56efe8d04..c8f31e395a67 100644 --- a/sdks/python/apache_beam/options/pipeline_options.py +++ b/sdks/python/apache_beam/options/pipeline_options.py @@ -125,7 +125,7 @@ def add_value_provider_argument(self, *args, **kwargs): def error(self, message): if message.startswith('ambiguous option: '): return - super(_BeamArgumentParser, self).error(message) + super().error(message) class PipelineOptions(HasDisplayData): @@ -391,7 +391,7 @@ def __getattr__(self, name): def __setattr__(self, name, value): if name in ('_flags', '_all_options', '_visible_options'): - super(PipelineOptions, self).__setattr__(name, value) + super().__setattr__(name, value) elif name in self._visible_option_list(): self._all_options[name] = value else: diff --git a/sdks/python/apache_beam/pipeline.py b/sdks/python/apache_beam/pipeline.py index 709b611f249b..3cf0a5162f2f 100644 --- a/sdks/python/apache_beam/pipeline.py +++ b/sdks/python/apache_beam/pipeline.py @@ -464,15 +464,15 @@ def visit_transform(self, transform_node): self.visit(InputOutputUpdater(self)) - for transform in output_replacements: - for tag, output in output_replacements[transform]: + for transform, output_replacement in output_replacements.items(): + for tag, output in output_replacement: transform.replace_output(output, tag=tag) - for transform in input_replacements: - transform.replace_inputs(input_replacements[transform]) + for transform, input_replacement in input_replacements.items(): + transform.replace_inputs(input_replacement) - for transform in side_input_replacements: - transform.replace_side_inputs(side_input_replacements[transform]) + for transform, side_input_replacement in side_input_replacements.items(): + transform.replace_side_inputs(side_input_replacement) def _check_replacement(self, override): # type: (PTransformOverride) -> None diff --git a/sdks/python/apache_beam/pipeline_test.py b/sdks/python/apache_beam/pipeline_test.py index b9a068debf08..8f8f44b78ad5 100644 --- a/sdks/python/apache_beam/pipeline_test.py +++ b/sdks/python/apache_beam/pipeline_test.py @@ -997,7 +997,7 @@ def expand(self, p): return p | beam.Create([None]) def display_data(self): # type: () -> dict - parent_dd = super(MyParentTransform, self).display_data() + parent_dd = super().display_data() parent_dd['p_dd_string'] = DisplayDataItem( 'p_dd_string_value', label='p_dd_string_label') parent_dd['p_dd_string_2'] = DisplayDataItem('p_dd_string_value_2') @@ -1011,7 +1011,7 @@ def expand(self, p): return p | beam.Create([None]) def display_data(self): # type: () -> dict - parent_dd = super(MyPTransform, self).display_data() + parent_dd = super().display_data() parent_dd['dd_string'] = DisplayDataItem( 'dd_string_value', label='dd_string_label') parent_dd['dd_string_2'] = DisplayDataItem('dd_string_value_2') diff --git a/sdks/python/apache_beam/pvalue.py b/sdks/python/apache_beam/pvalue.py index 2b593e4aaf98..6752931c68a8 100644 --- a/sdks/python/apache_beam/pvalue.py +++ b/sdks/python/apache_beam/pvalue.py @@ -492,14 +492,14 @@ class AsSingleton(AsSideInput): def __init__(self, pcoll, default_value=_NO_DEFAULT): # type: (PCollection, Any) -> None - super(AsSingleton, self).__init__(pcoll) + super().__init__(pcoll) self.default_value = 
default_value def __repr__(self): return 'AsSingleton(%s)' % self.pvalue def _view_options(self): - base = super(AsSingleton, self)._view_options() + base = super()._view_options() if self.default_value != AsSingleton._NO_DEFAULT: return dict(base, default=self.default_value) return base diff --git a/sdks/python/apache_beam/runners/common.py b/sdks/python/apache_beam/runners/common.py index ba1745193753..7d5e7e3b3763 100644 --- a/sdks/python/apache_beam/runners/common.py +++ b/sdks/python/apache_beam/runners/common.py @@ -112,7 +112,7 @@ def __init__(self, step_name, user_name, system_name): user_name: The full user-given name of the step (e.g. Foo/Bar/ParDo(Far)). system_name: The step name in the optimized graph (e.g. s2-1). """ - super(DataflowNameContext, self).__init__(step_name) + super().__init__(step_name) self.user_name = user_name self.system_name = system_name @@ -557,7 +557,7 @@ def __init__(self, signature # type: DoFnSignature ): # type: (...) -> None - super(SimpleInvoker, self).__init__(output_processor, signature) + super().__init__(output_processor, signature) self.process_method = signature.process_method.method_value def invoke_process(self, @@ -586,7 +586,7 @@ def __init__(self, user_state_context, # type: Optional[userstate.UserStateContext] bundle_finalizer_param # type: Optional[core._BundleFinalizerParam] ): - super(PerWindowInvoker, self).__init__(output_processor, signature) + super().__init__(output_processor, signature) self.side_inputs = side_inputs self.context = context self.process_method = signature.process_method.method_value diff --git a/sdks/python/apache_beam/runners/dataflow/dataflow_metrics.py b/sdks/python/apache_beam/runners/dataflow/dataflow_metrics.py index e5728e8f24b5..41c13b2074f5 100644 --- a/sdks/python/apache_beam/runners/dataflow/dataflow_metrics.py +++ b/sdks/python/apache_beam/runners/dataflow/dataflow_metrics.py @@ -75,7 +75,7 @@ def __init__(self, dataflow_client=None, job_result=None, job_graph=None): job_graph: apiclient.Job instance to be able to translate between internal step names (e.g. "s2"), and user step names (e.g. "split"). """ - super(DataflowMetrics, self).__init__() + super().__init__() self._dataflow_client = dataflow_client self.job_result = job_result self._queried_after_termination = False @@ -122,7 +122,7 @@ def _get_metric_key(self, metric): """Populate the MetricKey object for a queried metric result.""" step = "" name = metric.name.name # Always extract a name - labels = dict() + labels = {} try: # Try to extract the user step name. 
# If ValueError is thrown within this try-block, it is because of # one of the following: diff --git a/sdks/python/apache_beam/runners/dataflow/dataflow_runner.py b/sdks/python/apache_beam/runners/dataflow/dataflow_runner.py index bbaf52cdd809..1e2d1ec5bf1f 100644 --- a/sdks/python/apache_beam/runners/dataflow/dataflow_runner.py +++ b/sdks/python/apache_beam/runners/dataflow/dataflow_runner.py @@ -138,7 +138,7 @@ def is_fnapi_compatible(self): def apply(self, transform, input, options): self._maybe_add_unified_worker_missing_options(options) - return super(DataflowRunner, self).apply(transform, input, options) + return super().apply(transform, input, options) def _get_unique_step_name(self): self._unique_step_id += 1 @@ -1667,5 +1667,5 @@ def __repr__(self): class DataflowRuntimeException(Exception): """Indicates an error has occurred in running this pipeline.""" def __init__(self, msg, result): - super(DataflowRuntimeException, self).__init__(msg) + super().__init__(msg) self.result = result diff --git a/sdks/python/apache_beam/runners/dataflow/dataflow_runner_test.py b/sdks/python/apache_beam/runners/dataflow/dataflow_runner_test.py index e7ce71b4f247..0f974a091183 100644 --- a/sdks/python/apache_beam/runners/dataflow/dataflow_runner_test.py +++ b/sdks/python/apache_beam/runners/dataflow/dataflow_runner_test.py @@ -71,7 +71,7 @@ # composite transforms support display data. class SpecialParDo(beam.ParDo): def __init__(self, fn, now): - super(SpecialParDo, self).__init__(fn) + super().__init__(fn) self.fn = fn self.now = now diff --git a/sdks/python/apache_beam/runners/dataflow/internal/apiclient.py b/sdks/python/apache_beam/runners/dataflow/internal/apiclient.py index fd5b001e4016..7e2ba13714ab 100644 --- a/sdks/python/apache_beam/runners/dataflow/internal/apiclient.py +++ b/sdks/python/apache_beam/runners/dataflow/internal/apiclient.py @@ -163,7 +163,7 @@ def __init__( self.proto.userAgent = dataflow.Environment.UserAgentValue() self.local = 'localhost' in self.google_cloud_options.dataflow_endpoint self._proto_pipeline = proto_pipeline - self._sdk_image_overrides = _sdk_image_overrides or dict() + self._sdk_image_overrides = _sdk_image_overrides or {} if self.google_cloud_options.service_account_email: self.proto.serviceAccountEmail = ( @@ -554,8 +554,7 @@ def _get_sdk_image_overrides(self, pipeline_options): worker_options = pipeline_options.view_as(WorkerOptions) sdk_overrides = worker_options.sdk_harness_container_image_overrides return ( - dict(s.split(',', 1) - for s in sdk_overrides) if sdk_overrides else dict()) + dict(s.split(',', 1) for s in sdk_overrides) if sdk_overrides else {}) @retry.with_exponential_backoff( retry_filter=retry.retry_on_server_errors_and_timeout_filter) @@ -1031,7 +1030,7 @@ def translate_scalar_counter_float(accumulator, metric_update_proto): class _LegacyDataflowStager(Stager): def __init__(self, dataflow_application_client): - super(_LegacyDataflowStager, self).__init__() + super().__init__() self._dataflow_application_client = dataflow_application_client def stage_artifact(self, local_path_to_artifact, artifact_name): diff --git a/sdks/python/apache_beam/runners/dataflow/internal/apiclient_test.py b/sdks/python/apache_beam/runners/dataflow/internal/apiclient_test.py index b63da701a751..59eaf583c45c 100644 --- a/sdks/python/apache_beam/runners/dataflow/internal/apiclient_test.py +++ b/sdks/python/apache_beam/runners/dataflow/internal/apiclient_test.py @@ -269,7 +269,7 @@ def test_dataflow_container_image_override(self): # Accessing non-public method for 
testing. apiclient.DataflowApplicationClient._apply_sdk_environment_overrides( - proto_pipeline, dict(), pipeline_options) + proto_pipeline, {}, pipeline_options) from apache_beam.utils import proto_utils found_override = False @@ -300,7 +300,7 @@ def test_non_apache_container_not_overridden(self): # Accessing non-public method for testing. apiclient.DataflowApplicationClient._apply_sdk_environment_overrides( - proto_pipeline, dict(), pipeline_options) + proto_pipeline, {}, pipeline_options) self.assertIsNotNone(2, len(proto_pipeline.components.environments)) @@ -336,7 +336,7 @@ def test_pipeline_sdk_not_overridden(self): # Accessing non-public method for testing. apiclient.DataflowApplicationClient._apply_sdk_environment_overrides( - proto_pipeline, dict(), pipeline_options) + proto_pipeline, {}, pipeline_options) self.assertIsNotNone(2, len(proto_pipeline.components.environments)) diff --git a/sdks/python/apache_beam/runners/dataflow/internal/clients/dataflow/dataflow_v1b3_client.py b/sdks/python/apache_beam/runners/dataflow/internal/clients/dataflow/dataflow_v1b3_client.py index a48b9ea47c40..985934dd3653 100644 --- a/sdks/python/apache_beam/runners/dataflow/internal/clients/dataflow/dataflow_v1b3_client.py +++ b/sdks/python/apache_beam/runners/dataflow/internal/clients/dataflow/dataflow_v1b3_client.py @@ -47,7 +47,7 @@ def __init__(self, url='', credentials=None, additional_http_headers=None, response_encoding=None): """Create a new dataflow handle.""" url = url or self.BASE_URL - super(DataflowV1b3, self).__init__( + super().__init__( url, credentials=credentials, get_credentials=get_credentials, http=http, model=model, log_request=log_request, log_response=log_response, @@ -83,7 +83,7 @@ class ProjectsCatalogTemplatesTemplateVersionsService(base_api.BaseApiService): _NAME = 'projects_catalogTemplates_templateVersions' def __init__(self, client): - super(DataflowV1b3.ProjectsCatalogTemplatesTemplateVersionsService, self).__init__(client) + super().__init__(client) self._upload_configs = { } @@ -120,7 +120,7 @@ class ProjectsCatalogTemplatesService(base_api.BaseApiService): _NAME = 'projects_catalogTemplates' def __init__(self, client): - super(DataflowV1b3.ProjectsCatalogTemplatesService, self).__init__(client) + super().__init__(client) self._upload_configs = { } @@ -265,7 +265,7 @@ class ProjectsJobsDebugService(base_api.BaseApiService): _NAME = 'projects_jobs_debug' def __init__(self, client): - super(DataflowV1b3.ProjectsJobsDebugService, self).__init__(client) + super().__init__(client) self._upload_configs = { } @@ -327,7 +327,7 @@ class ProjectsJobsMessagesService(base_api.BaseApiService): _NAME = 'projects_jobs_messages' def __init__(self, client): - super(DataflowV1b3.ProjectsJobsMessagesService, self).__init__(client) + super().__init__(client) self._upload_configs = { } @@ -363,7 +363,7 @@ class ProjectsJobsWorkItemsService(base_api.BaseApiService): _NAME = 'projects_jobs_workItems' def __init__(self, client): - super(DataflowV1b3.ProjectsJobsWorkItemsService, self).__init__(client) + super().__init__(client) self._upload_configs = { } @@ -425,7 +425,7 @@ class ProjectsJobsService(base_api.BaseApiService): _NAME = 'projects_jobs' def __init__(self, client): - super(DataflowV1b3.ProjectsJobsService, self).__init__(client) + super().__init__(client) self._upload_configs = { } @@ -617,7 +617,7 @@ class ProjectsLocationsFlexTemplatesService(base_api.BaseApiService): _NAME = 'projects_locations_flexTemplates' def __init__(self, client): - 
super(DataflowV1b3.ProjectsLocationsFlexTemplatesService, self).__init__(client) + super().__init__(client) self._upload_configs = { } @@ -653,7 +653,7 @@ class ProjectsLocationsJobsDebugService(base_api.BaseApiService): _NAME = 'projects_locations_jobs_debug' def __init__(self, client): - super(DataflowV1b3.ProjectsLocationsJobsDebugService, self).__init__(client) + super().__init__(client) self._upload_configs = { } @@ -715,7 +715,7 @@ class ProjectsLocationsJobsMessagesService(base_api.BaseApiService): _NAME = 'projects_locations_jobs_messages' def __init__(self, client): - super(DataflowV1b3.ProjectsLocationsJobsMessagesService, self).__init__(client) + super().__init__(client) self._upload_configs = { } @@ -751,7 +751,7 @@ class ProjectsLocationsJobsSnapshotsService(base_api.BaseApiService): _NAME = 'projects_locations_jobs_snapshots' def __init__(self, client): - super(DataflowV1b3.ProjectsLocationsJobsSnapshotsService, self).__init__(client) + super().__init__(client) self._upload_configs = { } @@ -787,7 +787,7 @@ class ProjectsLocationsJobsStagesService(base_api.BaseApiService): _NAME = 'projects_locations_jobs_stages' def __init__(self, client): - super(DataflowV1b3.ProjectsLocationsJobsStagesService, self).__init__(client) + super().__init__(client) self._upload_configs = { } @@ -823,7 +823,7 @@ class ProjectsLocationsJobsWorkItemsService(base_api.BaseApiService): _NAME = 'projects_locations_jobs_workItems' def __init__(self, client): - super(DataflowV1b3.ProjectsLocationsJobsWorkItemsService, self).__init__(client) + super().__init__(client) self._upload_configs = { } @@ -885,7 +885,7 @@ class ProjectsLocationsJobsService(base_api.BaseApiService): _NAME = 'projects_locations_jobs' def __init__(self, client): - super(DataflowV1b3.ProjectsLocationsJobsService, self).__init__(client) + super().__init__(client) self._upload_configs = { } @@ -1077,7 +1077,7 @@ class ProjectsLocationsSnapshotsService(base_api.BaseApiService): _NAME = 'projects_locations_snapshots' def __init__(self, client): - super(DataflowV1b3.ProjectsLocationsSnapshotsService, self).__init__(client) + super().__init__(client) self._upload_configs = { } @@ -1165,7 +1165,7 @@ class ProjectsLocationsSqlService(base_api.BaseApiService): _NAME = 'projects_locations_sql' def __init__(self, client): - super(DataflowV1b3.ProjectsLocationsSqlService, self).__init__(client) + super().__init__(client) self._upload_configs = { } @@ -1201,7 +1201,7 @@ class ProjectsLocationsTemplatesService(base_api.BaseApiService): _NAME = 'projects_locations_templates' def __init__(self, client): - super(DataflowV1b3.ProjectsLocationsTemplatesService, self).__init__(client) + super().__init__(client) self._upload_configs = { } @@ -1289,7 +1289,7 @@ class ProjectsLocationsService(base_api.BaseApiService): _NAME = 'projects_locations' def __init__(self, client): - super(DataflowV1b3.ProjectsLocationsService, self).__init__(client) + super().__init__(client) self._upload_configs = { } @@ -1325,7 +1325,7 @@ class ProjectsSnapshotsService(base_api.BaseApiService): _NAME = 'projects_snapshots' def __init__(self, client): - super(DataflowV1b3.ProjectsSnapshotsService, self).__init__(client) + super().__init__(client) self._upload_configs = { } @@ -1387,7 +1387,7 @@ class ProjectsTemplateVersionsService(base_api.BaseApiService): _NAME = 'projects_templateVersions' def __init__(self, client): - super(DataflowV1b3.ProjectsTemplateVersionsService, self).__init__(client) + super().__init__(client) self._upload_configs = { } @@ -1424,7 +1424,7 @@ class 
ProjectsTemplatesService(base_api.BaseApiService): _NAME = 'projects_templates' def __init__(self, client): - super(DataflowV1b3.ProjectsTemplatesService, self).__init__(client) + super().__init__(client) self._upload_configs = { } @@ -1512,7 +1512,7 @@ class ProjectsService(base_api.BaseApiService): _NAME = 'projects' def __init__(self, client): - super(DataflowV1b3.ProjectsService, self).__init__(client) + super().__init__(client) self._upload_configs = { } diff --git a/sdks/python/apache_beam/runners/dataflow/native_io/iobase.py b/sdks/python/apache_beam/runners/dataflow/native_io/iobase.py index c545ecd2b176..3d1afe546901 100644 --- a/sdks/python/apache_beam/runners/dataflow/native_io/iobase.py +++ b/sdks/python/apache_beam/runners/dataflow/native_io/iobase.py @@ -334,7 +334,7 @@ def __init__(self, sink): Args: sink: Sink to use for the write """ - super(_NativeWrite, self).__init__() + super().__init__() self.sink = sink def expand(self, pcoll): diff --git a/sdks/python/apache_beam/runners/dataflow/ptransform_overrides.py b/sdks/python/apache_beam/runners/dataflow/ptransform_overrides.py index add888522716..e8a660c0ccf9 100644 --- a/sdks/python/apache_beam/runners/dataflow/ptransform_overrides.py +++ b/sdks/python/apache_beam/runners/dataflow/ptransform_overrides.py @@ -198,7 +198,7 @@ def expand(self, pbegin): class WriteToBigQueryPTransformOverride(PTransformOverride): def __init__(self, pipeline, options): - super(WriteToBigQueryPTransformOverride, self).__init__() + super().__init__() self.options = options self.outputs = [] @@ -214,7 +214,7 @@ def _check_bq_outputs(self, pipeline): gives a user-friendsly error. """ # Imported here to avoid circular dependencies. - # pylint: disable=wrong-import-order, wrong-import-position + # pylint: disable=wrong-import-order, wrong-import-position, unused-import from apache_beam.pipeline import PipelineVisitor from apache_beam.io import WriteToBigQuery diff --git a/sdks/python/apache_beam/runners/dataflow/test_dataflow_runner.py b/sdks/python/apache_beam/runners/dataflow/test_dataflow_runner.py index b63249061b6a..d4743a558f3e 100644 --- a/sdks/python/apache_beam/runners/dataflow/test_dataflow_runner.py +++ b/sdks/python/apache_beam/runners/dataflow/test_dataflow_runner.py @@ -50,8 +50,7 @@ def run_pipeline(self, pipeline, options): # send this option to remote executors. test_options.on_success_matcher = None - self.result = super(TestDataflowRunner, - self).run_pipeline(pipeline, options) + self.result = super().run_pipeline(pipeline, options) if self.result.has_job: # TODO(markflyhigh)(BEAM-1890): Use print since Nose dosen't show logs # in some cases. diff --git a/sdks/python/apache_beam/runners/direct/consumer_tracking_pipeline_visitor_test.py b/sdks/python/apache_beam/runners/direct/consumer_tracking_pipeline_visitor_test.py index 5d8f21c23f88..7eba868afba0 100644 --- a/sdks/python/apache_beam/runners/direct/consumer_tracking_pipeline_visitor_test.py +++ b/sdks/python/apache_beam/runners/direct/consumer_tracking_pipeline_visitor_test.py @@ -147,13 +147,15 @@ def test_visitor_not_sorted(self): # Convert to string to assert they are equal. 
out_of_order_labels = { - str(k): [str(t) for t in v_out_of_order.value_to_consumers[k]] - for k in v_out_of_order.value_to_consumers + str(k): [str(t) for t in value_to_consumer] + for k, + value_to_consumer in v_out_of_order.value_to_consumers.items() } original_labels = { - str(k): [str(t) for t in v_original.value_to_consumers[k]] - for k in v_original.value_to_consumers + str(k): [str(t) for t in value_to_consumer] + for k, + value_to_consumer in v_original.value_to_consumers.items() } self.assertDictEqual(out_of_order_labels, original_labels) diff --git a/sdks/python/apache_beam/runners/direct/direct_runner.py b/sdks/python/apache_beam/runners/direct/direct_runner.py index 4149ea2bf402..3b40ad14f723 100644 --- a/sdks/python/apache_beam/runners/direct/direct_runner.py +++ b/sdks/python/apache_beam/runners/direct/direct_runner.py @@ -154,7 +154,7 @@ def expand(self, pcoll): class _GroupAlsoByWindow(ParDo): """The GroupAlsoByWindow transform.""" def __init__(self, windowing): - super(_GroupAlsoByWindow, self).__init__(_GroupAlsoByWindowDoFn(windowing)) + super().__init__(_GroupAlsoByWindowDoFn(windowing)) self.windowing = windowing def expand(self, pcoll): @@ -166,7 +166,7 @@ class _GroupAlsoByWindowDoFn(DoFn): # TODO(robertwb): Support combiner lifting. def __init__(self, windowing): - super(_GroupAlsoByWindowDoFn, self).__init__() + super().__init__() self.windowing = windowing def infer_output_type(self, input_type): @@ -254,7 +254,7 @@ def expand(self, pcoll): value_type]]]) gbk_output_type = typehints.KV[key_type, typehints.Iterable[value_type]] - # pylint: disable=bad-continuation + # pylint: disable=bad-option-value return ( pcoll | 'ReifyWindows' >> ( @@ -565,7 +565,7 @@ def visit_transform(self, applied_ptransform): class DirectPipelineResult(PipelineResult): """A DirectPipelineResult provides access to info about a pipeline.""" def __init__(self, executor, evaluation_context): - super(DirectPipelineResult, self).__init__(PipelineState.RUNNING) + super().__init__(PipelineState.RUNNING) self._executor = executor self._evaluation_context = evaluation_context diff --git a/sdks/python/apache_beam/runners/direct/direct_userstate.py b/sdks/python/apache_beam/runners/direct/direct_userstate.py index 715355cba2a8..196a9a048d7a 100644 --- a/sdks/python/apache_beam/runners/direct/direct_userstate.py +++ b/sdks/python/apache_beam/runners/direct/direct_userstate.py @@ -62,8 +62,7 @@ def _decode(self, value): class ReadModifyWriteRuntimeState(DirectRuntimeState, userstate.ReadModifyWriteRuntimeState): def __init__(self, state_spec, state_tag, current_value_accessor): - super(ReadModifyWriteRuntimeState, - self).__init__(state_spec, state_tag, current_value_accessor) + super().__init__(state_spec, state_tag, current_value_accessor) self._value = UNREAD_VALUE self._cleared = False self._modified = False @@ -96,8 +95,7 @@ def is_modified(self): class BagRuntimeState(DirectRuntimeState, userstate.BagRuntimeState): def __init__(self, state_spec, state_tag, current_value_accessor): - super(BagRuntimeState, - self).__init__(state_spec, state_tag, current_value_accessor) + super().__init__(state_spec, state_tag, current_value_accessor) self._cached_value = UNREAD_VALUE self._cleared = False self._new_values = [] @@ -122,8 +120,7 @@ def clear(self): class SetRuntimeState(DirectRuntimeState, userstate.SetRuntimeState): def __init__(self, state_spec, state_tag, current_value_accessor): - super(SetRuntimeState, - self).__init__(state_spec, state_tag, current_value_accessor) + 
super().__init__(state_spec, state_tag, current_value_accessor) self._current_accumulator = UNREAD_VALUE self._modified = False @@ -155,8 +152,7 @@ class CombiningValueRuntimeState(DirectRuntimeState, userstate.CombiningValueRuntimeState): """Combining value state interface object passed to user code.""" def __init__(self, state_spec, state_tag, current_value_accessor): - super(CombiningValueRuntimeState, - self).__init__(state_spec, state_tag, current_value_accessor) + super().__init__(state_spec, state_tag, current_value_accessor) self._current_accumulator = UNREAD_VALUE self._modified = False self._combine_fn = copy.deepcopy(state_spec.combine_fn) diff --git a/sdks/python/apache_beam/runners/direct/evaluation_context.py b/sdks/python/apache_beam/runners/direct/evaluation_context.py index 8d50d689cc7b..fbe59b072ae4 100644 --- a/sdks/python/apache_beam/runners/direct/evaluation_context.py +++ b/sdks/python/apache_beam/runners/direct/evaluation_context.py @@ -458,7 +458,7 @@ def shutdown(self): class DirectUnmergedState(InMemoryUnmergedState): """UnmergedState implementation for the DirectRunner.""" def __init__(self): - super(DirectUnmergedState, self).__init__(defensive_copy=False) + super().__init__(defensive_copy=False) class DirectStepContext(object): diff --git a/sdks/python/apache_beam/runners/direct/executor.py b/sdks/python/apache_beam/runners/direct/executor.py index 8b47b0b602bf..0ab3033d68b5 100644 --- a/sdks/python/apache_beam/runners/direct/executor.py +++ b/sdks/python/apache_beam/runners/direct/executor.py @@ -67,7 +67,7 @@ def __init__( self, queue, # type: queue.Queue[_ExecutorService.CallableTask] index): - super(_ExecutorService._ExecutorServiceWorker, self).__init__() + super().__init__() self.queue = queue self._index = index self._default_name = 'ExecutorServiceWorker-' + str(index) @@ -188,14 +188,14 @@ class _SerialEvaluationState(_TransformEvaluationState): _GroupByKeyOnly. """ def __init__(self, executor_service, scheduled): - super(_SerialEvaluationState, self).__init__(executor_service, scheduled) + super().__init__(executor_service, scheduled) self.serial_queue = collections.deque() self.currently_evaluating = None self._lock = threading.Lock() def complete(self, completed_work): self._update_currently_evaluating(None, completed_work) - super(_SerialEvaluationState, self).complete(completed_work) + super().complete(completed_work) def schedule(self, new_work): self._update_currently_evaluating(new_work, None) @@ -210,7 +210,7 @@ def _update_currently_evaluating(self, new_work, completed_work): if self.serial_queue and not self.currently_evaluating: next_work = self.serial_queue.pop() self.currently_evaluating = next_work - super(_SerialEvaluationState, self).schedule(next_work) + super().schedule(next_work) class _TransformExecutorServices(object): diff --git a/sdks/python/apache_beam/runners/direct/sdf_direct_runner_test.py b/sdks/python/apache_beam/runners/direct/sdf_direct_runner_test.py index f60d50c073f2..246d180cddee 100644 --- a/sdks/python/apache_beam/runners/direct/sdf_direct_runner_test.py +++ b/sdks/python/apache_beam/runners/direct/sdf_direct_runner_test.py @@ -146,7 +146,7 @@ def process( class SDFDirectRunnerTest(unittest.TestCase): def setUp(self): - super(SDFDirectRunnerTest, self).setUp() + super().setUp() # Importing following for DirectRunner SDF implemenation for testing. 
from apache_beam.runners.direct import transform_evaluator self._old_default_max_num_outputs = ( diff --git a/sdks/python/apache_beam/runners/direct/test_direct_runner.py b/sdks/python/apache_beam/runners/direct/test_direct_runner.py index 507ec1a5eb01..084820eb17c6 100644 --- a/sdks/python/apache_beam/runners/direct/test_direct_runner.py +++ b/sdks/python/apache_beam/runners/direct/test_direct_runner.py @@ -39,7 +39,7 @@ def run_pipeline(self, pipeline, options): # send this option to remote executors. test_options.on_success_matcher = None - self.result = super(TestDirectRunner, self).run_pipeline(pipeline, options) + self.result = super().run_pipeline(pipeline, options) try: if not is_streaming: diff --git a/sdks/python/apache_beam/runners/direct/transform_evaluator.py b/sdks/python/apache_beam/runners/direct/transform_evaluator.py index c77e44216f2b..2c4e1d9fb924 100644 --- a/sdks/python/apache_beam/runners/direct/transform_evaluator.py +++ b/sdks/python/apache_beam/runners/direct/transform_evaluator.py @@ -31,8 +31,8 @@ from typing import Tuple from typing import Type -import apache_beam.io as io from apache_beam import coders +from apache_beam import io from apache_beam import pvalue from apache_beam.internal import pickler from apache_beam.runners import common @@ -345,7 +345,7 @@ def __init__( assert not side_inputs self._source = applied_ptransform.transform.source self._source.pipeline_options = evaluation_context.pipeline_options - super(_BoundedReadEvaluator, self).__init__( + super().__init__( evaluation_context, applied_ptransform, input_committed_bundle, @@ -394,7 +394,7 @@ def __init__( side_inputs): assert not side_inputs self.transform = applied_ptransform.transform - super(_WatermarkControllerEvaluator, self).__init__( + super().__init__( evaluation_context, applied_ptransform, input_committed_bundle, @@ -464,7 +464,7 @@ def __init__( input_committed_bundle, side_inputs): assert not side_inputs - super(_PairWithTimingEvaluator, self).__init__( + super().__init__( evaluation_context, applied_ptransform, input_committed_bundle, @@ -512,7 +512,7 @@ def __init__( input_committed_bundle, side_inputs): assert not side_inputs - super(_TestStreamEvaluator, self).__init__( + super().__init__( evaluation_context, applied_ptransform, input_committed_bundle, @@ -602,7 +602,7 @@ def __init__( input_committed_bundle, side_inputs): assert not side_inputs - super(_PubSubReadEvaluator, self).__init__( + super().__init__( evaluation_context, applied_ptransform, input_committed_bundle, @@ -736,7 +736,7 @@ def __init__( input_committed_bundle, side_inputs): assert not side_inputs - super(_FlattenEvaluator, self).__init__( + super().__init__( evaluation_context, applied_ptransform, input_committed_bundle, @@ -770,7 +770,7 @@ class _TaggedReceivers(dict): def __init__(self, evaluation_context): self._evaluation_context = evaluation_context self._null_receiver = None - super(_TaggedReceivers, self).__init__() + super().__init__() class NullReceiver(common.Receiver): """Ignores undeclared outputs, default execution mode.""" @@ -804,7 +804,7 @@ def __init__(self, side_inputs, perform_dofn_pickle_test=True ): - super(_ParDoEvaluator, self).__init__( + super().__init__( evaluation_context, applied_ptransform, input_committed_bundle, @@ -904,7 +904,7 @@ def __init__( input_committed_bundle, side_inputs): assert not side_inputs - super(_GroupByKeyOnlyEvaluator, self).__init__( + super().__init__( evaluation_context, applied_ptransform, input_committed_bundle, @@ -1006,7 +1006,7 @@ def __init__( 
input_committed_bundle, side_inputs): assert not side_inputs - super(_StreamingGroupByKeyOnlyEvaluator, self).__init__( + super().__init__( evaluation_context, applied_ptransform, input_committed_bundle, @@ -1061,7 +1061,7 @@ def __init__( input_committed_bundle, side_inputs): assert not side_inputs - super(_StreamingGroupAlsoByWindowEvaluator, self).__init__( + super().__init__( evaluation_context, applied_ptransform, input_committed_bundle, @@ -1132,7 +1132,7 @@ def __init__( input_committed_bundle, side_inputs): assert not side_inputs - super(_NativeWriteEvaluator, self).__init__( + super().__init__( evaluation_context, applied_ptransform, input_committed_bundle, @@ -1207,7 +1207,7 @@ def __init__( applied_ptransform, input_committed_bundle, side_inputs): - super(_ProcessElementsEvaluator, self).__init__( + super().__init__( evaluation_context, applied_ptransform, input_committed_bundle, @@ -1273,6 +1273,6 @@ def finish_bundle(self): par_do_result.counters, par_do_result.keyed_watermark_holds, par_do_result.undeclared_tag_values) - for key in self.keyed_holds: - transform_result.keyed_watermark_holds[key] = self.keyed_holds[key] + for key, keyed_hold in self.keyed_holds.items(): + transform_result.keyed_watermark_holds[key] = keyed_hold return transform_result diff --git a/sdks/python/apache_beam/runners/interactive/augmented_pipeline.py b/sdks/python/apache_beam/runners/interactive/augmented_pipeline.py index 37f914b7b9ac..1cfc5bc959c9 100644 --- a/sdks/python/apache_beam/runners/interactive/augmented_pipeline.py +++ b/sdks/python/apache_beam/runners/interactive/augmented_pipeline.py @@ -52,8 +52,8 @@ def __init__( pcolls: cacheable pcolls to be computed/retrieved. If the set is empty, all intermediate pcolls assigned to variables are applicable. 
""" - assert not pcolls or all([pcoll.pipeline is user_pipeline for pcoll in - pcolls]), 'All %s need to belong to %s' % (pcolls, user_pipeline) + assert not pcolls or all(pcoll.pipeline is user_pipeline for pcoll in + pcolls), 'All %s need to belong to %s' % (pcolls, user_pipeline) self._user_pipeline = user_pipeline self._pcolls = pcolls self._cache_manager = ie.current_env().get_cache_manager( diff --git a/sdks/python/apache_beam/runners/interactive/background_caching_job.py b/sdks/python/apache_beam/runners/interactive/background_caching_job.py index 52195380ef00..bb94b3b117a3 100644 --- a/sdks/python/apache_beam/runners/interactive/background_caching_job.py +++ b/sdks/python/apache_beam/runners/interactive/background_caching_job.py @@ -88,7 +88,7 @@ def _background_caching_job_condition_checker(self): time.sleep(0.5) def _should_end_condition_checker(self): - return any([l.is_triggered() for l in self._limiters]) + return any(l.is_triggered() for l in self._limiters) def is_done(self): with self._result_lock: diff --git a/sdks/python/apache_beam/runners/interactive/cache_manager.py b/sdks/python/apache_beam/runners/interactive/cache_manager.py index 9ed0b25fd934..a6974945045c 100644 --- a/sdks/python/apache_beam/runners/interactive/cache_manager.py +++ b/sdks/python/apache_beam/runners/interactive/cache_manager.py @@ -345,7 +345,7 @@ def expand(self, pcoll): class SafeFastPrimitivesCoder(coders.Coder): """This class add an quote/unquote step to escape special characters.""" - # pylint: disable=deprecated-urllib-function + # pylint: disable=bad-option-value def encode(self, value): return quote( diff --git a/sdks/python/apache_beam/runners/interactive/display/interactive_pipeline_graph.py b/sdks/python/apache_beam/runners/interactive/display/interactive_pipeline_graph.py index 48c926f21263..5a0943e12e6d 100644 --- a/sdks/python/apache_beam/runners/interactive/display/interactive_pipeline_graph.py +++ b/sdks/python/apache_beam/runners/interactive/display/interactive_pipeline_graph.py @@ -71,7 +71,7 @@ def __init__( self._referenced_pcollections = referenced_pcollections or set() self._cached_pcollections = cached_pcollections or set() - super(InteractivePipelineGraph, self).__init__( + super().__init__( pipeline=pipeline, default_vertex_attrs={ 'color': 'gray', 'fontcolor': 'gray' diff --git a/sdks/python/apache_beam/runners/interactive/display/pcoll_visualization_test.py b/sdks/python/apache_beam/runners/interactive/display/pcoll_visualization_test.py index 5b0b51d35d62..d34b966b0efa 100644 --- a/sdks/python/apache_beam/runners/interactive/display/pcoll_visualization_test.py +++ b/sdks/python/apache_beam/runners/interactive/display/pcoll_visualization_test.py @@ -59,7 +59,7 @@ def setUp(self): ib.options.display_timezone = pytz.timezone('US/Pacific') self._p = beam.Pipeline(ir.InteractiveRunner()) - # pylint: disable=range-builtin-not-iterating + # pylint: disable=bad-option-value self._pcoll = self._p | 'Create' >> beam.Create(range(5)) ib.watch(self) diff --git a/sdks/python/apache_beam/runners/interactive/display/pipeline_graph_test.py b/sdks/python/apache_beam/runners/interactive/display/pipeline_graph_test.py index 9a0c7a9df1be..419cd50ac6e9 100644 --- a/sdks/python/apache_beam/runners/interactive/display/pipeline_graph_test.py +++ b/sdks/python/apache_beam/runners/interactive/display/pipeline_graph_test.py @@ -28,7 +28,7 @@ from apache_beam.runners.interactive.display import pipeline_graph from apache_beam.runners.interactive.testing.mock_ipython import mock_get_ipython -# 
pylint: disable=range-builtin-not-iterating,unused-variable,possibly-unused-variable +# pylint: disable=bad-option-value,unused-variable,possibly-unused-variable # Reason: # Disable pylint for pipelines built for testing. Not all PCollections are # used but they need to be assigned to variables so that we can test how diff --git a/sdks/python/apache_beam/runners/interactive/interactive_beam_test.py b/sdks/python/apache_beam/runners/interactive/interactive_beam_test.py index da948fdbd2cf..feb9092ade18 100644 --- a/sdks/python/apache_beam/runners/interactive/interactive_beam_test.py +++ b/sdks/python/apache_beam/runners/interactive/interactive_beam_test.py @@ -98,7 +98,7 @@ def test_watch_class_instance(self): @unittest.skipIf(sys.platform == "win32", "[BEAM-10627]") def test_show_always_watch_given_pcolls(self): p = beam.Pipeline(ir.InteractiveRunner()) - # pylint: disable=range-builtin-not-iterating + # pylint: disable=bad-option-value pcoll = p | 'Create' >> beam.Create(range(10)) # The pcoll is not watched since watch(locals()) is not explicitly called. self.assertFalse(pcoll in _get_watched_pcollections_with_variable_names()) @@ -111,7 +111,7 @@ def test_show_always_watch_given_pcolls(self): @unittest.skipIf(sys.platform == "win32", "[BEAM-10627]") def test_show_mark_pcolls_computed_when_done(self): p = beam.Pipeline(ir.InteractiveRunner()) - # pylint: disable=range-builtin-not-iterating + # pylint: disable=bad-option-value pcoll = p | 'Create' >> beam.Create(range(10)) self.assertFalse(pcoll in ie.current_env().computed_pcollections) # The call of show marks pcoll computed. @@ -125,7 +125,7 @@ def test_show_mark_pcolls_computed_when_done(self): 'visualize_computed_pcoll')) def test_show_handles_dict_of_pcolls(self, mocked_visualize): p = beam.Pipeline(ir.InteractiveRunner()) - # pylint: disable=range-builtin-not-iterating + # pylint: disable=bad-option-value pcoll = p | 'Create' >> beam.Create(range(10)) ib.watch(locals()) ie.current_env().track_user_pipelines() @@ -140,7 +140,7 @@ def test_show_handles_dict_of_pcolls(self, mocked_visualize): 'visualize_computed_pcoll')) def test_show_handles_iterable_of_pcolls(self, mocked_visualize): p = beam.Pipeline(ir.InteractiveRunner()) - # pylint: disable=range-builtin-not-iterating + # pylint: disable=bad-option-value pcoll = p | 'Create' >> beam.Create(range(10)) ib.watch(locals()) ie.current_env().track_user_pipelines() @@ -172,7 +172,7 @@ def __init__(self, pcoll): self._pcoll = pcoll p = beam.Pipeline(ir.InteractiveRunner()) - # pylint: disable=range-builtin-not-iterating + # pylint: disable=bad-option-value pcoll = p | 'Create' >> beam.Create(range(10)) ie.current_env().mark_pcollection_computed([pcoll]) ie.current_env()._is_in_ipython = True diff --git a/sdks/python/apache_beam/runners/interactive/interactive_environment_test.py b/sdks/python/apache_beam/runners/interactive/interactive_environment_test.py index f08db0156e83..83a07c84d65e 100644 --- a/sdks/python/apache_beam/runners/interactive/interactive_environment_test.py +++ b/sdks/python/apache_beam/runners/interactive/interactive_environment_test.py @@ -45,8 +45,8 @@ def assertVariableNotWatched(self, variable_name, variable_val): self.assertFalse(self._is_variable_watched(variable_name, variable_val)) def _is_variable_watched(self, variable_name, variable_val): - return any([(variable_name, variable_val) in watching - for watching in ie.current_env().watching()]) + return any((variable_name, variable_val) in watching + for watching in ie.current_env().watching()) def 
_a_function_with_local_watched(self): local_var_watched = 123 # pylint: disable=possibly-unused-variable diff --git a/sdks/python/apache_beam/runners/interactive/interactive_runner.py b/sdks/python/apache_beam/runners/interactive/interactive_runner.py index 4778737ba38d..e19b85b27728 100644 --- a/sdks/python/apache_beam/runners/interactive/interactive_runner.py +++ b/sdks/python/apache_beam/runners/interactive/interactive_runner.py @@ -203,7 +203,7 @@ def visit_transform(self, transform_node): main_job_result.wait_until_finish() if main_job_result.state is beam.runners.runner.PipelineState.DONE: - # pylint: disable=dict-values-not-iterating + # pylint: disable=bad-option-value ie.current_env().mark_pcollection_computed( pipeline_instrument.cached_pcolls) @@ -222,7 +222,7 @@ def __init__(self, underlying_result, pipeline_instrument): the pipeline being executed with interactivity applied and related metadata including where the interactivity-backing cache lies. """ - super(PipelineResult, self).__init__(underlying_result.state) + super().__init__(underlying_result.state) self._underlying_result = underlying_result self._pipeline_instrument = pipeline_instrument diff --git a/sdks/python/apache_beam/runners/interactive/interactive_runner_test.py b/sdks/python/apache_beam/runners/interactive/interactive_runner_test.py index 69551de15596..ea2b18fef4ac 100644 --- a/sdks/python/apache_beam/runners/interactive/interactive_runner_test.py +++ b/sdks/python/apache_beam/runners/interactive/interactive_runner_test.py @@ -267,7 +267,7 @@ def test_mark_pcollection_completed_after_successful_run(self, cell): ib.watch({'p': p}) with cell: # Cell 2 - # pylint: disable=range-builtin-not-iterating + # pylint: disable=bad-option-value init = p | 'Init' >> beam.Create(range(5)) with cell: # Cell 3 diff --git a/sdks/python/apache_beam/runners/interactive/messaging/interactive_environment_inspector_test.py b/sdks/python/apache_beam/runners/interactive/messaging/interactive_environment_inspector_test.py index 4edb0d47c436..2eb10043f12c 100644 --- a/sdks/python/apache_beam/runners/interactive/messaging/interactive_environment_inspector_test.py +++ b/sdks/python/apache_beam/runners/interactive/messaging/interactive_environment_inspector_test.py @@ -47,13 +47,13 @@ def test_inspect(self, cell): pipeline = beam.Pipeline(ir.InteractiveRunner()) # Early watch the pipeline so that cell re-execution can be handled. ib.watch({'pipeline': pipeline}) - # pylint: disable=range-builtin-not-iterating + # pylint: disable=bad-option-value pcoll = pipeline | 'Create' >> beam.Create(range(10)) with cell: # Cell 2 # Re-executes the line that created the pcoll causing the original # pcoll no longer inspectable. 
- # pylint: disable=range-builtin-not-iterating + # pylint: disable=bad-option-value pcoll = pipeline | 'Create' >> beam.Create(range(10)) ib.watch(locals()) @@ -106,7 +106,7 @@ def test_inspect_pipelines(self, cell): def test_list_inspectables(self, cell): with cell: # Cell 1 pipeline = beam.Pipeline(ir.InteractiveRunner()) - # pylint: disable=range-builtin-not-iterating + # pylint: disable=bad-option-value pcoll_1 = pipeline | 'Create' >> beam.Create(range(10)) pcoll_2 = pcoll_1 | 'Square' >> beam.Map(lambda x: x * x) @@ -144,7 +144,7 @@ def test_list_inspectables(self, cell): def test_get_val(self, cell): with cell: # Cell 1 pipeline = beam.Pipeline(ir.InteractiveRunner()) - # pylint: disable=range-builtin-not-iterating + # pylint: disable=bad-option-value pcoll = pipeline | 'Create' >> beam.Create(range(10)) with cell: # Cell 2 @@ -167,7 +167,7 @@ def test_get_val(self, cell): def test_get_pcoll_data(self): pipeline = beam.Pipeline(ir.InteractiveRunner()) - # pylint: disable=range-builtin-not-iterating + # pylint: disable=bad-option-value pcoll = pipeline | 'Create' >> beam.Create(list(range(10))) counts = pcoll | beam.combiners.Count.PerElement() diff --git a/sdks/python/apache_beam/runners/interactive/pipeline_fragment.py b/sdks/python/apache_beam/runners/interactive/pipeline_fragment.py index 84fdc9ad24fa..91c9b515bab0 100644 --- a/sdks/python/apache_beam/runners/interactive/pipeline_fragment.py +++ b/sdks/python/apache_beam/runners/interactive/pipeline_fragment.py @@ -204,7 +204,7 @@ def _mark_necessary_transforms_and_pcolls(self, runner_pcolls_to_user_pcolls): # Record all necessary input and side input PCollections. updated_all_inputs.update(producer.inputs) - # pylint: disable=map-builtin-not-iterating + # pylint: disable=bad-option-value side_input_pvalues = set( map(lambda side_input: side_input.pvalue, producer.side_inputs)) updated_all_inputs.update(side_input_pvalues) diff --git a/sdks/python/apache_beam/runners/interactive/pipeline_fragment_test.py b/sdks/python/apache_beam/runners/interactive/pipeline_fragment_test.py index c6d28f6e7189..f1f423f97c0e 100644 --- a/sdks/python/apache_beam/runners/interactive/pipeline_fragment_test.py +++ b/sdks/python/apache_beam/runners/interactive/pipeline_fragment_test.py @@ -49,7 +49,7 @@ def test_build_pipeline_fragment(self, cell): ib.watch(locals()) with cell: # Cell 2 - # pylint: disable=range-builtin-not-iterating + # pylint: disable=bad-option-value init = p | 'Init' >> beam.Create(range(10)) init_expected = p_expected | 'Init' >> beam.Create(range(10)) @@ -71,7 +71,7 @@ def test_user_pipeline_intact_after_deducing_pipeline_fragment(self, cell): ib.watch({'p': p}) with cell: # Cell 2 - # pylint: disable=range-builtin-not-iterating + # pylint: disable=bad-option-value init = p | 'Init' >> beam.Create(range(10)) with cell: # Cell 3 @@ -100,7 +100,7 @@ def test_pipeline_fragment_produces_correct_data(self, cell): ib.watch({'p': p}) with cell: # Cell 2 - # pylint: disable=range-builtin-not-iterating + # pylint: disable=bad-option-value init = p | 'Init' >> beam.Create(range(5)) with cell: # Cell 3 @@ -138,7 +138,7 @@ def test_pipeline_composites(self, cell): ib.watch({'p': p}) with cell: # Cell 2 - # pylint: disable=range-builtin-not-iterating + # pylint: disable=bad-option-value init = p | 'Init' >> beam.Create(range(5)) with cell: # Cell 3 diff --git a/sdks/python/apache_beam/runners/interactive/pipeline_instrument_test.py b/sdks/python/apache_beam/runners/interactive/pipeline_instrument_test.py index bba315d6a8c5..893603ddbb52 
100644 --- a/sdks/python/apache_beam/runners/interactive/pipeline_instrument_test.py +++ b/sdks/python/apache_beam/runners/interactive/pipeline_instrument_test.py @@ -51,7 +51,7 @@ def cache_key_of(self, name, pcoll): def test_pcoll_to_pcoll_id(self): p = beam.Pipeline(interactive_runner.InteractiveRunner()) ie.current_env().set_cache_manager(InMemoryCache(), p) - # pylint: disable=range-builtin-not-iterating + # pylint: disable=bad-option-value init_pcoll = p | 'Init Create' >> beam.Impulse() _, ctx = p.to_runner_api(return_context=True) self.assertEqual( @@ -81,7 +81,7 @@ def test_pcoll_id_with_runner_pipeline(self): # in the original instance and if the evaluation has changed since last # execution. p2_id_runner = beam.Pipeline(interactive_runner.InteractiveRunner()) - # pylint: disable=range-builtin-not-iterating + # pylint: disable=bad-option-value init_pcoll_2 = p2_id_runner | 'Init Create' >> beam.Create(range(10)) ie.current_env().add_derived_pipeline(p_id_runner, p2_id_runner) @@ -94,7 +94,7 @@ def test_pcoll_id_with_runner_pipeline(self): def test_cache_key(self): p = beam.Pipeline(interactive_runner.InteractiveRunner()) ie.current_env().set_cache_manager(InMemoryCache(), p) - # pylint: disable=range-builtin-not-iterating + # pylint: disable=bad-option-value init_pcoll = p | 'Init Create' >> beam.Create(range(10)) squares = init_pcoll | 'Square' >> beam.Map(lambda x: x * x) cubes = init_pcoll | 'Cube' >> beam.Map(lambda x: x**3) @@ -114,7 +114,7 @@ def test_cache_key(self): def test_cacheables(self): p_cacheables = beam.Pipeline(interactive_runner.InteractiveRunner()) ie.current_env().set_cache_manager(InMemoryCache(), p_cacheables) - # pylint: disable=range-builtin-not-iterating + # pylint: disable=bad-option-value init_pcoll = p_cacheables | 'Init Create' >> beam.Create(range(10)) squares = init_pcoll | 'Square' >> beam.Map(lambda x: x * x) cubes = init_pcoll | 'Cube' >> beam.Map(lambda x: x**3) @@ -185,7 +185,7 @@ def test_background_caching_pipeline_proto(self): def _example_pipeline(self, watch=True, bounded=True): p_example = beam.Pipeline(interactive_runner.InteractiveRunner()) ie.current_env().set_cache_manager(InMemoryCache(), p_example) - # pylint: disable=range-builtin-not-iterating + # pylint: disable=bad-option-value if bounded: source = beam.Create(range(10)) else: @@ -263,8 +263,8 @@ def enter_composite_transform(self, transform_node): def visit_transform(self, transform_node): if transform_node.inputs: main_inputs = dict(transform_node.main_inputs) - for tag in main_inputs.keys(): - if main_inputs[tag] == init_pcoll: + for tag, main_input in main_inputs.items(): + if main_input == init_pcoll: main_inputs[tag] = cached_init_pcoll transform_node.main_inputs = main_inputs @@ -780,7 +780,7 @@ def test_side_effect_pcoll_is_included(self): # Deliberately not assign the result to a variable to make it a # "side effect" transform. Note we never watch anything from # the pipeline defined locally either. 
- # pylint: disable=range-builtin-not-iterating,expression-not-assigned + # pylint: disable=bad-option-value,expression-not-assigned pipeline_with_side_effect | 'Init Create' >> beam.Create(range(10)) pipeline_instrument = instr.build_pipeline_instrument( pipeline_with_side_effect) diff --git a/sdks/python/apache_beam/runners/interactive/testing/integration/notebook_executor.py b/sdks/python/apache_beam/runners/interactive/testing/integration/notebook_executor.py index ecff3bd40560..6a80639ee285 100644 --- a/sdks/python/apache_beam/runners/interactive/testing/integration/notebook_executor.py +++ b/sdks/python/apache_beam/runners/interactive/testing/integration/notebook_executor.py @@ -151,7 +151,7 @@ class IFrameParser(HTMLParser): """A parser to extract iframe content from given HTML.""" def __init__(self): self._srcdocs = [] - super(IFrameParser, self).__init__() + super().__init__() def handle_starttag(self, tag, attrs): if tag == 'iframe': diff --git a/sdks/python/apache_beam/runners/interactive/testing/integration/screen_diff.py b/sdks/python/apache_beam/runners/interactive/testing/integration/screen_diff.py index 5e0c8551ca1c..a1c9971b0882 100644 --- a/sdks/python/apache_beam/runners/interactive/testing/integration/screen_diff.py +++ b/sdks/python/apache_beam/runners/interactive/testing/integration/screen_diff.py @@ -175,7 +175,7 @@ def __init__(self, *args, **kwargs): self.baseline_directory = os.path.join(os.getcwd(), self._golden_dir) self.output_directory = os.path.join( os.getcwd(), self._test_notebook_dir, 'output') - super(BaseTestCase, self).__init__(*args, **kwargs) + super().__init__(*args, **kwargs) @classmethod def get_web_driver(cls): @@ -195,7 +195,7 @@ def run(self, result=None): self._golden_dir, self._cleanup) as test_env: self._test_env = test_env - super(BaseTestCase, self).run(result) + super().run(result) def explicit_wait(self): """Wait for common elements to be visible.""" diff --git a/sdks/python/apache_beam/runners/interactive/testing/integration/tests/screen_diff_test.py b/sdks/python/apache_beam/runners/interactive/testing/integration/tests/screen_diff_test.py index 0d36c8844349..a3f8ace0b53f 100644 --- a/sdks/python/apache_beam/runners/interactive/testing/integration/tests/screen_diff_test.py +++ b/sdks/python/apache_beam/runners/interactive/testing/integration/tests/screen_diff_test.py @@ -29,7 +29,7 @@ class DataFramesTest(BaseTestCase): def __init__(self, *args, **kwargs): kwargs['golden_size'] = (1024, 10000) - super(DataFramesTest, self).__init__(*args, **kwargs) + super().__init__(*args, **kwargs) def explicit_wait(self): try: @@ -39,6 +39,7 @@ def explicit_wait(self): WebDriverWait(self.driver, 5).until( expected_conditions.presence_of_element_located((By.ID, 'test-done'))) + # pylint: disable=bare-except except: pass # The test will be ignored. 
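Not part of the patch, but context for reviewers: the recurring rewrites of super(SomeClass, self).__init__(...) into super().__init__(...) across these files address pylint's super-with-arguments check. Inside a method body, Python 3 resolves the zero-argument super() call to the enclosing class and the current instance automatically, so the two spellings below are equivalent; the class names are hypothetical and only illustrate the shape of the change:

    class Base:
        def __init__(self, client):
            self.client = client

    class OldStyleService(Base):
        def __init__(self, client):
            # Python 2 compatible form, flagged by pylint 2.x as super-with-arguments
            super(OldStyleService, self).__init__(client)

    class NewStyleService(Base):
        def __init__(self, client):
            # zero-argument form used throughout this patch
            super().__init__(client)

The zero-argument form also keeps working if a class is later renamed, which is why the sweep can be applied mechanically.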
@@ -50,7 +51,7 @@ def test_dataframes(self): class InitSquareCubeTest(BaseTestCase): def __init__(self, *args, **kwargs): kwargs['golden_size'] = (1024, 10000) - super(InitSquareCubeTest, self).__init__(*args, **kwargs) + super().__init__(*args, **kwargs) def test_init_square_cube_notebook(self): self.assert_notebook('init_square_cube') diff --git a/sdks/python/apache_beam/runners/interactive/testing/pipeline_assertion.py b/sdks/python/apache_beam/runners/interactive/testing/pipeline_assertion.py index 9f4bdbca6327..9b07342eecab 100644 --- a/sdks/python/apache_beam/runners/interactive/testing/pipeline_assertion.py +++ b/sdks/python/apache_beam/runners/interactive/testing/pipeline_assertion.py @@ -88,10 +88,9 @@ def _assert_pipeline_proto_contains_top_level_transform( pipeline_proto.root_transform_ids[0]].subtransforms test_case.assertEqual( contain, - any([ + any( transform_label in top_level_transform_label - for top_level_transform_label in top_level_transform_labels - ])) + for top_level_transform_label in top_level_transform_labels)) def _assert_transform_equal( diff --git a/sdks/python/apache_beam/runners/interactive/utils.py b/sdks/python/apache_beam/runners/interactive/utils.py index 49b87bac85d9..4eaf7b4f5fcd 100644 --- a/sdks/python/apache_beam/runners/interactive/utils.py +++ b/sdks/python/apache_beam/runners/interactive/utils.py @@ -150,8 +150,8 @@ def register_ipython_log_handler(): # will be triggered at the "root"'s own logging level. And if a child logger # sets its logging level, it can take control back. interactive_root_logger = logging.getLogger('apache_beam.runners.interactive') - if any([isinstance(h, IPythonLogHandler) - for h in interactive_root_logger.handlers]): + if any(isinstance(h, IPythonLogHandler) + for h in interactive_root_logger.handlers): return interactive_root_logger.setLevel(logging.INFO) interactive_root_logger.addHandler(IPythonLogHandler()) diff --git a/sdks/python/apache_beam/runners/portability/abstract_job_service.py b/sdks/python/apache_beam/runners/portability/abstract_job_service.py index 55224f49584a..d6919eb466bc 100644 --- a/sdks/python/apache_beam/runners/portability/abstract_job_service.py +++ b/sdks/python/apache_beam/runners/portability/abstract_job_service.py @@ -321,7 +321,7 @@ def __init__( pipeline, options, artifact_port=0): - super(UberJarBeamJob, self).__init__(job_id, job_name, pipeline, options) + super().__init__(job_id, job_name, pipeline, options) self._executable_jar = executable_jar self._jar_uploaded = False self._artifact_port = artifact_port diff --git a/sdks/python/apache_beam/runners/portability/artifact_service.py b/sdks/python/apache_beam/runners/portability/artifact_service.py index 64023eb15aaa..bec9317e33c0 100644 --- a/sdks/python/apache_beam/runners/portability/artifact_service.py +++ b/sdks/python/apache_beam/runners/portability/artifact_service.py @@ -290,6 +290,7 @@ def maybe_store_artifact(artifact, service, dest_dir): elif artifact.type_urn == common_urns.artifact_types.FILE.urn: payload = beam_runner_api_pb2.ArtifactFilePayload.FromString( artifact.type_payload) + # pylint: disable=condition-evals-to-constant if os.path.exists( payload.path) and payload.sha256 and payload.sha256 == sha256( payload.path) and False: diff --git a/sdks/python/apache_beam/runners/portability/flink_runner.py b/sdks/python/apache_beam/runners/portability/flink_runner.py index 6486d3d0a282..efa17cd01a93 100644 --- a/sdks/python/apache_beam/runners/portability/flink_runner.py +++ 
b/sdks/python/apache_beam/runners/portability/flink_runner.py @@ -42,7 +42,7 @@ def run_pipeline(self, pipeline, options): not portable_options.environment_type and not portable_options.output_executable_path): portable_options.environment_type = 'LOOPBACK' - return super(FlinkRunner, self).run_pipeline(pipeline, options) + return super().run_pipeline(pipeline, options) def default_job_server(self, options): flink_options = options.view_as(pipeline_options.FlinkRunnerOptions) @@ -82,7 +82,7 @@ def add_http_scheme(flink_master): class FlinkJarJobServer(job_server.JavaJarJobServer): def __init__(self, options): - super(FlinkJarJobServer, self).__init__(options) + super().__init__(options) options = options.view_as(pipeline_options.FlinkRunnerOptions) self._jar = options.flink_job_server_jar self._master_url = options.flink_master diff --git a/sdks/python/apache_beam/runners/portability/flink_runner_test.py b/sdks/python/apache_beam/runners/portability/flink_runner_test.py index 7d26a70c9e39..cb6345e77efc 100644 --- a/sdks/python/apache_beam/runners/portability/flink_runner_test.py +++ b/sdks/python/apache_beam/runners/portability/flink_runner_test.py @@ -70,7 +70,7 @@ class FlinkRunnerTest(portable_runner_test.PortableRunnerTest): flink_job_server_jar = None def __init__(self, *args, **kwargs): - super(FlinkRunnerTest, self).__init__(*args, **kwargs) + super().__init__(*args, **kwargs) self.environment_type = None self.environment_config = None @@ -123,7 +123,7 @@ def tearDownClass(cls): if cls.conf_dir and exists(cls.conf_dir): _LOGGER.info("removing conf dir: %s" % cls.conf_dir) rmtree(cls.conf_dir) - super(FlinkRunnerTest, cls).tearDownClass() + super().tearDownClass() @classmethod def _create_conf_dir(cls): @@ -195,7 +195,7 @@ def set_flink_job_server_jar(cls, flink_job_server_jar): cls.flink_job_server_jar = flink_job_server_jar def create_options(self): - options = super(FlinkRunnerTest, self).create_options() + options = super().create_options() options.view_as(DebugOptions).experiments = ['beam_fn_api'] options._all_options['parallelism'] = 2 options.view_as(PortableOptions).environment_type = self.environment_type @@ -291,11 +291,10 @@ def test_sql(self): def test_flattened_side_input(self): # Blocked on support for transcoding # https://jira.apache.org/jira/browse/BEAM-6523 - super(FlinkRunnerTest, - self).test_flattened_side_input(with_transcoding=False) + super().test_flattened_side_input(with_transcoding=False) def test_metrics(self): - super(FlinkRunnerTest, self).test_metrics(check_gauge=False) + super().test_metrics(check_gauge=False) def test_flink_metrics(self): """Run a simple DoFn that increments a counter and verifies state @@ -405,7 +404,7 @@ class FlinkRunnerTestOptimized(FlinkRunnerTest): # TODO: Remove these tests after resolving BEAM-7248 and enabling # PortableRunnerOptimized def create_options(self): - options = super(FlinkRunnerTestOptimized, self).create_options() + options = super().create_options() options.view_as(DebugOptions).experiments = [ 'pre_optimize=all' ] + options.view_as(DebugOptions).experiments @@ -432,14 +431,14 @@ def test_pack_combiners(self): class FlinkRunnerTestStreaming(FlinkRunnerTest): def __init__(self, *args, **kwargs): - super(FlinkRunnerTestStreaming, self).__init__(*args, **kwargs) + super().__init__(*args, **kwargs) self.enable_commit = False def setUp(self): self.enable_commit = False def create_options(self): - options = super(FlinkRunnerTestStreaming, self).create_options() + options = super().create_options() 
options.view_as(StandardOptions).streaming = True if self.enable_commit: options._all_options['checkpointing_interval'] = 3000 @@ -448,11 +447,11 @@ def create_options(self): def test_callbacks_with_exception(self): self.enable_commit = True - super(FlinkRunnerTest, self).test_callbacks_with_exception() + super().test_callbacks_with_exception() def test_register_finalizations(self): self.enable_commit = True - super(FlinkRunnerTest, self).test_register_finalizations() + super().test_register_finalizations() if __name__ == '__main__': diff --git a/sdks/python/apache_beam/runners/portability/flink_uber_jar_job_server.py b/sdks/python/apache_beam/runners/portability/flink_uber_jar_job_server.py index 9b0d6ff4f571..9a40a55c7601 100644 --- a/sdks/python/apache_beam/runners/portability/flink_uber_jar_job_server.py +++ b/sdks/python/apache_beam/runners/portability/flink_uber_jar_job_server.py @@ -43,7 +43,7 @@ class FlinkUberJarJobServer(abstract_job_service.AbstractJobServiceServicer): the pipeline artifacts. """ def __init__(self, master_url, options): - super(FlinkUberJarJobServer, self).__init__() + super().__init__() self._master_url = master_url self._executable_jar = ( options.view_as( @@ -116,7 +116,7 @@ def __init__( pipeline, options, artifact_port=0): - super(FlinkBeamJob, self).__init__( + super().__init__( executable_jar, job_id, job_name, @@ -208,7 +208,7 @@ def get_state(self): state, timestamp = self._get_state() if timestamp is None: # state has not changed since it was last checked: use previous timestamp - return super(FlinkBeamJob, self).get_state() + return super().get_state() else: return state, timestamp diff --git a/sdks/python/apache_beam/runners/portability/fn_api_runner/fn_runner.py b/sdks/python/apache_beam/runners/portability/fn_api_runner/fn_runner.py index be8fe60bd2c1..4b77772cdf21 100644 --- a/sdks/python/apache_beam/runners/portability/fn_api_runner/fn_runner.py +++ b/sdks/python/apache_beam/runners/portability/fn_api_runner/fn_runner.py @@ -115,7 +115,7 @@ def __init__( waits before requesting progress from the SDK. is_drain: identify whether expand the sdf graph in the drain mode. """ - super(FnApiRunner, self).__init__() + super().__init__() self._default_environment = ( default_environment or environments.EmbeddedPythonEnvironment.default()) self._bundle_repeat = bundle_repeat @@ -1140,7 +1140,7 @@ def __init__( cache_token_generator=None, **kwargs): # type: (...) -> None - super(ParallelBundleManager, self).__init__( + super().__init__( bundle_context_manager, progress_frequency, cache_token_generator=cache_token_generator) @@ -1184,7 +1184,7 @@ def execute(part_map_input_timers): dry_run) with thread_pool_executor.shared_unbounded_instance() as executor: - for result, split_result in executor.map(execute, zip(part_inputs, # pylint: disable=zip-builtin-not-iterating + for result, split_result in executor.map(execute, zip(part_inputs, # pylint: disable=bad-option-value timer_inputs)): split_result_list += split_result if merged_result is None: @@ -1214,7 +1214,7 @@ def __init__(self, callback=None ): # type: (...) 
-> None - super(ProgressRequester, self).__init__() + super().__init__() self._worker_handler = worker_handler self._instruction_id = instruction_id self._frequency = frequency @@ -1299,7 +1299,7 @@ def monitoring_infos(self): class RunnerResult(runner.PipelineResult): def __init__(self, state, monitoring_infos_by_stage): - super(RunnerResult, self).__init__(state) + super().__init__(state) self._monitoring_infos_by_stage = monitoring_infos_by_stage self._metrics = None self._monitoring_metrics = None diff --git a/sdks/python/apache_beam/runners/portability/fn_api_runner/fn_runner_test.py b/sdks/python/apache_beam/runners/portability/fn_api_runner/fn_runner_test.py index c0029635c322..87d03ca9db98 100644 --- a/sdks/python/apache_beam/runners/portability/fn_api_runner/fn_runner_test.py +++ b/sdks/python/apache_beam/runners/portability/fn_api_runner/fn_runner_test.py @@ -1086,10 +1086,10 @@ def expand(self, pcoll): if assert_using_counter_names: if pipeline_options.view_as(StandardOptions).streaming: self.assertFalse( - any([re.match(packed_step_name_regex, s) for s in step_names])) + any(re.match(packed_step_name_regex, s) for s in step_names)) else: self.assertTrue( - any([re.match(packed_step_name_regex, s) for s in step_names])) + any(re.match(packed_step_name_regex, s) for s in step_names)) @retry(stop=stop_after_attempt(3)) def test_pack_combiners(self): @@ -1970,8 +1970,7 @@ def create_tracker(self, restriction): class CheckpointOnlyOffsetRestrictionTracker( restriction_trackers.OffsetRestrictionTracker): def try_split(self, unused_fraction_of_remainder): - return super(CheckpointOnlyOffsetRestrictionTracker, - self).try_split(0.0) + return super().try_split(0.0) return CheckpointOnlyOffsetRestrictionTracker(restriction) if self.use_bounded_offset_range: @@ -1987,7 +1986,7 @@ def restriction_size(self, element, restriction): class OffsetRangeProviderWithTruncate(OffsetRangeProvider): def __init__(self): - super(OffsetRangeProviderWithTruncate, self).__init__(True) + super().__init__(True) def truncate(self, element, restriction): return restriction_trackers.OffsetRange( diff --git a/sdks/python/apache_beam/runners/portability/fn_api_runner/trigger_manager.py b/sdks/python/apache_beam/runners/portability/fn_api_runner/trigger_manager.py index 0fd74b33a8cb..021f5950d71d 100644 --- a/sdks/python/apache_beam/runners/portability/fn_api_runner/trigger_manager.py +++ b/sdks/python/apache_beam/runners/portability/fn_api_runner/trigger_manager.py @@ -96,7 +96,7 @@ def read_watermark(watermark_state): class TriggerMergeContext(WindowFn.MergeContext): def __init__( self, all_windows, context: 'FnRunnerStatefulTriggerContext', windowing): - super(TriggerMergeContext, self).__init__(all_windows) + super().__init__(all_windows) self.trigger_context = context self.windowing = windowing self.merged_away: typing.Dict[BoundedWindow, BoundedWindow] = {} diff --git a/sdks/python/apache_beam/runners/portability/fn_api_runner/worker_handlers.py b/sdks/python/apache_beam/runners/portability/fn_api_runner/worker_handlers.py index b967e01260d5..0bdade951b86 100644 --- a/sdks/python/apache_beam/runners/portability/fn_api_runner/worker_handlers.py +++ b/sdks/python/apache_beam/runners/portability/fn_api_runner/worker_handlers.py @@ -106,7 +106,7 @@ def __init__(self): self._push_queue = queue.Queue( ) # type: queue.Queue[Union[beam_fn_api_pb2.InstructionRequest, Sentinel]] self._input = None # type: Optional[Iterable[beam_fn_api_pb2.InstructionResponse]] - self._futures_by_id = dict() # type: Dict[str, 
ControlFuture] + self._futures_by_id = {} # type: Dict[str, ControlFuture] self._read_thread = threading.Thread( name='beam_control_read', target=self._read) self._state = BeamFnControlServicer.UNSTARTED_STATE @@ -354,7 +354,7 @@ def __init__(self, worker_manager, # type: WorkerHandlerManager ): # type: (...) -> None - super(EmbeddedWorkerHandler, self).__init__( + super().__init__( self, data_plane.InMemoryDataChannel(), state, provision_info) self.control_conn = self # type: ignore # need Protocol to describe this self.data_conn = self.data_plane_handler @@ -548,7 +548,7 @@ def __init__(self, ): # type: (...) -> None self._grpc_server = grpc_server - super(GrpcWorkerHandler, self).__init__( + super().__init__( self._grpc_server.control_handler, self._grpc_server.data_plane_handler, state, @@ -591,7 +591,7 @@ def close(self): # type: () -> None self.control_conn.close() self.data_conn.close() - super(GrpcWorkerHandler, self).close() + super().close() def port_from_worker(self, port): # type: (int) -> str @@ -612,8 +612,7 @@ def __init__(self, grpc_server # type: GrpcServer ): # type: (...) -> None - super(ExternalWorkerHandler, - self).__init__(state, provision_info, grpc_server) + super().__init__(state, provision_info, grpc_server) self._external_payload = external_payload def start_worker(self): @@ -657,8 +656,7 @@ def __init__(self, grpc_server # type: GrpcServer ): # type: (...) -> None - super(EmbeddedGrpcWorkerHandler, - self).__init__(state, provision_info, grpc_server) + super().__init__(state, provision_info, grpc_server) from apache_beam.transforms.environments import EmbeddedPythonGrpcEnvironment config = EmbeddedPythonGrpcEnvironment.parse_config(payload.decode('utf-8')) @@ -697,8 +695,7 @@ def __init__(self, grpc_server # type: GrpcServer ): # type: (...) -> None - super(SubprocessSdkWorkerHandler, - self).__init__(state, provision_info, grpc_server) + super().__init__(state, provision_info, grpc_server) self._worker_command_line = worker_command_line def start_worker(self): @@ -728,8 +725,7 @@ def __init__(self, grpc_server # type: GrpcServer ): # type: (...) -> None - super(DockerSdkWorkerHandler, - self).__init__(state, provision_info, grpc_server) + super().__init__(state, provision_info, grpc_server) self._container_image = payload.container_image self._container_id = None # type: Optional[bytes] @@ -743,7 +739,7 @@ def host_from_worker(self): # Gets ipv4 address of current host. Note the host is not guaranteed to # be localhost because the python SDK could be running within a container. return socket.gethostbyname(socket.getfqdn()) - return super(DockerSdkWorkerHandler, self).host_from_worker() + return super().host_from_worker() def start_worker(self): # type: () -> None @@ -813,7 +809,8 @@ def watch_container(self): 'SDK exited unexpectedly. ' 'Final status is %s. 
Final log line is %s' % ( status.decode('utf-8'), - logs.decode('utf-8').strip().split('\n')[-1]))) + logs.decode('utf-8').strip().rsplit('\n', + maxsplit=1)[-1]))) time.sleep(5) def stop_worker(self): diff --git a/sdks/python/apache_beam/runners/portability/job_server.py b/sdks/python/apache_beam/runners/portability/job_server.py index b1363c6a4021..eda8755e18ab 100644 --- a/sdks/python/apache_beam/runners/portability/job_server.py +++ b/sdks/python/apache_beam/runners/portability/job_server.py @@ -121,7 +121,7 @@ def local_temp_dir(self, **kwargs): class JavaJarJobServer(SubprocessJobServer): def __init__(self, options): - super(JavaJarJobServer, self).__init__() + super().__init__() options = options.view_as(pipeline_options.JobServerOptions) self._job_port = options.job_port self._artifact_port = options.artifact_port diff --git a/sdks/python/apache_beam/runners/portability/local_job_service.py b/sdks/python/apache_beam/runners/portability/local_job_service.py index aedfc03fcaa9..77608ee9c599 100644 --- a/sdks/python/apache_beam/runners/portability/local_job_service.py +++ b/sdks/python/apache_beam/runners/portability/local_job_service.py @@ -74,7 +74,7 @@ class LocalJobServicer(abstract_job_service.AbstractJobServiceServicer): subprocesses for the runner and worker(s). """ def __init__(self, staging_dir=None): - super(LocalJobServicer, self).__init__() + super().__init__() self._cleanup_staging_dir = staging_dir is None self._staging_dir = staging_dir or tempfile.mkdtemp() self._artifact_service = artifact_service.ArtifactStagingService( @@ -234,8 +234,7 @@ def __init__(self, artifact_staging_endpoint, # type: Optional[endpoints_pb2.ApiServiceDescriptor] artifact_service, # type: artifact_service.ArtifactStagingService ): - super(BeamJob, - self).__init__(job_id, provision_info.job_name, pipeline, options) + super().__init__(job_id, provision_info.job_name, pipeline, options) self._provision_info = provision_info self._artifact_staging_endpoint = artifact_staging_endpoint self._artifact_service = artifact_service @@ -246,7 +245,7 @@ def __init__(self, def set_state(self, new_state): """Set the latest state as an int enum and notify consumers""" - timestamp = super(BeamJob, self).set_state(new_state) + timestamp = super().set_state(new_state) if timestamp is not None: # Inform consumers of the new state. 
for queue in self._state_queues: @@ -389,7 +388,7 @@ class JobLogHandler(logging.Handler): } def __init__(self, log_queues): - super(JobLogHandler, self).__init__() + super().__init__() self._last_id = 0 self._logged_thread = None self._log_queues = log_queues diff --git a/sdks/python/apache_beam/runners/portability/portable_runner.py b/sdks/python/apache_beam/runners/portability/portable_runner.py index d040d2ec6e0f..3d2ebcd472e1 100644 --- a/sdks/python/apache_beam/runners/portability/portable_runner.py +++ b/sdks/python/apache_beam/runners/portability/portable_runner.py @@ -493,7 +493,7 @@ def __init__( message_stream, state_stream, cleanup_callbacks=()): - super(PipelineResult, self).__init__(beam_job_api_pb2.JobState.UNSPECIFIED) + super().__init__(beam_job_api_pb2.JobState.UNSPECIFIED) self._job_service = job_service self._job_id = job_id self._messages = [] diff --git a/sdks/python/apache_beam/runners/portability/portable_runner_test.py b/sdks/python/apache_beam/runners/portability/portable_runner_test.py index 5d4217f306e8..b0404640ac79 100644 --- a/sdks/python/apache_beam/runners/portability/portable_runner_test.py +++ b/sdks/python/apache_beam/runners/portability/portable_runner_test.py @@ -224,7 +224,7 @@ def test_draining_sdf_with_sdf_initiated_checkpointing(self): @unittest.skip("BEAM-7248") class PortableRunnerOptimized(PortableRunnerTest): def create_options(self): - options = super(PortableRunnerOptimized, self).create_options() + options = super().create_options() options.view_as(DebugOptions).add_experiment('pre_optimize=all') options.view_as(DebugOptions).add_experiment('state_cache_size=100') options.view_as(DebugOptions).add_experiment( @@ -236,7 +236,7 @@ def create_options(self): # beam:runner:executable_stage:v1. class PortableRunnerOptimizedWithoutFusion(PortableRunnerTest): def create_options(self): - options = super(PortableRunnerOptimizedWithoutFusion, self).create_options() + options = super().create_options() options.view_as(DebugOptions).add_experiment( 'pre_optimize=all_except_fusion') options.view_as(DebugOptions).add_experiment('state_cache_size=100') @@ -257,7 +257,7 @@ def tearDownClass(cls): cls._worker_server.stop(1) def create_options(self): - options = super(PortableRunnerTestWithExternalEnv, self).create_options() + options = super().create_options() options.view_as(PortableOptions).environment_type = 'EXTERNAL' options.view_as(PortableOptions).environment_config = self._worker_address return options @@ -268,7 +268,7 @@ class PortableRunnerTestWithSubprocesses(PortableRunnerTest): _use_subprocesses = True def create_options(self): - options = super(PortableRunnerTestWithSubprocesses, self).create_options() + options = super().create_options() options.view_as(PortableOptions).environment_type = ( python_urns.SUBPROCESS_SDK) options.view_as(PortableOptions).environment_config = ( @@ -297,7 +297,7 @@ class PortableRunnerTestWithSubprocessesAndMultiWorkers( _use_subprocesses = True def create_options(self): - options = super(PortableRunnerTestWithSubprocessesAndMultiWorkers, self) \ + options = super() \ .create_options() options.view_as(DirectOptions).direct_num_workers = 2 return options @@ -396,7 +396,7 @@ def hasDockerImage(): "no docker image") class PortableRunnerTestWithLocalDocker(PortableRunnerTest): def create_options(self): - options = super(PortableRunnerTestWithLocalDocker, self).create_options() + options = super().create_options() options.view_as(PortableOptions).job_endpoint = 'embed' return options diff --git 
a/sdks/python/apache_beam/runners/portability/samza_runner_test.py b/sdks/python/apache_beam/runners/portability/samza_runner_test.py index 2f60ad81b272..e946371ef962 100644 --- a/sdks/python/apache_beam/runners/portability/samza_runner_test.py +++ b/sdks/python/apache_beam/runners/portability/samza_runner_test.py @@ -126,7 +126,7 @@ def get_expansion_service(cls): return 'localhost:%s' % cls.expansion_port def create_options(self): - options = super(SamzaRunnerTest, self).create_options() + options = super().create_options() options.view_as(PortableOptions).environment_type = self.environment_type options.view_as( PortableOptions).environment_options = self.environment_options @@ -140,8 +140,7 @@ def test_metrics(self): def test_flattened_side_input(self): # Blocked on support for transcoding # https://issues.apache.org/jira/browse/BEAM-12681 - super(SamzaRunnerTest, - self).test_flattened_side_input(with_transcoding=False) + super().test_flattened_side_input(with_transcoding=False) def test_pack_combiners(self): # Stages produced by translations.pack_combiners are fused diff --git a/sdks/python/apache_beam/runners/portability/spark_runner.py b/sdks/python/apache_beam/runners/portability/spark_runner.py index bb7b4c0465e2..b1d754d89836 100644 --- a/sdks/python/apache_beam/runners/portability/spark_runner.py +++ b/sdks/python/apache_beam/runners/portability/spark_runner.py @@ -44,7 +44,7 @@ def run_pipeline(self, pipeline, options): not portable_options.environment_type and not portable_options.output_executable_path): portable_options.environment_type = 'LOOPBACK' - return super(SparkRunner, self).run_pipeline(pipeline, options) + return super().run_pipeline(pipeline, options) def default_job_server(self, options): spark_options = options.view_as(pipeline_options.SparkRunnerOptions) @@ -73,7 +73,7 @@ def create_job_service_handle(self, job_service, options): class SparkJarJobServer(job_server.JavaJarJobServer): def __init__(self, options): - super(SparkJarJobServer, self).__init__(options) + super().__init__(options) options = options.view_as(pipeline_options.SparkRunnerOptions) self._jar = options.spark_job_server_jar self._master_url = options.spark_master_url diff --git a/sdks/python/apache_beam/runners/portability/spark_runner_test.py b/sdks/python/apache_beam/runners/portability/spark_runner_test.py index ba5f103fc1b5..fb84c27a91e3 100644 --- a/sdks/python/apache_beam/runners/portability/spark_runner_test.py +++ b/sdks/python/apache_beam/runners/portability/spark_runner_test.py @@ -130,7 +130,7 @@ def set_spark_job_server_jar(cls, spark_job_server_jar): cls.spark_job_server_jar = spark_job_server_jar def create_options(self): - options = super(SparkRunnerTest, self).create_options() + options = super().create_options() options.view_as(PortableOptions).environment_type = self.environment_type options.view_as( PortableOptions).environment_options = self.environment_options @@ -175,8 +175,7 @@ def test_pardo_dynamic_timer(self): def test_flattened_side_input(self): # Blocked on support for transcoding # https://jira.apache.org/jira/browse/BEAM-7236 - super(SparkRunnerTest, - self).test_flattened_side_input(with_transcoding=False) + super().test_flattened_side_input(with_transcoding=False) def test_custom_merging_window(self): raise unittest.SkipTest("BEAM-11004") diff --git a/sdks/python/apache_beam/runners/portability/spark_uber_jar_job_server.py b/sdks/python/apache_beam/runners/portability/spark_uber_jar_job_server.py index 60b2e88357c9..2f880d208c3c 100644 --- 
a/sdks/python/apache_beam/runners/portability/spark_uber_jar_job_server.py +++ b/sdks/python/apache_beam/runners/portability/spark_uber_jar_job_server.py @@ -45,7 +45,7 @@ class SparkUberJarJobServer(abstract_job_service.AbstractJobServiceServicer): the pipeline artifacts. """ def __init__(self, rest_url, options): - super(SparkUberJarJobServer, self).__init__() + super().__init__() self._rest_url = rest_url self._artifact_port = ( options.view_as(pipeline_options.JobServerOptions).artifact_port) @@ -108,7 +108,7 @@ def __init__( pipeline, options, artifact_port=0): - super(SparkBeamJob, self).__init__( + super().__init__( executable_jar, job_id, job_name, @@ -218,7 +218,7 @@ def get_state(self): timestamp = self.set_state(state) if timestamp is None: # State has not changed since last check. Use previous timestamp. - return super(SparkBeamJob, self).get_state() + return super().get_state() else: return state, timestamp diff --git a/sdks/python/apache_beam/runners/portability/spark_uber_jar_job_server_test.py b/sdks/python/apache_beam/runners/portability/spark_uber_jar_job_server_test.py index d9bb7e686f05..6bb27b5746da 100644 --- a/sdks/python/apache_beam/runners/portability/spark_uber_jar_job_server_test.py +++ b/sdks/python/apache_beam/runners/portability/spark_uber_jar_job_server_test.py @@ -132,6 +132,7 @@ def spark_submission_status_response(state): # Prepare the job. prepare_response = plan.prepare(beam_runner_api_pb2.Pipeline()) + # pylint: disable=assignment-from-no-return retrieval_token = plan.stage( beam_runner_api_pb2.Pipeline(), prepare_response.artifact_staging_endpoint.url, diff --git a/sdks/python/apache_beam/runners/runner.py b/sdks/python/apache_beam/runners/runner.py index 1030a53ff3f3..b90eff5cf276 100644 --- a/sdks/python/apache_beam/runners/runner.py +++ b/sdks/python/apache_beam/runners/runner.py @@ -41,7 +41,7 @@ __all__ = ['PipelineRunner', 'PipelineState', 'PipelineResult'] _RUNNER_MAP = { - path.split('.')[-1].lower(): path + path.rsplit('.', maxsplit=1)[-1].lower(): path for path in StandardOptions.ALL_KNOWN_RUNNERS } diff --git a/sdks/python/apache_beam/runners/worker/bundle_processor.py b/sdks/python/apache_beam/runners/worker/bundle_processor.py index 497d613f46f9..532b7dd6960d 100644 --- a/sdks/python/apache_beam/runners/worker/bundle_processor.py +++ b/sdks/python/apache_beam/runners/worker/bundle_processor.py @@ -129,8 +129,7 @@ def __init__(self, data_channel # type: data_plane.DataChannel ): # type: (...) -> None - super(RunnerIOOperation, - self).__init__(name_context, None, counter_factory, state_sampler) + super().__init__(name_context, None, counter_factory, state_sampler) self.windowed_coder = windowed_coder self.windowed_coder_impl = windowed_coder.get_impl() # transform_id represents the consumer for the bytes in the data plane for a @@ -158,7 +157,7 @@ def process(self, windowed_value): def finish(self): # type: () -> None self.output_stream.close() - super(DataOutputOperation, self).finish() + super().finish() class DataInputOperation(RunnerIOOperation): @@ -175,7 +174,7 @@ def __init__(self, data_channel # type: data_plane.GrpcClientDataChannel ): # type: (...) 
-> None - super(DataInputOperation, self).__init__( + super().__init__( operation_name, step_name, consumers, @@ -201,7 +200,7 @@ def __init__(self, def start(self): # type: () -> None - super(DataInputOperation, self).start() + super().start() with self.splitting_lock: self.started = True @@ -223,7 +222,7 @@ def process_encoded(self, encoded_windowed_values): def monitoring_infos(self, transform_id, tag_to_pcollection_id): # type: (str, Dict[str, str]) -> Dict[FrozenSet, metrics_pb2.MonitoringInfo] - all_monitoring_infos = super(DataInputOperation, self).monitoring_infos( + all_monitoring_infos = super().monitoring_infos( transform_id, tag_to_pcollection_id) read_progress_info = monitoring_infos.int64_counter( monitoring_infos.DATA_CHANNEL_READ_INDEX, @@ -297,7 +296,7 @@ def is_valid_split_point(index): element_primaries, element_residuals = split return index - 1, element_primaries, element_residuals, index + 1 # Otherwise, split at the closest element boundary. - # pylint: disable=round-builtin + # pylint: disable=bad-option-value stop_index = index + max(1, int(round(current_element_progress + keep))) if allowed_split_points and stop_index not in allowed_split_points: # Choose the closest allowed split point. @@ -330,7 +329,7 @@ def reset(self): with self.splitting_lock: self.index = -1 self.stop = float('inf') - super(DataInputOperation, self).reset() + super().reset() class _StateBackedIterable(object): diff --git a/sdks/python/apache_beam/runners/worker/data_plane.py b/sdks/python/apache_beam/runners/worker/data_plane.py index d395cee24a08..4baca681d9ed 100644 --- a/sdks/python/apache_beam/runners/worker/data_plane.py +++ b/sdks/python/apache_beam/runners/worker/data_plane.py @@ -76,7 +76,7 @@ def __init__( close_callback=None # type: Optional[Callable[[bytes], None]] ): # type: (...) -> None - super(ClosableOutputStream, self).__init__() + super().__init__() self._close_callback = close_callback def close(self): @@ -117,7 +117,7 @@ def __init__( flush_callback=None, # type: Optional[Callable[[bytes], None]] size_flush_threshold=_DEFAULT_SIZE_FLUSH_THRESHOLD # type: int ): - super(SizeBasedBufferingClosableOutputStream, self).__init__(close_callback) + super().__init__(close_callback) self._flush_callback = flush_callback self._size_flush_threshold = size_flush_threshold @@ -147,8 +147,7 @@ def __init__( time_flush_threshold_ms=_DEFAULT_TIME_FLUSH_THRESHOLD_MS # type: int ): # type: (...) -> None - super(TimeBasedBufferingClosableOutputStream, - self).__init__(close_callback, flush_callback, size_flush_threshold) + super().__init__(close_callback, flush_callback, size_flush_threshold) assert time_flush_threshold_ms > 0 self._time_flush_threshold_ms = time_flush_threshold_ms self._flush_lock = threading.Lock() @@ -159,7 +158,7 @@ def __init__( def flush(self): # type: () -> None with self._flush_lock: - super(TimeBasedBufferingClosableOutputStream, self).flush() + super().flush() def close(self): # type: () -> None @@ -168,7 +167,7 @@ def close(self): if self._periodic_flusher: self._periodic_flusher.cancel() self._periodic_flusher = None - super(TimeBasedBufferingClosableOutputStream, self).close() + super().close() def _schedule_periodic_flush(self): # type: () -> None @@ -663,7 +662,7 @@ def __init__( data_buffer_time_limit_ms=0 # type: int ): # type: (...) 
-> None - super(GrpcClientDataChannel, self).__init__(data_buffer_time_limit_ms) + super().__init__(data_buffer_time_limit_ms) self.set_inputs(data_stub.Data(self._write_outputs())) diff --git a/sdks/python/apache_beam/runners/worker/log_handler.py b/sdks/python/apache_beam/runners/worker/log_handler.py index 46157db5d097..75cdcf5fb85f 100644 --- a/sdks/python/apache_beam/runners/worker/log_handler.py +++ b/sdks/python/apache_beam/runners/worker/log_handler.py @@ -72,7 +72,7 @@ class FnApiLogRecordHandler(logging.Handler): def __init__(self, log_service_descriptor): # type: (endpoints_pb2.ApiServiceDescriptor) -> None - super(FnApiLogRecordHandler, self).__init__() + super().__init__() self._alive = True self._dropped_logs = 0 @@ -150,7 +150,7 @@ def close(self): self._reader.join() self.release() # Unregister this handler. - super(FnApiLogRecordHandler, self).close() + super().close() except Exception: # Log rather than raising exceptions, to avoid clobbering # underlying errors that may have caused this to close diff --git a/sdks/python/apache_beam/runners/worker/logger.py b/sdks/python/apache_beam/runners/worker/logger.py index e171caf7710b..6a86e001a1aa 100644 --- a/sdks/python/apache_beam/runners/worker/logger.py +++ b/sdks/python/apache_beam/runners/worker/logger.py @@ -43,7 +43,7 @@ class _PerThreadWorkerData(threading.local): def __init__(self): # type: () -> None - super(_PerThreadWorkerData, self).__init__() + super().__init__() # in the list, as going up and down all the way to zero incurs several # reallocations. self.stack = [] # type: List[Dict[str, Any]] @@ -74,7 +74,7 @@ class JsonLogFormatter(logging.Formatter): """A JSON formatter class as expected by the logging standard module.""" def __init__(self, job_id, worker_id): # type: (str, str) -> None - super(JsonLogFormatter, self).__init__() + super().__init__() self.job_id = job_id self.worker_id = worker_id diff --git a/sdks/python/apache_beam/runners/worker/opcounters.py b/sdks/python/apache_beam/runners/worker/opcounters.py index bafbf9f6c645..fad54aaeaf82 100644 --- a/sdks/python/apache_beam/runners/worker/opcounters.py +++ b/sdks/python/apache_beam/runners/worker/opcounters.py @@ -98,7 +98,7 @@ def __exit__(self, exception_type, exception_value, traceback): class NoOpTransformIOCounter(TransformIOCounter): """All operations for IO tracking are no-ops.""" def __init__(self): - super(NoOpTransformIOCounter, self).__init__(None, None) + super().__init__(None, None) def update_current_step(self): pass @@ -148,7 +148,7 @@ def __init__(self, side input, and input_index is the index of the PCollectionView within the list of inputs. 
""" - super(SideInputReadCounter, self).__init__(counter_factory, state_sampler) + super().__init__(counter_factory, state_sampler) self.declaring_step = declaring_step self.input_index = input_index diff --git a/sdks/python/apache_beam/runners/worker/operations.py b/sdks/python/apache_beam/runners/worker/operations.py index 8689b0c8ad5a..1de42f9908de 100644 --- a/sdks/python/apache_beam/runners/worker/operations.py +++ b/sdks/python/apache_beam/runners/worker/operations.py @@ -21,6 +21,7 @@ """Worker operations executor.""" # pytype: skip-file +# pylint: disable=super-with-arguments import collections import logging diff --git a/sdks/python/apache_beam/runners/worker/sideinputs_test.py b/sdks/python/apache_beam/runners/worker/sideinputs_test.py index f609cf4f814e..2e89b866986f 100644 --- a/sdks/python/apache_beam/runners/worker/sideinputs_test.py +++ b/sdks/python/apache_beam/runners/worker/sideinputs_test.py @@ -44,7 +44,7 @@ def reader(self): class FakeSourceReader(observable.ObservableMixin): def __init__(self, items, notify_observers=False): - super(FakeSourceReader, self).__init__() + super().__init__() self.items = items self.entered = False self.exited = False diff --git a/sdks/python/apache_beam/runners/worker/statesampler.py b/sdks/python/apache_beam/runners/worker/statesampler.py index 7230b248e444..2d975ba4b515 100644 --- a/sdks/python/apache_beam/runners/worker/statesampler.py +++ b/sdks/python/apache_beam/runners/worker/statesampler.py @@ -104,7 +104,7 @@ def __init__(self, self.tracked_thread = None # type: Optional[threading.Thread] self.finished = False self.started = False - super(StateSampler, self).__init__(sampling_period_ms) + super().__init__(sampling_period_ms) @property def stage_name(self): @@ -114,7 +114,7 @@ def stage_name(self): def stop(self): # type: () -> None set_current_tracker(None) - super(StateSampler, self).stop() + super().stop() def stop_if_still_running(self): # type: () -> None @@ -125,7 +125,7 @@ def start(self): # type: () -> None self.tracked_thread = threading.current_thread() set_current_tracker(self) - super(StateSampler, self).start() + super().start() self.started = True def get_info(self): @@ -171,12 +171,8 @@ def scoped_state(self, else: output_counter = self._counter_factory.get_counter( counter_name, Counter.SUM) - self._states_by_name[counter_name] = super(StateSampler, - self)._scoped_state( - counter_name, - name_context, - output_counter, - metrics_container) + self._states_by_name[counter_name] = super()._scoped_state( + counter_name, name_context, output_counter, metrics_container) return self._states_by_name[counter_name] def commit_counters(self): diff --git a/sdks/python/apache_beam/testing/benchmarks/nexmark/queries/winning_bids.py b/sdks/python/apache_beam/testing/benchmarks/nexmark/queries/winning_bids.py index 94f84b6d20dd..52ffd483a840 100644 --- a/sdks/python/apache_beam/testing/benchmarks/nexmark/queries/winning_bids.py +++ b/sdks/python/apache_beam/testing/benchmarks/nexmark/queries/winning_bids.py @@ -45,7 +45,7 @@ class AuctionOrBidWindow(IntervalWindow): """Windows for open auctions and bids.""" def __init__(self, start, end, auction_id, is_auction_window): - super(AuctionOrBidWindow, self).__init__(start, end) + super().__init__(start, end) self.auction = auction_id self.is_auction_window = is_auction_window diff --git a/sdks/python/apache_beam/testing/load_tests/co_group_by_key_test.py b/sdks/python/apache_beam/testing/load_tests/co_group_by_key_test.py index e7d212baffab..617e00d40f26 100644 --- 
a/sdks/python/apache_beam/testing/load_tests/co_group_by_key_test.py +++ b/sdks/python/apache_beam/testing/load_tests/co_group_by_key_test.py @@ -90,7 +90,7 @@ class CoGroupByKeyTest(LoadTest): CO_INPUT_TAG = 'pc2' def __init__(self): - super(CoGroupByKeyTest, self).__init__() + super().__init__() self.co_input_options = json.loads( self.pipeline.get_option('co_input_options')) self.iterations = self.get_option_or_default('iterations', 1) diff --git a/sdks/python/apache_beam/testing/load_tests/combine_test.py b/sdks/python/apache_beam/testing/load_tests/combine_test.py index d3a372f645d4..9452730b88f2 100644 --- a/sdks/python/apache_beam/testing/load_tests/combine_test.py +++ b/sdks/python/apache_beam/testing/load_tests/combine_test.py @@ -82,7 +82,7 @@ class CombineTest(LoadTest): def __init__(self): - super(CombineTest, self).__init__() + super().__init__() self.fanout = self.get_option_or_default('fanout', 1) try: self.top_count = int(self.pipeline.get_option('top_count')) diff --git a/sdks/python/apache_beam/testing/load_tests/group_by_key_test.py b/sdks/python/apache_beam/testing/load_tests/group_by_key_test.py index 69ea74c8c262..38724fc17391 100644 --- a/sdks/python/apache_beam/testing/load_tests/group_by_key_test.py +++ b/sdks/python/apache_beam/testing/load_tests/group_by_key_test.py @@ -81,7 +81,7 @@ class GroupByKeyTest(LoadTest): def __init__(self): - super(GroupByKeyTest, self).__init__() + super().__init__() self.fanout = self.get_option_or_default('fanout', 1) self.iterations = self.get_option_or_default('iterations', 1) diff --git a/sdks/python/apache_beam/testing/load_tests/load_test_metrics_utils.py b/sdks/python/apache_beam/testing/load_tests/load_test_metrics_utils.py index f6d33409c7e9..7b975bbe8feb 100644 --- a/sdks/python/apache_beam/testing/load_tests/load_test_metrics_utils.py +++ b/sdks/python/apache_beam/testing/load_tests/load_test_metrics_utils.py @@ -48,7 +48,7 @@ from apache_beam.utils.timestamp import Timestamp try: - from google.cloud import bigquery # type: ignore + from google.cloud import bigquery # type: ignore[attr-defined] from google.cloud.bigquery.schema import SchemaField from google.cloud.exceptions import NotFound except ImportError: @@ -319,8 +319,7 @@ class CounterMetric(Metric): """ def __init__(self, counter_metric, submit_timestamp, metric_id): value = counter_metric.result - super(CounterMetric, - self).__init__(submit_timestamp, metric_id, value, counter_metric) + super().__init__(submit_timestamp, metric_id, value, counter_metric) class DistributionMetric(Metric): @@ -342,7 +341,7 @@ def __init__(self, dist_metric, submit_timestamp, metric_id, metric_type): 'not None.' 
% custom_label _LOGGER.debug(msg) raise ValueError(msg) - super(DistributionMetric, self) \ + super() \ .__init__(submit_timestamp, metric_id, value, dist_metric, custom_label) @@ -361,8 +360,7 @@ def __init__(self, runtime_list, metric_id): # out of many steps label = runtime_list[0].key.metric.namespace + \ '_' + RUNTIME_METRIC - super(RuntimeMetric, - self).__init__(submit_timestamp, metric_id, value, None, label) + super().__init__(submit_timestamp, metric_id, value, None, label) def _prepare_runtime_metrics(self, distributions): min_values = [] diff --git a/sdks/python/apache_beam/testing/load_tests/microbenchmarks_test.py b/sdks/python/apache_beam/testing/load_tests/microbenchmarks_test.py index 2dff7404f8fe..34d4080c072f 100644 --- a/sdks/python/apache_beam/testing/load_tests/microbenchmarks_test.py +++ b/sdks/python/apache_beam/testing/load_tests/microbenchmarks_test.py @@ -60,7 +60,7 @@ class MicroBenchmarksLoadTest(LoadTest): def __init__(self): - super(MicroBenchmarksLoadTest, self).__init__() + super().__init__() def test(self): self.extra_metrics.update(self._run_fn_api_runner_microbenchmark()) diff --git a/sdks/python/apache_beam/testing/load_tests/pardo_test.py b/sdks/python/apache_beam/testing/load_tests/pardo_test.py index 6722fe33d8ab..989ed2168a66 100644 --- a/sdks/python/apache_beam/testing/load_tests/pardo_test.py +++ b/sdks/python/apache_beam/testing/load_tests/pardo_test.py @@ -88,7 +88,7 @@ class ParDoTest(LoadTest): def __init__(self): - super(ParDoTest, self).__init__() + super().__init__() self.iterations = self.get_option_or_default('iterations') self.number_of_counters = self.get_option_or_default( 'number_of_counters', 1) diff --git a/sdks/python/apache_beam/testing/load_tests/sideinput_test.py b/sdks/python/apache_beam/testing/load_tests/sideinput_test.py index f77e35c3cda9..745d961d2aac 100644 --- a/sdks/python/apache_beam/testing/load_tests/sideinput_test.py +++ b/sdks/python/apache_beam/testing/load_tests/sideinput_test.py @@ -79,7 +79,7 @@ class SideInputTest(LoadTest): SDF_INITIAL_ELEMENTS = 1000 def __init__(self): - super(SideInputTest, self).__init__() + super().__init__() self.windows = self.get_option_or_default('window_count', default=1) self.access_percentage = self.get_option_or_default( diff --git a/sdks/python/apache_beam/testing/metric_result_matchers.py b/sdks/python/apache_beam/testing/metric_result_matchers.py index a4a7f69290a8..3c0386535213 100644 --- a/sdks/python/apache_beam/testing/metric_result_matchers.py +++ b/sdks/python/apache_beam/testing/metric_result_matchers.py @@ -80,8 +80,8 @@ def __init__( self.step = _matcher_or_equal_to(step) self.attempted = _matcher_or_equal_to(attempted) self.committed = _matcher_or_equal_to(committed) - labels = labels or dict() - self.label_matchers = dict() + labels = labels or {} + self.label_matchers = {} for (k, v) in labels.items(): self.label_matchers[_matcher_or_equal_to(k)] = _matcher_or_equal_to(v) diff --git a/sdks/python/apache_beam/testing/metric_result_matchers_test.py b/sdks/python/apache_beam/testing/metric_result_matchers_test.py index 9f4d4086c087..3657356a9fe0 100644 --- a/sdks/python/apache_beam/testing/metric_result_matchers_test.py +++ b/sdks/python/apache_beam/testing/metric_result_matchers_test.py @@ -79,7 +79,7 @@ def _create_metric_result(data_dict): step = data_dict['step'] if 'step' in data_dict else '' - labels = data_dict['labels'] if 'labels' in data_dict else dict() + labels = data_dict['labels'] if 'labels' in data_dict else {} values = {} for key in ['attempted', 
'committed']: if key in data_dict: diff --git a/sdks/python/apache_beam/testing/test_pipeline.py b/sdks/python/apache_beam/testing/test_pipeline.py index 910f14997536..9f70e4d5c12e 100644 --- a/sdks/python/apache_beam/testing/test_pipeline.py +++ b/sdks/python/apache_beam/testing/test_pipeline.py @@ -106,10 +106,10 @@ def __init__( self.blocking = blocking if options is None: options = PipelineOptions(self.options_list) - super(TestPipeline, self).__init__(runner, options) + super().__init__(runner, options) def run(self, test_runner_api=True): - result = super(TestPipeline, self).run( + result = super().run( test_runner_api=( False if self.not_use_test_runner_api else test_runner_api)) if self.blocking: diff --git a/sdks/python/apache_beam/testing/test_stream.py b/sdks/python/apache_beam/testing/test_stream.py index d655a9021946..734ca8ee9dda 100644 --- a/sdks/python/apache_beam/testing/test_stream.py +++ b/sdks/python/apache_beam/testing/test_stream.py @@ -286,7 +286,7 @@ def __init__( endpoint: (str) a URL locating a TestStreamService. """ - super(TestStream, self).__init__() + super().__init__() assert coder is not None self.coder = coder diff --git a/sdks/python/apache_beam/testing/test_utils.py b/sdks/python/apache_beam/testing/test_utils.py index b58bfcde8766..46b82bb98686 100644 --- a/sdks/python/apache_beam/testing/test_utils.py +++ b/sdks/python/apache_beam/testing/test_utils.py @@ -23,7 +23,7 @@ # pytype: skip-file import hashlib -import imp +import importlib import os import shutil import tempfile @@ -112,12 +112,12 @@ def patched_retry_with_exponential_backoff(**kwargs): side_effect=patched_retry_with_exponential_backoff).start() # Reload module after patching. - imp.reload(module) + importlib.reload(module) def remove_patches(): patch.stopall() # Reload module again after removing patch. - imp.reload(module) + importlib.reload(module) testcase.addCleanup(remove_patches) diff --git a/sdks/python/apache_beam/testing/util.py b/sdks/python/apache_beam/testing/util.py index c42f90e62101..8c9181289599 100644 --- a/sdks/python/apache_beam/testing/util.py +++ b/sdks/python/apache_beam/testing/util.py @@ -96,11 +96,11 @@ def match(windowed_value): actual = windowed_value.value window_key = windowed_value.windows[0] try: - expected = _expected[window_key] + _expected[window_key] except KeyError: raise BeamAssertException( 'Failed assert: window {} not found in any expected ' \ - 'windows {}'.format(window_key, list(_expected.keys()))) + 'windows {}'.format(window_key, list(_expected.keys())))\ # Remove any matched elements from the window. This is used later on to # assert that all elements in the window were matched with actual @@ -110,7 +110,7 @@ def match(windowed_value): except ValueError: raise BeamAssertException( 'Failed assert: element {} not found in window ' \ - '{}:{}'.format(actual, window_key, _expected[window_key])) + '{}:{}'.format(actual, window_key, _expected[window_key]))\ # Run the matcher for each window and value pair. Fails if the # windowed_value is not a TestWindowedValue. 
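
Note: the test_utils.py hunk above swaps the long-deprecated imp module for importlib, whose reload() re-executes a module body and returns the same module object. A minimal, self-contained sketch of that pattern, using the standard-library json module purely for illustration (it is not part of the patch):

    import importlib
    import json

    # importlib.reload re-runs the module's top-level code and returns the
    # module object, so decorators or other patching applied at import time
    # take effect again, which is what patch_retry/remove_patches rely on.
    json = importlib.reload(json)
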
diff --git a/sdks/python/apache_beam/tools/fn_api_runner_microbenchmark.py b/sdks/python/apache_beam/tools/fn_api_runner_microbenchmark.py index a7bf7cbae317..a73b2282e3e8 100644 --- a/sdks/python/apache_beam/tools/fn_api_runner_microbenchmark.py +++ b/sdks/python/apache_beam/tools/fn_api_runner_microbenchmark.py @@ -61,7 +61,6 @@ import random import apache_beam as beam -import apache_beam.typehints.typehints as typehints from apache_beam.coders import VarIntCoder from apache_beam.runners.portability.fn_api_runner import FnApiRunner from apache_beam.tools import utils @@ -69,6 +68,7 @@ from apache_beam.transforms.userstate import SetStateSpec from apache_beam.transforms.userstate import TimerSpec from apache_beam.transforms.userstate import on_timer +from apache_beam.typehints import typehints NUM_PARALLEL_STAGES = 7 diff --git a/sdks/python/apache_beam/tools/teststream_microbenchmark.py b/sdks/python/apache_beam/tools/teststream_microbenchmark.py index 4b00de043df1..7c5bb6135b5c 100644 --- a/sdks/python/apache_beam/tools/teststream_microbenchmark.py +++ b/sdks/python/apache_beam/tools/teststream_microbenchmark.py @@ -45,12 +45,12 @@ import random import apache_beam as beam -import apache_beam.typehints.typehints as typehints from apache_beam import WindowInto from apache_beam.runners import DirectRunner from apache_beam.testing.test_stream import TestStream from apache_beam.tools import utils from apache_beam.transforms.window import FixedWindows +from apache_beam.typehints import typehints NUM_PARALLEL_STAGES = 7 diff --git a/sdks/python/apache_beam/transforms/combinefn_lifecycle_pipeline.py b/sdks/python/apache_beam/transforms/combinefn_lifecycle_pipeline.py index 1964082e8dab..51f66b3c1bb0 100644 --- a/sdks/python/apache_beam/transforms/combinefn_lifecycle_pipeline.py +++ b/sdks/python/apache_beam/transforms/combinefn_lifecycle_pipeline.py @@ -38,7 +38,7 @@ class CallSequenceEnforcingCombineFn(beam.CombineFn): instances = set() # type: Set[CallSequenceEnforcingCombineFn] def __init__(self): - super(CallSequenceEnforcingCombineFn, self).__init__() + super().__init__() self._setup_called = False self._teardown_called = False diff --git a/sdks/python/apache_beam/transforms/combiners.py b/sdks/python/apache_beam/transforms/combiners.py index 41ad3df14178..65e8b047b730 100644 --- a/sdks/python/apache_beam/transforms/combiners.py +++ b/sdks/python/apache_beam/transforms/combiners.py @@ -58,7 +58,7 @@ class CombinerWithoutDefaults(ptransform.PTransform): """Super class to inherit without_defaults to built-in Combiners.""" def __init__(self, has_defaults=True): - super(CombinerWithoutDefaults, self).__init__() + super().__init__() self.has_defaults = has_defaults def with_defaults(self, has_defaults=True): @@ -191,7 +191,7 @@ def __init__(self, n, key=None, reverse=False): reverse: (optional) whether to order things smallest to largest, rather than largest to smallest """ - super(Top.Of, self).__init__() + super().__init__() self._n = n self._key = key self._reverse = reverse @@ -519,7 +519,7 @@ def default_label(self): class Smallest(TopCombineFn): def __init__(self, n): - super(Smallest, self).__init__(n, reverse=True) + super().__init__(n, reverse=True) def default_label(self): return 'Smallest(%s)' % self._n @@ -535,7 +535,7 @@ class Sample(object): class FixedSizeGlobally(CombinerWithoutDefaults): """Sample n elements from the input PCollection without replacement.""" def __init__(self, n): - super(Sample.FixedSizeGlobally, self).__init__() + super().__init__() self._n = n def 
expand(self, pcoll): @@ -573,7 +573,7 @@ def default_label(self): class SampleCombineFn(core.CombineFn): """CombineFn for all Sample transforms.""" def __init__(self, n): - super(SampleCombineFn, self).__init__() + super().__init__() # Most of this combiner's work is done by a TopCombineFn. We could just # subclass TopCombineFn to make this class, but since sampling is not # really a kind of Top operation, we use a TopCombineFn instance as a @@ -659,10 +659,9 @@ def compact(self, accumulator, *args, **kwargs): ] def extract_output(self, accumulator, *args, **kwargs): - return tuple([ + return tuple( c.extract_output(a, *args, **kwargs) for c, - a in zip(self._combiners, accumulator) - ]) + a in zip(self._combiners, accumulator)) def teardown(self, *args, **kwargs): for c in reversed(self._combiners): @@ -753,7 +752,7 @@ def expand(self, pcoll): class ToDictCombineFn(core.CombineFn): """CombineFn for to_dict.""" def create_accumulator(self): - return dict() + return {} def add_input(self, accumulator, element): key, value = element @@ -761,7 +760,7 @@ def add_input(self, accumulator, element): return accumulator def merge_accumulators(self, accumulators): - result = dict() + result = {} for a in accumulators: result.update(a) return result diff --git a/sdks/python/apache_beam/transforms/core.py b/sdks/python/apache_beam/transforms/core.py index 25b05df14164..c91a7dbb924c 100644 --- a/sdks/python/apache_beam/transforms/core.py +++ b/sdks/python/apache_beam/transforms/core.py @@ -731,7 +731,7 @@ def __init__(self, fn, fullargspec=None): # For cases such as set / list where fn is callable but not a function self.process = lambda element: fn(element) - super(CallableWrapperDoFn, self).__init__() + super().__init__() def display_data(self): # If the callable has a name, then it's likely a function, and @@ -1009,7 +1009,7 @@ def __init__(self, fn, buffer_size=_DEFAULT_BUFFER_SIZE): if not callable(fn): raise TypeError('Expected a callable object instead of: %r' % fn) - super(CallableWrapperCombineFn, self).__init__() + super().__init__() self._fn = fn self._buffer_size = buffer_size @@ -1210,7 +1210,7 @@ class ParDo(PTransformWithSideInputs): exact positions where they appear in the argument lists. """ def __init__(self, fn, *args, **kwargs): - super(ParDo, self).__init__(fn, *args, **kwargs) + super().__init__(fn, *args, **kwargs) # TODO(robertwb): Change all uses of the dofn attribute to use fn instead. self.dofn = self.fn self.output_tags = set() # type: typing.Set[str] @@ -1420,7 +1420,7 @@ def _add_type_constraint_from_consumer(self, full_label, input_type_hints): class _MultiParDo(PTransform): def __init__(self, do_transform, tags, main_tag): - super(_MultiParDo, self).__init__(do_transform.label) + super().__init__(do_transform.label) self._do_transform = do_transform self._tags = tags self._main_tag = main_tag @@ -1848,7 +1848,7 @@ def __init__(self, fn, *args, **kwargs): 'CombineGlobally can be used only with combineFn objects. ' 'Received %r instead.' 
% (fn)) - super(CombineGlobally, self).__init__() + super().__init__() self.fn = fn self.args = args self.kwargs = kwargs @@ -2136,7 +2136,7 @@ def __init__( combinefn, # type: CombineFn runtime_type_check, # type: bool ): - super(CombineValuesDoFn, self).__init__() + super().__init__() self.combinefn = combinefn self.runtime_type_check = runtime_type_check @@ -2795,7 +2795,7 @@ def __init__( accumulation_mode, timestamp_combiner, allowed_lateness) - super(WindowInto, self).__init__(self.WindowIntoFn(self.windowing)) + super().__init__(self.WindowIntoFn(self.windowing)) def get_windowing(self, unused_inputs): # type: (typing.Any) -> Windowing @@ -2811,7 +2811,7 @@ def expand(self, pcoll): output_type = input_type self.with_input_types(input_type) self.with_output_types(output_type) - return super(WindowInto, self).expand(pcoll) + return super().expand(pcoll) # typing: PTransform base class does not accept extra_kwargs def to_runner_api_parameter(self, context, **extra_kwargs): # type: ignore[override] @@ -2859,7 +2859,7 @@ class Flatten(PTransform): provide pipeline information and should be considered mandatory. """ def __init__(self, **kwargs): - super(Flatten, self).__init__() + super().__init__() self.pipeline = kwargs.pop( 'pipeline', None) # type: typing.Optional[Pipeline] if kwargs: @@ -2905,7 +2905,7 @@ def __init__(self, values, reshuffle=True): Args: values: An object of values for the PCollection """ - super(Create, self).__init__() + super().__init__() if isinstance(values, (str, bytes)): raise TypeError( 'PTransform Create: Refusing to treat string as ' diff --git a/sdks/python/apache_beam/transforms/deduplicate_test.py b/sdks/python/apache_beam/transforms/deduplicate_test.py index b6ec53dacef0..392dac20fb82 100644 --- a/sdks/python/apache_beam/transforms/deduplicate_test.py +++ b/sdks/python/apache_beam/transforms/deduplicate_test.py @@ -46,7 +46,7 @@ class DeduplicateTest(unittest.TestCase): def __init__(self, *args, **kwargs): self.runner = None self.options = None - super(DeduplicateTest, self).__init__(*args, **kwargs) + super().__init__(*args, **kwargs) def set_runner(self, runner): self.runner = runner diff --git a/sdks/python/apache_beam/transforms/external.py b/sdks/python/apache_beam/transforms/external.py index f8e6ddc5487b..7b3964a52c35 100644 --- a/sdks/python/apache_beam/transforms/external.py +++ b/sdks/python/apache_beam/transforms/external.py @@ -140,7 +140,7 @@ def __init__(self, tuple_instance): """ :param tuple_instance: an instance of a typing.NamedTuple """ - super(NamedTupleBasedPayloadBuilder, self).__init__() + super().__init__() self._tuple_instance = tuple_instance def _get_named_tuple_instance(self): @@ -636,7 +636,7 @@ class ExpansionAndArtifactRetrievalStub( def __init__(self, channel, **kwargs): self._channel = channel self._kwargs = kwargs - super(ExpansionAndArtifactRetrievalStub, self).__init__(channel, **kwargs) + super().__init__(channel, **kwargs) def artifact_service(self): return beam_artifact_api_pb2_grpc.ArtifactRetrievalServiceStub( @@ -686,7 +686,7 @@ class BeamJarExpansionService(JavaJarExpansionService): def __init__(self, gradle_target, extra_args=None, gradle_appendix=None): path_to_jar = subprocess_server.JavaJarServer.path_to_beam_jar( gradle_target, gradle_appendix) - super(BeamJarExpansionService, self).__init__(path_to_jar, extra_args) + super().__init__(path_to_jar, extra_args) def memoize(func): diff --git a/sdks/python/apache_beam/transforms/external_test.py b/sdks/python/apache_beam/transforms/external_test.py index 
a528384575dc..83e72461139b 100644 --- a/sdks/python/apache_beam/transforms/external_test.py +++ b/sdks/python/apache_beam/transforms/external_test.py @@ -335,7 +335,7 @@ def __init__( mapping: typing.Mapping[str, float], optional_integer: typing.Optional[int] = None, expansion_service=None): - super(AnnotatedTransform, self).__init__( + super().__init__( self.URN, AnnotationBasedPayloadBuilder( self, @@ -363,7 +363,7 @@ def __init__( mapping: typehints.Dict[str, float], optional_integer: typehints.Optional[int] = None, expansion_service=None): - super(AnnotatedTransform, self).__init__( + super().__init__( self.URN, AnnotationBasedPayloadBuilder( self, diff --git a/sdks/python/apache_beam/transforms/periodicsequence.py b/sdks/python/apache_beam/transforms/periodicsequence.py index 9fc902992d40..797632361c8a 100644 --- a/sdks/python/apache_beam/transforms/periodicsequence.py +++ b/sdks/python/apache_beam/transforms/periodicsequence.py @@ -19,9 +19,9 @@ import time import apache_beam as beam -import apache_beam.runners.sdf_utils as sdf_utils from apache_beam.io.restriction_trackers import OffsetRange from apache_beam.io.restriction_trackers import OffsetRestrictionTracker +from apache_beam.runners import sdf_utils from apache_beam.transforms import core from apache_beam.transforms import window from apache_beam.transforms.ptransform import PTransform @@ -124,6 +124,8 @@ class PeriodicSequence(PTransform): PeriodicSequence guarantees that elements would not be output prior to given runtime timestamp. ''' + + # pylint: disable=unused-private-member def __init_(self): pass diff --git a/sdks/python/apache_beam/transforms/ptransform.py b/sdks/python/apache_beam/transforms/ptransform.py index b314abd20c90..40626aa80043 100644 --- a/sdks/python/apache_beam/transforms/ptransform.py +++ b/sdks/python/apache_beam/transforms/ptransform.py @@ -202,8 +202,7 @@ def __reduce__(self): class _MaterializedDoOutputsTuple(pvalue.DoOutputsTuple): def __init__(self, deferred, results_by_tag): - super(_MaterializedDoOutputsTuple, - self).__init__(None, None, deferred._tags, deferred._main_tag) + super().__init__(None, None, deferred._tags, deferred._main_tag) self._deferred = deferred self._results_by_tag = results_by_tag @@ -351,7 +350,7 @@ class PTransform(WithTypeHints, HasDisplayData): def __init__(self, label=None): # type: (Optional[str]) -> None - super(PTransform, self).__init__() + super().__init__() self.label = label # type: ignore # https://github.com/python/mypy/issues/3004 @property @@ -402,7 +401,7 @@ def with_input_types(self, input_type_hint): input_type_hint) validate_composite_type_param( input_type_hint, 'Type hints for a PTransform') - return super(PTransform, self).with_input_types(input_type_hint) + return super().with_input_types(input_type_hint) def with_output_types(self, type_hint): """Annotates the output type of a :class:`PTransform` with a type-hint. @@ -423,7 +422,7 @@ def with_output_types(self, type_hint): """ type_hint = native_type_compatibility.convert_to_beam_type(type_hint) validate_composite_type_param(type_hint, 'Type hints for a PTransform') - return super(PTransform, self).with_output_types(type_hint) + return super().with_output_types(type_hint) def with_resource_hints(self, **kwargs): # type: (...) -> PTransform """Adds resource hints to the :class:`PTransform`. 
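
Note: nearly every hunk in ptransform.py, like most of this patch, addresses the same pylint check, super-with-arguments (the one disabled file-wide in operations.py earlier): it flags the redundant Python 2 spelling super(Class, self), since inside a method the zero-argument super() resolves to the same superclass binding. A small before/after sketch with placeholder class names, not code from the patch:

    class Base:
        def __init__(self, label):
            self.label = label

    class Child(Base):
        def __init__(self):
            # Old spelling, still valid but flagged by pylint 2.x:
            #   super(Child, self).__init__('child')
            # Zero-argument form used throughout this patch:
            super().__init__('child')

    assert Child().label == 'child'
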
@@ -806,7 +805,7 @@ def _unpickle_transform(unused_ptransform, pickled_bytes, unused_context): class _ChainedPTransform(PTransform): def __init__(self, *parts): # type: (*PTransform) -> None - super(_ChainedPTransform, self).__init__(label=self._chain_label(parts)) + super().__init__(label=self._chain_label(parts)) self._parts = parts def _chain_label(self, parts): @@ -842,10 +841,10 @@ def __init__(self, fn, *args, **kwargs): raise ValueError('Use %s() not %s.' % (fn.__name__, fn.__name__)) self.fn = self.make_fn(fn, bool(args or kwargs)) # Now that we figure out the label, initialize the super-class. - super(PTransformWithSideInputs, self).__init__() + super().__init__() - if (any([isinstance(v, pvalue.PCollection) for v in args]) or - any([isinstance(v, pvalue.PCollection) for v in kwargs.values()])): + if (any(isinstance(v, pvalue.PCollection) for v in args) or + any(isinstance(v, pvalue.PCollection) for v in kwargs.values())): raise error.SideInputError( 'PCollection used directly as side input argument. Specify ' 'AsIter(pcollection) or AsSingleton(pcollection) to indicate how the ' @@ -898,7 +897,7 @@ def with_input_types( :class:`PTransform` object. This allows chaining type-hinting related methods. """ - super(PTransformWithSideInputs, self).with_input_types(input_type_hint) + super().with_input_types(input_type_hint) side_inputs_arg_hints = native_type_compatibility.convert_to_beam_types( side_inputs_arg_hints) @@ -964,7 +963,7 @@ def default_label(self): class _PTransformFnPTransform(PTransform): """A class wrapper for a function-based transform.""" def __init__(self, fn, *args, **kwargs): - super(_PTransformFnPTransform, self).__init__() + super().__init__() self._fn = fn self._args = args self._kwargs = kwargs @@ -1031,7 +1030,7 @@ def CustomMapper(pcoll, mapfn): class CustomMapper(PTransform): def __init__(self, mapfn): - super(CustomMapper, self).__init__() + super().__init__() self.mapfn = mapfn def expand(self, pcoll): @@ -1084,7 +1083,7 @@ def label_from_callable(fn): class _NamedPTransform(PTransform): def __init__(self, transform, label): - super(_NamedPTransform, self).__init__(label) + super().__init__(label) self.transform = transform def __ror__(self, pvalueish, _unused=None): diff --git a/sdks/python/apache_beam/transforms/ptransform_test.py b/sdks/python/apache_beam/transforms/ptransform_test.py index ccf0a55d5ea5..e323d2d32db6 100644 --- a/sdks/python/apache_beam/transforms/ptransform_test.py +++ b/sdks/python/apache_beam/transforms/ptransform_test.py @@ -34,9 +34,9 @@ import pytest import apache_beam as beam -import apache_beam.pvalue as pvalue import apache_beam.transforms.combiners as combine -import apache_beam.typehints as typehints +from apache_beam import pvalue +from apache_beam import typehints from apache_beam.io.iobase import Read from apache_beam.metrics import Metrics from apache_beam.metrics.metric import MetricsFilter diff --git a/sdks/python/apache_beam/transforms/sql.py b/sdks/python/apache_beam/transforms/sql.py index 30d546443d06..4102c732bfbd 100644 --- a/sdks/python/apache_beam/transforms/sql.py +++ b/sdks/python/apache_beam/transforms/sql.py @@ -85,7 +85,7 @@ def __init__(self, query, dialect=None, expansion_service=None): """ expansion_service = expansion_service or BeamJarExpansionService( ':sdks:java:extensions:sql:expansion-service:shadowJar') - super(SqlTransform, self).__init__( + super().__init__( self.URN, NamedTupleBasedPayloadBuilder( SqlTransformSchema(query=query, dialect=dialect)), diff --git 
a/sdks/python/apache_beam/transforms/stats_test.py b/sdks/python/apache_beam/transforms/stats_test.py index 739438035c88..bf634c003a07 100644 --- a/sdks/python/apache_beam/transforms/stats_test.py +++ b/sdks/python/apache_beam/transforms/stats_test.py @@ -621,7 +621,7 @@ def _build_quantilebuffer_test_data(): [391, 977, 1221, 1526, 954], [782, 977, 1221, 1526, 1908], [3125, 3907, 9766, 12208, 15259]] - test_data = list() + test_data = [] i = 0 for epsilon in epsilons: j = 0 diff --git a/sdks/python/apache_beam/transforms/trigger.py b/sdks/python/apache_beam/transforms/trigger.py index 452f2c283717..d7a17aca7796 100644 --- a/sdks/python/apache_beam/transforms/trigger.py +++ b/sdks/python/apache_beam/transforms/trigger.py @@ -111,7 +111,7 @@ class _CombiningValueStateTag(_StateTag): # TODO(robertwb): Also store the coder (perhaps extracted from the combine_fn) def __init__(self, tag, combine_fn): - super(_CombiningValueStateTag, self).__init__(tag) + super().__init__(tag) if not combine_fn: raise ValueError('combine_fn must be specified.') if not isinstance(combine_fn, core.CombineFn): @@ -148,7 +148,7 @@ def with_prefix(self, prefix): class _WatermarkHoldStateTag(_StateTag): def __init__(self, tag, timestamp_combiner_impl): - super(_WatermarkHoldStateTag, self).__init__(tag) + super().__init__(tag) self.timestamp_combiner_impl = timestamp_combiner_impl def __repr__(self): @@ -1236,7 +1236,7 @@ def process_entire_key(self, key, windowed_values): class _UnwindowedValues(observable.ObservableMixin): """Exposes iterable of windowed values as iterable of unwindowed values.""" def __init__(self, windowed_values): - super(_UnwindowedValues, self).__init__() + super().__init__() self._windowed_values = windowed_values def __iter__(self): diff --git a/sdks/python/apache_beam/transforms/userstate.py b/sdks/python/apache_beam/transforms/userstate.py index 84184d4bde02..12761182e6ca 100644 --- a/sdks/python/apache_beam/transforms/userstate.py +++ b/sdks/python/apache_beam/transforms/userstate.py @@ -134,7 +134,7 @@ def __init__(self, name, coder=None, combine_fn=None): if coder is None: coder = self.combine_fn.get_accumulator_coder() - super(CombiningValueStateSpec, self).__init__(name, coder) + super().__init__(name, coder) def to_runner_api(self, context): # type: (PipelineContext) -> beam_runner_api_pb2.StateSpec diff --git a/sdks/python/apache_beam/transforms/util.py b/sdks/python/apache_beam/transforms/util.py index 63bbece98333..77c7f8534b8b 100644 --- a/sdks/python/apache_beam/transforms/util.py +++ b/sdks/python/apache_beam/transforms/util.py @@ -632,7 +632,7 @@ def __init__(self, window_coder): Arguments: window_coder: coders.Coder object to be used on windows. """ - super(_IdentityWindowFn, self).__init__() + super().__init__() if window_coder is None: raise ValueError('window_coder should not be None') self._window_coder = window_coder @@ -768,9 +768,9 @@ def WithKeys(pcoll, k, *args, **kwargs): """ if callable(k): if fn_takes_side_inputs(k): - if all([isinstance(arg, AsSideInput) - for arg in args]) and all([isinstance(kwarg, AsSideInput) - for kwarg in kwargs.values()]): + if all(isinstance(arg, AsSideInput) + for arg in args) and all(isinstance(kwarg, AsSideInput) + for kwarg in kwargs.values()): return pcoll | Map( lambda v, *args, @@ -963,6 +963,7 @@ def process( if count == 1 and max_buffering_duration_secs > 0: # This is the first element in batch. Start counting buffering time if a # limit was set. 
+ # pylint: disable=deprecated-method buffering_timer.set(clock() + max_buffering_duration_secs) if count >= batch_size: return self.flush_batch(element_state, count_state, buffering_timer) diff --git a/sdks/python/apache_beam/transforms/window.py b/sdks/python/apache_beam/transforms/window.py index de16c73079b9..522f6615c090 100644 --- a/sdks/python/apache_beam/transforms/window.py +++ b/sdks/python/apache_beam/transforms/window.py @@ -318,7 +318,7 @@ def __new__(cls): def __init__(self): # type: () -> None - super(GlobalWindow, self).__init__(GlobalWindow._getTimestampFromProto()) + super().__init__(GlobalWindow._getTimestampFromProto()) def __repr__(self): return 'GlobalWindow' diff --git a/sdks/python/apache_beam/transforms/window_test.py b/sdks/python/apache_beam/transforms/window_test.py index 7369090729d6..9b2254f54d14 100644 --- a/sdks/python/apache_beam/transforms/window_test.py +++ b/sdks/python/apache_beam/transforms/window_test.py @@ -149,7 +149,7 @@ def merge(*timestamps): class TestMergeContext(WindowFn.MergeContext): def __init__(self): - super(TestMergeContext, self).__init__(running) + super().__init__(running) def merge(self, to_be_merged, merge_result): for w in to_be_merged: diff --git a/sdks/python/apache_beam/typehints/trivial_inference.py b/sdks/python/apache_beam/typehints/trivial_inference.py index cc6534da6f57..0d760db43958 100644 --- a/sdks/python/apache_beam/typehints/trivial_inference.py +++ b/sdks/python/apache_beam/typehints/trivial_inference.py @@ -53,7 +53,7 @@ def instance_to_type(o): (name, instance_to_type(value)) for name, value in o.as_dict().items() ]) elif t not in typehints.DISALLOWED_PRIMITIVE_TYPES: - # pylint: disable=deprecated-types-field + # pylint: disable=bad-option-value if t == BoundMethod: return types.MethodType return t diff --git a/sdks/python/apache_beam/typehints/typecheck.py b/sdks/python/apache_beam/typehints/typecheck.py index 6c4ba250ec80..9fe18047f3e8 100644 --- a/sdks/python/apache_beam/typehints/typecheck.py +++ b/sdks/python/apache_beam/typehints/typecheck.py @@ -44,7 +44,7 @@ class AbstractDoFnWrapper(DoFn): """An abstract class to create wrapper around DoFn""" def __init__(self, dofn): - super(AbstractDoFnWrapper, self).__init__() + super().__init__() self.dofn = dofn def _inspect_start_bundle(self): @@ -78,7 +78,7 @@ def teardown(self): class OutputCheckWrapperDoFn(AbstractDoFnWrapper): """A DoFn that verifies against common errors in the output type.""" def __init__(self, dofn, full_label): - super(OutputCheckWrapperDoFn, self).__init__(dofn) + super().__init__(dofn) self.full_label = full_label def wrapper(self, method, args, kwargs): @@ -116,7 +116,7 @@ class TypeCheckWrapperDoFn(AbstractDoFnWrapper): """A wrapper around a DoFn which performs type-checking of input and output. 
""" def __init__(self, dofn, type_hints, label=None): - super(TypeCheckWrapperDoFn, self).__init__(dofn) + super().__init__(dofn) self._process_fn = self.dofn._process_argspec_fn() if type_hints.input_types: input_args, input_kwargs = type_hints.input_types diff --git a/sdks/python/apache_beam/typehints/typehints.py b/sdks/python/apache_beam/typehints/typehints.py index 136ad016b167..7c1a8138a5e7 100644 --- a/sdks/python/apache_beam/typehints/typehints.py +++ b/sdks/python/apache_beam/typehints/typehints.py @@ -251,7 +251,7 @@ def type_check(self, sequence_instance): for index, elem in enumerate(sequence_instance): try: check_constraint(self.inner_type, elem) - except SimpleTypeHintError as e: + except SimpleTypeHintError: raise CompositeTypeHintError( '%s hint type-constraint violated. The type of element #%s in ' 'the passed %s is incorrect. Expected an instance of type %s, ' @@ -599,7 +599,7 @@ class TupleHint(CompositeTypeHint): """ class TupleSequenceConstraint(SequenceTypeConstraint): def __init__(self, type_param): - super(TupleHint.TupleSequenceConstraint, self).__init__(type_param, tuple) + super().__init__(type_param, tuple) def __repr__(self): return 'Tuple[%s, ...]' % _unified_repr(self.inner_type) @@ -610,7 +610,7 @@ def _consistent_with_check_(self, sub): return all( is_consistent_with(elem, self.inner_type) for elem in sub.tuple_types) - return super(TupleSequenceConstraint, self)._consistent_with_check_(sub) + return super()._consistent_with_check_(sub) class TupleConstraint(IndexableTypeConstraint): def __init__(self, type_params): @@ -731,7 +731,7 @@ class ListHint(CompositeTypeHint): """ class ListConstraint(SequenceTypeConstraint): def __init__(self, list_type): - super(ListHint.ListConstraint, self).__init__(list_type, list) + super().__init__(list_type, list) def __repr__(self): return 'List[%s]' % _unified_repr(self.inner_type) @@ -912,7 +912,7 @@ class SetHint(CompositeTypeHint): """ class SetTypeConstraint(SequenceTypeConstraint): def __init__(self, type_param): - super(SetHint.SetTypeConstraint, self).__init__(type_param, set) + super().__init__(type_param, set) def __repr__(self): return 'Set[%s]' % _unified_repr(self.inner_type) diff --git a/sdks/python/apache_beam/typehints/typehints_test.py b/sdks/python/apache_beam/typehints/typehints_test.py index 2e0658c94358..2b39b60be3fe 100644 --- a/sdks/python/apache_beam/typehints/typehints_test.py +++ b/sdks/python/apache_beam/typehints/typehints_test.py @@ -24,7 +24,6 @@ import typing import unittest -import apache_beam.typehints.typehints as typehints from apache_beam import Map from apache_beam import PTransform from apache_beam.pvalue import PBegin @@ -38,6 +37,7 @@ from apache_beam.typehints import TypeCheckError from apache_beam.typehints import Union from apache_beam.typehints import native_type_compatibility +from apache_beam.typehints import typehints from apache_beam.typehints import with_input_types from apache_beam.typehints import with_output_types from apache_beam.typehints.decorators import GeneratorWrapper diff --git a/sdks/python/apache_beam/utils/counters.py b/sdks/python/apache_beam/utils/counters.py index 320a9cdcf024..bcc883e52758 100644 --- a/sdks/python/apache_beam/utils/counters.py +++ b/sdks/python/apache_beam/utils/counters.py @@ -114,7 +114,7 @@ def __new__( output_index=None, io_target=None): origin = origin or CounterName.SYSTEM - return super(CounterName, cls).__new__( + return super().__new__( cls, name, stage_name, @@ -206,7 +206,7 @@ class AccumulatorCombineFnCounter(Counter): def 
__init__(self, name, combine_fn): # type: (CounterName, cy_combiners.AccumulatorCombineFn) -> None assert isinstance(combine_fn, cy_combiners.AccumulatorCombineFn) - super(AccumulatorCombineFnCounter, self).__init__(name, combine_fn) + super().__init__(name, combine_fn) self.reset() def update(self, value): @@ -268,4 +268,4 @@ def get_counters(self): this method returns hence the returned iterable may be stale. """ with self._lock: - return self.counters.values() # pylint: disable=dict-values-not-iterating + return self.counters.values() # pylint: disable=bad-option-value diff --git a/sdks/python/apache_beam/utils/histogram.py b/sdks/python/apache_beam/utils/histogram.py index 13bb5c274d2b..83533c5c950a 100644 --- a/sdks/python/apache_beam/utils/histogram.py +++ b/sdks/python/apache_beam/utils/histogram.py @@ -107,7 +107,7 @@ def _format(f): elif f == float('inf'): return '>=%s' % self._bucket_type.range_to() else: - return str(int(round(f))) # pylint: disable=round-builtin + return str(int(round(f))) # pylint: disable=bad-option-value with self._lock: return ( diff --git a/sdks/python/apache_beam/utils/profiler.py b/sdks/python/apache_beam/utils/profiler.py index de9f9434dc05..d10703c17289 100644 --- a/sdks/python/apache_beam/utils/profiler.py +++ b/sdks/python/apache_beam/utils/profiler.py @@ -23,7 +23,7 @@ # pytype: skip-file # mypy: check-untyped-defs -import cProfile # pylint: disable=bad-python3-import +import cProfile import io import logging import os diff --git a/sdks/python/apache_beam/utils/shared.py b/sdks/python/apache_beam/utils/shared.py index 79d70279466c..23622eff346f 100644 --- a/sdks/python/apache_beam/utils/shared.py +++ b/sdks/python/apache_beam/utils/shared.py @@ -72,7 +72,7 @@ def process(self, element, table_elements): def construct_table(): # Construct the rainbow table from the table elements. 
# The table contains lines in the form "string::hash" - result = dict() + result = {} for key, value in table_elements: result[value] = key return result @@ -209,7 +209,7 @@ def __init__(self): self._lock = threading.Lock() # Dictionary of references to shared control blocks - self._cache_map = dict() + self._cache_map = {} # Tuple of (key, obj), where obj is an object we explicitly hold a reference # to keep it alive diff --git a/sdks/python/apache_beam/utils/subprocess_server.py b/sdks/python/apache_beam/utils/subprocess_server.py index d1466a295892..7035cad235c5 100644 --- a/sdks/python/apache_beam/utils/subprocess_server.py +++ b/sdks/python/apache_beam/utils/subprocess_server.py @@ -159,7 +159,7 @@ class JavaJarServer(SubprocessServer): dict(__init__=lambda self: setattr(self, 'replacements', {})))() def __init__(self, stub_class, path_to_jar, java_arguments): - super(JavaJarServer, self).__init__( + super().__init__( stub_class, ['java', '-jar', path_to_jar] + list(java_arguments)) self._existing_service = path_to_jar if _is_service_endpoint( path_to_jar) else None @@ -172,13 +172,13 @@ def start_process(self): raise RuntimeError( 'Java must be installed on this system to use this ' 'transform/runner.') - return super(JavaJarServer, self).start_process() + return super().start_process() def stop_process(self): if self._existing_service: pass else: - return super(JavaJarServer, self).stop_process() + return super().stop_process() @classmethod def jar_name(cls, artifact_id, version, classifier=None, appendix=None): diff --git a/sdks/python/apache_beam/utils/thread_pool_executor.py b/sdks/python/apache_beam/utils/thread_pool_executor.py index afe172af6c7e..e1e8ad5c43a6 100644 --- a/sdks/python/apache_beam/utils/thread_pool_executor.py +++ b/sdks/python/apache_beam/utils/thread_pool_executor.py @@ -41,7 +41,7 @@ def run(self): class _Worker(threading.Thread): def __init__(self, idle_worker_queue, work_item): - super(_Worker, self).__init__() + super().__init__() self._idle_worker_queue = idle_worker_queue self._work_item = work_item self._wake_semaphore = threading.Semaphore(0) diff --git a/sdks/python/gen_protos.py b/sdks/python/gen_protos.py index 2fa3cacdf675..4e63078bff70 100644 --- a/sdks/python/gen_protos.py +++ b/sdks/python/gen_protos.py @@ -60,8 +60,8 @@ def generate_urn_files(log, out_dir): This is executed at build time rather than dynamically on import to ensure that it is compatible with static type checkers like mypy. """ - import google.protobuf.message as message import google.protobuf.pyext._message as pyext_message + from google.protobuf import message class Context(object): INDENT = ' ' @@ -307,6 +307,7 @@ def generate_proto_files(force=False, log=None): p.join() if p.exitcode: raise ValueError("Proto generation failed (see log for details).") + else: log.info('Regenerating Python proto definitions (%s).' 
% regenerate) builtin_protos = pkg_resources.resource_filename('grpc_tools', '_proto') diff --git a/sdks/python/setup.py b/sdks/python/setup.py index 34f96c1ffaf7..067deb416861 100644 --- a/sdks/python/setup.py +++ b/sdks/python/setup.py @@ -242,7 +242,7 @@ def generate_protos_first(original_cmd): class cmd(original_cmd, object): def run(self): gen_protos.generate_proto_files() - super(cmd, self).run() + super().run() return cmd except ImportError: diff --git a/sdks/python/tox.ini b/sdks/python/tox.ini index f3946cb46645..ca3e0b863740 100644 --- a/sdks/python/tox.ini +++ b/sdks/python/tox.ini @@ -110,9 +110,9 @@ setenv = # keep the version of pylint in sync with the 'rev' in .pre-commit-config.yaml deps = -r build-requirements.txt - astroid<2.4,>=2.3.0 + astroid<2.9,>=2.8.0 pycodestyle==2.3.1 - pylint==2.4.3 + pylint==2.11.1 isort==4.2.15 flake8==3.5.0 commands = diff --git a/website/www/site/content/en/documentation/patterns/cross-language.md b/website/www/site/content/en/documentation/patterns/cross-language.md index 8cdd6a37658f..b67db17477f4 100644 --- a/website/www/site/content/en/documentation/patterns/cross-language.md +++ b/website/www/site/content/en/documentation/patterns/cross-language.md @@ -95,7 +95,7 @@ URN = "beam:transforms:xlang:pythontransform" @ptransform.PTransform.register_urn(URN, None) class PythonTransform(ptransform.PTransform): def __init__(self): - super(PythonTransform, self).__init__() + super().__init__() def expand(self, pcoll): return (pcoll