diff --git a/metaflow/client/filecache.py b/metaflow/client/filecache.py
index 5adf1a06c60..98900b15482 100644
--- a/metaflow/client/filecache.py
+++ b/metaflow/client/filecache.py
@@ -23,7 +23,6 @@
     def od_move_to_end(od, key):
         od.move_to_end(key)
 
-
 else:
     # Not very efficient but works and most people are on 3.2+
     def od_move_to_end(od, key):
@@ -320,7 +319,7 @@ def _task_ds_id(ds_type, ds_root, flow_name, run_id, step_name, task_id, attempt
 
     def _garbage_collect(self):
         now = time.time()
-        while self._objects and self._total > self._max_size * 1024 ** 2:
+        while self._objects and self._total > self._max_size * 1024**2:
             if now - self._objects[0][0] < NEW_FILE_QUARANTINE:
                 break
             ctime, size, path = self._objects.pop(0)
diff --git a/metaflow/datatools/s3.py b/metaflow/datatools/s3.py
index ca329f13b9d..c5b654aa58a 100644
--- a/metaflow/datatools/s3.py
+++ b/metaflow/datatools/s3.py
@@ -932,7 +932,7 @@ def _one_boto_op(self, op, url, create_tmp_file=True):
                     os.unlink(tmp.name)
                 self._s3_client.reset_client()
                 # add some jitter to make sure retries are not synchronized
-                time.sleep(2 ** i + random.randint(0, 10))
+                time.sleep(2**i + random.randint(0, 10))
         raise MetaflowS3Exception(
             "S3 operation failed.\n" "Key requested: %s\n" "Error: %s" % (url, error)
         )
@@ -1048,6 +1048,6 @@ def _s3op_with_retries(self, mode, **options):
                 elif ex.returncode == s3op.ERROR_URL_ACCESS_DENIED:
                     raise MetaflowS3AccessDenied(err_out)
                 print("Error with S3 operation:", err_out)
-                time.sleep(2 ** i + random.randint(0, 10))
+                time.sleep(2**i + random.randint(0, 10))
 
         return None, err_out
diff --git a/metaflow/datatools/s3op.py b/metaflow/datatools/s3op.py
index 966fe43070c..c11c1a609cd 100644
--- a/metaflow/datatools/s3op.py
+++ b/metaflow/datatools/s3op.py
@@ -307,10 +307,10 @@ def process_urls(mode, urls, verbose, num_workers, s3role):
 
 
 def with_unit(x):
-    if x > 1024 ** 3:
-        return "%.1fGB" % (x / 1024.0 ** 3)
-    elif x > 1024 ** 2:
-        return "%.1fMB" % (x / 1024.0 ** 2)
+    if x > 1024**3:
+        return "%.1fGB" % (x / 1024.0**3)
+    elif x > 1024**2:
+        return "%.1fMB" % (x / 1024.0**2)
     elif x > 1024:
         return "%.1fKB" % (x / 1024.0)
     else:
diff --git a/metaflow/datatools/s3util.py b/metaflow/datatools/s3util.py
index acd07947751..330013818c1 100644
--- a/metaflow/datatools/s3util.py
+++ b/metaflow/datatools/s3util.py
@@ -59,7 +59,7 @@ def retry_wrapper(self, *args, **kwargs):
                 last_exc = ex
                 # exponential backoff for real failures
                 if not (TEST_S3_RETRY and i == 0):
-                    time.sleep(2 ** i + random.randint(0, 5))
+                    time.sleep(2**i + random.randint(0, 5))
         raise last_exc
 
     return retry_wrapper
diff --git a/metaflow/includefile.py b/metaflow/includefile.py
index 9131c012ed6..921b8095a08 100644
--- a/metaflow/includefile.py
+++ b/metaflow/includefile.py
@@ -126,9 +126,9 @@ def __exit__(self, *args):
 
     def _path(self, key):
         key = to_unicode(key)
-        if key.startswith(u"local://"):
+        if key.startswith("local://"):
             return key[8:]
-        elif key[0] != u"/":
+        elif key[0] != "/":
             if current.is_running_flow:
                 raise MetaflowLocalURLException(
                     "Specify Local(run=self) when you use Local inside a running "
@@ -145,7 +145,7 @@ def _path(self, key):
 
     def get(self, key=None, return_missing=False):
         p = self._path(key)
-        url = u"local://%s" % p
+        url = "local://%s" % p
         if not os.path.isfile(p):
             if return_missing:
                 p = None
@@ -159,7 +159,7 @@ def put(self, key, obj, overwrite=True):
             Local._makedirs(os.path.dirname(p))
             with open(p, "wb") as f:
                 f.write(obj)
-        return u"local://%s" % p
+        return "local://%s" % p
 
 
 # From here on out, this is the IncludeFile implementation.
@@ -190,7 +190,7 @@ def is_file_handled(cls, path):
                 )
             path = decoded_value["url"]
         for prefix, handler in DATACLIENTS.items():
-            if path.startswith(u"%s://" % prefix):
+            if path.startswith("%s://" % prefix):
                 return True, Uploader(handler), None
         try:
             with open(path, mode="r") as _:
diff --git a/metaflow/metadata/heartbeat.py b/metaflow/metadata/heartbeat.py
index d2f01f2fb4b..cd1ba5dee59 100644
--- a/metaflow/metadata/heartbeat.py
+++ b/metaflow/metadata/heartbeat.py
@@ -49,7 +49,7 @@ def ping(self):
                 retry_counter = 0
             except HeartBeatException as e:
                 retry_counter = retry_counter + 1
-                time.sleep(4 ** retry_counter)
+                time.sleep(4**retry_counter)
 
     def heartbeat(self):
         if self.hb_url is not None:
diff --git a/metaflow/plugins/aws/batch/batch_cli.py b/metaflow/plugins/aws/batch/batch_cli.py
index 3814af7814f..68ad4e475c7 100644
--- a/metaflow/plugins/aws/batch/batch_cli.py
+++ b/metaflow/plugins/aws/batch/batch_cli.py
@@ -201,7 +201,7 @@ def echo(msg, stream="stderr", batch_id=None):
     if num_parallel and num_parallel > 1:
         # For multinode, we need to add a placeholder that can be mutated by the caller
         step_args += " [multinode-args]"
-    step_cli = u"{entrypoint} {top_args} step {step} {step_args}".format(
+    step_cli = "{entrypoint} {top_args} step {step} {step_args}".format(
         entrypoint=entrypoint,
         top_args=top_args,
         step=step_name,
diff --git a/metaflow/plugins/aws/eks/kubernetes_cli.py b/metaflow/plugins/aws/eks/kubernetes_cli.py
index 73fad2a7aee..929fdf11197 100644
--- a/metaflow/plugins/aws/eks/kubernetes_cli.py
+++ b/metaflow/plugins/aws/eks/kubernetes_cli.py
@@ -153,7 +153,7 @@ def echo(msg, stream="stderr", job_id=None):
         )
         time.sleep(minutes_between_retries * 60)
 
-    step_cli = u"{entrypoint} {top_args} step {step} {step_args}".format(
+    step_cli = "{entrypoint} {top_args} step {step} {step_args}".format(
         entrypoint="%s -u %s" % (executable, os.path.basename(sys.argv[0])),
         top_args=" ".join(util.dict_to_cli_options(ctx.parent.parent.params)),
         step=step_name,
diff --git a/metaflow/plugins/cards/card_modules/chevron/renderer.py b/metaflow/plugins/cards/card_modules/chevron/renderer.py
index 571cab293b1..e95bdd2a722 100644
--- a/metaflow/plugins/cards/card_modules/chevron/renderer.py
+++ b/metaflow/plugins/cards/card_modules/chevron/renderer.py
@@ -23,7 +23,6 @@
     def unicode(x, y):
         return x
 
-
 else:  # python 2
     python3 = False
     unicode_type = unicode
diff --git a/metaflow/plugins/env_escape/data_transferer.py b/metaflow/plugins/env_escape/data_transferer.py
index 51225b17a39..68b244b214e 100644
--- a/metaflow/plugins/env_escape/data_transferer.py
+++ b/metaflow/plugins/env_escape/data_transferer.py
@@ -165,7 +165,6 @@ def _dump_invalidunicode(obj_type, transferer, obj):
     def _load_invalidunicode(obj_type, transferer, json_annotation, json_obj):
         return _load_simple(str, transferer, json_annotation, json_obj)
 
-
 else:
 
     @_register_dumper((str,))
diff --git a/metaflow/plugins/metadata/service.py b/metaflow/plugins/metadata/service.py
index 2bccd75af05..a8a7c0aeddf 100644
--- a/metaflow/plugins/metadata/service.py
+++ b/metaflow/plugins/metadata/service.py
@@ -376,7 +376,7 @@ def _request(cls, monitor, path, data=None, retry_409_path=None):
                         resp.status_code,
                         resp.text,
                     )
-            time.sleep(2 ** i)
+            time.sleep(2**i)
 
         if resp:
             raise ServiceException(
@@ -423,7 +423,7 @@ def _version(cls, monitor):
                         resp.status_code,
                         resp.text,
                     )
-            time.sleep(2 ** i)
+            time.sleep(2**i)
         if resp:
             raise ServiceException(
                 "Metadata request (%s) failed (code %s): %s"
diff --git a/metaflow/plugins/test_unbounded_foreach_decorator.py b/metaflow/plugins/test_unbounded_foreach_decorator.py
index ff6dfd0ad48..e5f2962fa3c 100644
--- a/metaflow/plugins/test_unbounded_foreach_decorator.py
+++ b/metaflow/plugins/test_unbounded_foreach_decorator.py
@@ -111,11 +111,11 @@ def control_task_step_func(self, flow, graph, retry_count):
             cmd = cli_args.step_command(
                 executable, script, step_name, step_kwargs=kwargs
             )
-            step_cli = u" ".join(cmd)
+            step_cli = " ".join(cmd)
             # Print cmdline for execution. Doesn't work without the temporary
             # unicode object while using `print`.
             print(
-                u"[${cwd}] Starting split#{split} with cmd:{cmd}".format(
+                "[${cwd}] Starting split#{split} with cmd:{cmd}".format(
                     cwd=os.getcwd(), split=i, cmd=step_cli
                 )
             )
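Several of the hunks above reformat the same retry idiom, time.sleep(2**i + random.randint(0, 10)): exponential backoff plus random jitter, so that many concurrent workers do not retry in lockstep. A minimal standalone sketch of that idiom follows; call_with_backoff, flaky_op, and MAX_RETRIES are illustrative placeholders, not Metaflow APIs (Metaflow derives its own retry counts, e.g. from S3_RETRY_COUNT).

    import random
    import time

    MAX_RETRIES = 7  # hypothetical cap for this sketch

    def call_with_backoff(flaky_op):
        # On failure, sleep 2**i seconds (exponential backoff) plus up to 10
        # seconds of random jitter, mirroring the pattern in s3.py and s3util.py.
        for i in range(MAX_RETRIES + 1):
            try:
                return flaky_op()
            except Exception:
                if i == MAX_RETRIES:
                    raise  # out of retries; surface the last error
                time.sleep(2**i + random.randint(0, 10))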