Skip to content

Commit

Permalink
Upgrade syntax to Python 3.7
Browse files Browse the repository at this point in the history
using `ack --type=python -f | grep -v '^tools/' | xargs pyupgrade --py37-plus`

Also more refactoring of Python files in `packages/`.
  • Loading branch information
nsoranzo committed Mar 8, 2022
1 parent d7cb152 commit 00817ea
Show file tree
Hide file tree
Showing 100 changed files with 503 additions and 454 deletions.
4 changes: 2 additions & 2 deletions config/plugins/webhooks/demo/tour_generator/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -163,7 +163,7 @@ def _generate_tour(self):
if name in test_inputs:
hid = self._hids[name]
dataset = self._test.inputs[name][0]
step["content"] = "Select dataset: <b>{}: {}</b>".format(hid, dataset)
step["content"] = f"Select dataset: <b>{hid}: {dataset}</b>"
else:
step["content"] = "Select a dataset"

Expand Down Expand Up @@ -192,7 +192,7 @@ def _generate_tour(self):
if case_id in self._data_inputs.keys():
hid = self._hids[case_id]
dataset = self._test.inputs[tour_id][0]
step_msg = "Select dataset: <b>%s: %s</b>" % (hid, dataset)
step_msg = f"Select dataset: <b>{hid}: {dataset}</b>"
else:
case_params = ", ".join(self._test.inputs[tour_id])
step_msg = "Select parameter(s): " + "<b>%s</b>" % case_params
Expand Down
2 changes: 1 addition & 1 deletion lib/galaxy/containers/docker_swarm.py
Original file line number Diff line number Diff line change
Expand Up @@ -374,7 +374,7 @@ def node_ps(self, node_id):
def node_update(self, node_id, **kwopts):
return self._run_docker(
subcommand="node update",
args="{kwopts} {node_id}".format(kwopts=self._stringify_kwopts(kwopts), node_id=node_id),
args=f"{self._stringify_kwopts(kwopts)} {node_id}",
)

@docker_json
Expand Down
2 changes: 1 addition & 1 deletion lib/galaxy/datatypes/blast.py
Original file line number Diff line number Diff line change
Expand Up @@ -102,7 +102,7 @@ def merge(split_files, output_file):
# For one file only, use base class method (move/copy)
return Text.merge(split_files, output_file)
if not split_files:
raise ValueError("Given no BLAST XML files, %r, to merge into %s" % (split_files, output_file))
raise ValueError(f"Given no BLAST XML files, {split_files!r}, to merge into {output_file}")
with open(output_file, "w") as out:
h = None
old_header = None
Expand Down
17 changes: 7 additions & 10 deletions lib/galaxy/datatypes/goldenpath.py
Original file line number Diff line number Diff line change
Expand Up @@ -151,7 +151,7 @@ def _read_file(self):

line_number = 0
in_body = False
with open(self.fname, "r") as f:
with open(self.fname) as f:
for line in f:
line_number += 1
line = line.rstrip("\n")
Expand Down Expand Up @@ -217,14 +217,12 @@ def num_lines(self):

def iterate_objs(self):
"""Iterate over the objects of the AGP file."""
for obj in self._objects:
yield obj
yield from self._objects

def iterate_lines(self):
"""Iterate over the non-comment lines of AGP file."""
for obj in self.iterate_objs():
for j in obj.iterate_lines():
yield j
yield from obj.iterate_lines()


class AGPObject:
Expand Down Expand Up @@ -302,11 +300,10 @@ def add_line(self, agp_line):
self._agp_lines.append(agp_line)

def iterate_lines(self):
for i in self._agp_lines:
yield i
yield from self._agp_lines


class AGPLine(object, metaclass=abc.ABCMeta):
class AGPLine(metaclass=abc.ABCMeta):
"""
An abstract base class representing a single AGP file line. Inheriting subclasses should
override or implement new methods to check the validity of a single AFP line. Validity
Expand Down Expand Up @@ -388,7 +385,7 @@ def __init__(
self.orientation = orientation

# Set the object attributes and perform superclass-defined validations
super(AGPSeqLine, self).__init__(fname, line_number, obj, obj_beg, obj_end, pid, comp_type)
super().__init__(fname, line_number, obj, obj_beg, obj_end, pid, comp_type)

self.is_gap = False
self.seqdict = dict(
Expand Down Expand Up @@ -509,7 +506,7 @@ def __init__(
self.linkage_evidence = linkage_evidence

# Set the object attributes and perform superclass-defined validations
super(AGPGapLine, self).__init__(fname, line_number, obj, obj_beg, obj_end, pid, comp_type)
super().__init__(fname, line_number, obj, obj_beg, obj_end, pid, comp_type)

self.is_gap = True
self.gapdict = dict(
Expand Down
6 changes: 2 additions & 4 deletions lib/galaxy/datatypes/interval.py
Original file line number Diff line number Diff line change
Expand Up @@ -315,9 +315,7 @@ def ucsc_links(self, dataset, type, app, base_url):
"%s%s/display_as?id=%i&display_app=%s&authz_method=display_at"
% (base_url, app.url_for(controller="root"), dataset.id, type)
)
redirect_url = quote_plus(
"%sdb=%s&position=%s:%s-%s&hgt.customText=%%s" % (site_url, dataset.dbkey, chrom, start, stop)
)
redirect_url = quote_plus(f"{site_url}db={dataset.dbkey}&position={chrom}:{start}-{stop}&hgt.customText=%s")
link = f"{internal_url}?redirect_url={redirect_url}&display_url={display_url}"
ret_val.append((site_name, link))
return ret_val
Expand Down Expand Up @@ -933,7 +931,7 @@ def ucsc_links(self, dataset, type, app, base_url):
for site_name, site_url in app.datatypes_registry.get_legacy_sites_by_build("ucsc", dataset.dbkey):
if site_name in app.datatypes_registry.get_display_sites("ucsc"):
redirect_url = quote_plus(
"%sdb=%s&position=%s:%s-%s&hgt.customText=%%s" % (site_url, dataset.dbkey, seqid, start, stop)
f"{site_url}db={dataset.dbkey}&position={seqid}:{start}-{stop}&hgt.customText=%s"
)
link = self._get_remote_call_url(redirect_url, site_name, dataset, type, app, base_url)
ret_val.append((site_name, link))
Expand Down
4 changes: 2 additions & 2 deletions lib/galaxy/datatypes/molecules.py
Original file line number Diff line number Diff line change
Expand Up @@ -360,7 +360,7 @@ def merge(split_files, output_file):
# For one file only, use base class method (move/copy)
return Text.merge(split_files, output_file)
if not split_files:
raise ValueError("No fps files given, %r, to merge into %s" % (split_files, output_file))
raise ValueError(f"No fps files given, {split_files!r}, to merge into {output_file}")
with open(output_file, "w") as out:
first = True
for filename in split_files:
Expand Down Expand Up @@ -940,7 +940,7 @@ def merge(split_files, output_file):
# For one file only, use base class method (move/copy)
return Text.merge(split_files, output_file)
if not split_files:
raise ValueError("Given no CML files, %r, to merge into %s" % (split_files, output_file))
raise ValueError(f"Given no CML files, {split_files!r}, to merge into {output_file}")
with open(output_file, "w") as out:
for filename in split_files:
with open(filename) as handle:
Expand Down
2 changes: 1 addition & 1 deletion lib/galaxy/datatypes/neo4j.py
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,7 @@ def generate_primary_file(self, dataset=None):
opt_text = ""
if composite_file.optional:
opt_text = " (optional)"
rval.append('<li><a href="%s">%s</a>%s' % (composite_name, composite_name, opt_text))
rval.append(f'<li><a href="{composite_name}">{composite_name}</a>{opt_text}')
rval.append("</ul></html>")
return "\n".join(rval)

Expand Down
4 changes: 2 additions & 2 deletions lib/galaxy/datatypes/spaln.py
Original file line number Diff line number Diff line change
Expand Up @@ -64,7 +64,7 @@ def generate_primary_file(self, dataset=None):
% (fn, fn, composite_file.get("description"), opt_text)
)
else:
rval.append('<li><a href="%s" type="application/binary">%s</a>%s</li>' % (fn, fn, opt_text))
rval.append(f'<li><a href="{fn}" type="application/binary">{fn}</a>{opt_text}</li>')
rval.append("</ul></div></html>")
return "\n".join(rval)

Expand Down Expand Up @@ -141,7 +141,7 @@ def display_data(self, trans, data, preview=False, filename=None, to_ext=None, s
msg = title
# Galaxy assumes HTML for the display of composite datatypes,
return (
smart_str("<html><head><title>%s</title></head><body><pre>%s</pre></body></html>" % (title, msg)),
smart_str(f"<html><head><title>{title}</title></head><body><pre>{msg}</pre></body></html>"),
headers,
)

Expand Down
2 changes: 1 addition & 1 deletion lib/galaxy/datatypes/util/gff_util.py
Original file line number Diff line number Diff line change
Expand Up @@ -121,7 +121,7 @@ def __init__(
for interval in self.intervals:
# Error checking. NOTE: intervals need not share the same strand.
if interval.chrom != self.chrom:
raise ValueError("interval chrom does not match self chrom: %s != %s" % (interval.chrom, self.chrom))
raise ValueError(f"interval chrom does not match self chrom: {interval.chrom} != {self.chrom}")
# Set start, end of interval.
if interval.start < self.start:
self.start = interval.start
Expand Down
4 changes: 2 additions & 2 deletions lib/galaxy/managers/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -173,11 +173,11 @@ def get_object(trans, id, class_name, check_ownership=False, check_accessible=Fa
security_check(trans, item, check_ownership, check_accessible)
if deleted is True and not item.deleted:
raise exceptions.ItemDeletionException(
'%s "%s" is not deleted' % (class_name, getattr(item, "name", id)), type="warning"
'{} "{}" is not deleted'.format(class_name, getattr(item, "name", id)), type="warning"
)
elif deleted is False and item.deleted:
raise exceptions.ItemDeletionException(
'%s "%s" is deleted' % (class_name, getattr(item, "name", id)), type="warning"
'{} "{}" is deleted'.format(class_name, getattr(item, "name", id)), type="warning"
)
return item

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -77,7 +77,7 @@ def upgrade(migrate_engine):
# handle tool_version_associaions because we do not have the information we need to do so.
tools = repository_metadata.get("tools", [])
for tool_dict in tools:
cmd = "INSERT INTO tool_version VALUES (%s, %s, %s, '%s', %s)" % (
cmd = "INSERT INTO tool_version VALUES ({}, {}, {}, '{}', {})".format(
nextval(migrate_engine, "tool_version"),
localtimestamp(migrate_engine),
localtimestamp(migrate_engine),
Expand Down
6 changes: 3 additions & 3 deletions lib/galaxy/objectstore/cloud.py
Original file line number Diff line number Diff line change
Expand Up @@ -614,7 +614,7 @@ def _empty(self, obj, **kwargs):
if self._exists(obj, **kwargs):
return bool(self._size(obj, **kwargs) > 0)
else:
raise ObjectNotFound("objectstore.empty, object does not exist: %s, kwargs: %s" % (str(obj), str(kwargs)))
raise ObjectNotFound(f"objectstore.empty, object does not exist: {str(obj)}, kwargs: {str(kwargs)}")

def _size(self, obj, **kwargs):
rel_path = self._construct_path(obj, **kwargs)
Expand Down Expand Up @@ -711,7 +711,7 @@ def _get_filename(self, obj, **kwargs):
# even if it does not exist.
# if dir_only:
# return cache_path
raise ObjectNotFound("objectstore.get_filename, no cache_path: %s, kwargs: %s" % (str(obj), str(kwargs)))
raise ObjectNotFound(f"objectstore.get_filename, no cache_path: {str(obj)}, kwargs: {str(kwargs)}")
# return cache_path # Until the upload tool does not explicitly create the dataset, return expected path

def _update_from_file(self, obj, file_name=None, create=False, **kwargs):
Expand All @@ -737,7 +737,7 @@ def _update_from_file(self, obj, file_name=None, create=False, **kwargs):
self._push_to_os(rel_path, source_file)
else:
raise ObjectNotFound(
"objectstore.update_from_file, object does not exist: %s, kwargs: %s" % (str(obj), str(kwargs))
f"objectstore.update_from_file, object does not exist: {str(obj)}, kwargs: {str(kwargs)}"
)

def _get_object_url(self, obj, **kwargs):
Expand Down
6 changes: 3 additions & 3 deletions lib/galaxy/objectstore/irods.py
Original file line number Diff line number Diff line change
Expand Up @@ -542,7 +542,7 @@ def _empty(self, obj, **kwargs):
if self._exists(obj, **kwargs):
return bool(self._size(obj, **kwargs) > 0)
else:
raise ObjectNotFound("objectstore.empty, object does not exist: %s, kwargs: %s" % (str(obj), str(kwargs)))
raise ObjectNotFound(f"objectstore.empty, object does not exist: {str(obj)}, kwargs: {str(kwargs)}")

def _size(self, obj, **kwargs):
ipt_timer = ExecutionTimer()
Expand Down Expand Up @@ -682,7 +682,7 @@ def _get_filename(self, obj, **kwargs):
# if dir_only:
# return cache_path
log.debug("irods_pt _get_filename: %s", ipt_timer)
raise ObjectNotFound("objectstore.get_filename, no cache_path: %s, kwargs: %s" % (str(obj), str(kwargs)))
raise ObjectNotFound(f"objectstore.get_filename, no cache_path: {str(obj)}, kwargs: {str(kwargs)}")
# return cache_path # Until the upload tool does not explicitly create the dataset, return expected path

def _update_from_file(self, obj, file_name=None, create=False, **kwargs):
Expand Down Expand Up @@ -710,7 +710,7 @@ def _update_from_file(self, obj, file_name=None, create=False, **kwargs):
else:
log.debug("irods_pt _update_from_file: %s", ipt_timer)
raise ObjectNotFound(
"objectstore.update_from_file, object does not exist: %s, kwargs: %s" % (str(obj), str(kwargs))
f"objectstore.update_from_file, object does not exist: {str(obj)}, kwargs: {str(kwargs)}"
)
log.debug("irods_pt _update_from_file: %s", ipt_timer)

Expand Down
6 changes: 3 additions & 3 deletions lib/galaxy/objectstore/s3.py
Original file line number Diff line number Diff line change
Expand Up @@ -626,7 +626,7 @@ def _empty(self, obj, **kwargs):
if self._exists(obj, **kwargs):
return bool(self._size(obj, **kwargs) > 0)
else:
raise ObjectNotFound("objectstore.empty, object does not exist: %s, kwargs: %s" % (str(obj), str(kwargs)))
raise ObjectNotFound(f"objectstore.empty, object does not exist: {str(obj)}, kwargs: {str(kwargs)}")

def _size(self, obj, **kwargs):
rel_path = self._construct_path(obj, **kwargs)
Expand Down Expand Up @@ -723,7 +723,7 @@ def _get_filename(self, obj, **kwargs):
# even if it does not exist.
# if dir_only:
# return cache_path
raise ObjectNotFound("objectstore.get_filename, no cache_path: %s, kwargs: %s" % (str(obj), str(kwargs)))
raise ObjectNotFound(f"objectstore.get_filename, no cache_path: {str(obj)}, kwargs: {str(kwargs)}")
# return cache_path # Until the upload tool does not explicitly create the dataset, return expected path

def _update_from_file(self, obj, file_name=None, create=False, **kwargs):
Expand All @@ -749,7 +749,7 @@ def _update_from_file(self, obj, file_name=None, create=False, **kwargs):
self._push_to_os(rel_path, source_file)
else:
raise ObjectNotFound(
"objectstore.update_from_file, object does not exist: %s, kwargs: %s" % (str(obj), str(kwargs))
f"objectstore.update_from_file, object does not exist: {str(obj)}, kwargs: {str(kwargs)}"
)

def _get_object_url(self, obj, **kwargs):
Expand Down
4 changes: 3 additions & 1 deletion lib/galaxy/queue_worker.py
Original file line number Diff line number Diff line change
Expand Up @@ -299,7 +299,9 @@ def admin_job_lock(app, **kwargs):
# point, so we're using the reference from the handler.
app.job_manager.job_lock = job_lock
log.info(
"Administrative Job Lock is now set to %s. Jobs will %s dispatch." % (job_lock, "not" if job_lock else "now")
"Administrative Job Lock is now set to {}. Jobs will {} dispatch.".format(
job_lock, "not" if job_lock else "now"
)
)


Expand Down
2 changes: 1 addition & 1 deletion lib/galaxy/security/vault.py
Original file line number Diff line number Diff line change
Expand Up @@ -260,7 +260,7 @@ def list_secrets(self, key: str) -> List[str]:
raise NotImplementedError()


class VaultFactory(object):
class VaultFactory:
@staticmethod
def load_vault_config(vault_conf_yml: str) -> Optional[dict]:
if os.path.exists(vault_conf_yml):
Expand Down
4 changes: 2 additions & 2 deletions lib/galaxy/tool_shed/galaxy_install/install_manager.py
Original file line number Diff line number Diff line change
Expand Up @@ -228,7 +228,7 @@ def install_specified_tool_dependencies(self, tool_shed_repository, tool_depende
elem, tool_shed_repository, tool_dependencies=tool_dependencies
)
except Exception as e:
error_message = "Error installing tool dependency %s version %s: %s" % (
error_message = "Error installing tool dependency {} version {}: {}".format(
str(name),
str(version),
str(e),
Expand Down Expand Up @@ -553,7 +553,7 @@ def __get_install_info_from_tool_shed(self, tool_shed_url, name, owner, changese
)
except Exception:
message = "Error attempting to retrieve installation information from tool shed "
message += "%s for revision %s of repository %s owned by %s" % (
message += "{} for revision {} of repository {} owned by {}".format(
tool_shed_url,
changeset_revision,
name,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -874,7 +874,7 @@ def purge_repository(self, repository):
# Purge the repository.
sa_session.delete(repository)
sa_session.flush()
message = "The repository named <b>%s</b> with status <b>%s</b> has been purged.<br/>" % (
message = "The repository named <b>{}</b> with status <b>{}</b> has been purged.<br/>".format(
str(repository.name),
str(repository.status),
)
Expand Down Expand Up @@ -909,13 +909,13 @@ def remove_entry_from_installed_repository_dependencies_of_installed_repositorie
altered_installed_dependent_repositories_of_installed_repositories = {}
for r_tup, v_tups in self.installed_dependent_repositories_of_installed_repositories.items():
if repository_tup in v_tups:
debug_msg = "Removing entry for revision %s of repository %s owned by %s " % (
debug_msg = "Removing entry for revision {} of repository {} owned by {} ".format(
installed_changeset_revision,
name,
owner,
)
r_tool_shed, r_name, r_owner, r_installed_changeset_revision = r_tup
debug_msg += "from the dependent list for revision %s of repository %s owned by %s " % (
debug_msg += "from the dependent list for revision {} of repository {} owned by {} ".format(
r_installed_changeset_revision,
r_name,
r_owner,
Expand Down
Loading

0 comments on commit 00817ea

Please sign in to comment.