From e95d7033be04a3d8358734ecbaa28924b7c95a98 Mon Sep 17 00:00:00 2001 From: Yoann Schneider <114239491+yschneider-sinneria@users.noreply.github.com> Date: Fri, 2 Feb 2024 21:02:10 +0100 Subject: [PATCH] Bug 1823654 - Introduce pyupgrade (#7904) * Ruff Auto fix * Ruff unsafe fixes auto fix * Use builtin list instead of typing.List --------- Co-authored-by: Sebastian Hengst --- misc/compare_pushes.py | 2 +- pyproject.toml | 2 + tests/autoclassify/utils.py | 4 +- tests/conftest.py | 10 ++-- tests/e2e/test_job_ingestion.py | 2 +- tests/etl/test_perf_data_load.py | 3 +- tests/etl/test_pushlog.py | 2 +- tests/etl/test_text.py | 1 - .../intermittents_commenter/test_commenter.py | 2 +- .../test_log_view_artifact_builder.py | 4 +- tests/log_parser/test_performance_parser.py | 2 +- .../test_backfill_report_maintainer.py | 3 +- .../test_common_behaviour.py | 8 +-- .../test_criteria_tracker.py | 2 +- .../test_engineer_traction.py | 3 +- tests/push_health/test_usage.py | 2 +- tests/sampledata.py | 56 ++++++------------- tests/services/test_taskcluster.py | 8 +-- tests/test_dockerflow.py | 2 +- tests/test_utils.py | 2 +- tests/webapp/api/test_bug_job_map_api.py | 4 +- tests/webapp/api/test_bugzilla.py | 2 - tests/webapp/api/test_jobs_api.py | 16 ++---- .../webapp/api/test_performance_alerts_api.py | 2 +- .../api/test_performance_alertsummary_api.py | 2 +- .../api/test_performance_bug_template_api.py | 14 ++--- tests/webapp/api/test_performance_data_api.py | 28 ++++------ tests/webapp/api/test_version.py | 2 +- treeherder/changelog/models.py | 2 +- treeherder/client/setup.py | 3 +- treeherder/client/thclient/client.py | 8 +-- treeherder/etl/management/commands/ingest.py | 18 +++--- .../management/commands/publish_to_pulse.py | 4 +- .../etl/management/commands/pulse_listener.py | 2 +- .../commands/pulse_listener_tasks.py | 2 +- .../pulse_listener_tasks_classification.py | 2 +- treeherder/etl/perf.py | 8 +-- treeherder/etl/push_loader.py | 4 +- treeherder/etl/pushlog.py | 4 +- treeherder/etl/taskcluster_pulse/handler.py | 8 +-- treeherder/etl/text.py | 3 +- .../intermittents_commenter/commenter.py | 10 ++-- treeherder/log_parser/failureline.py | 2 +- .../management/commands/test_parse_log.py | 2 +- treeherder/log_parser/parsers.py | 32 +++++------ treeherder/log_parser/utils.py | 2 +- treeherder/model/data_cycling/cyclers.py | 17 +++--- .../model/data_cycling/removal_strategies.py | 5 +- .../model/data_cycling/signature_remover.py | 5 +- treeherder/model/error_summary.py | 2 +- .../commands/cache_failure_history.py | 2 +- ...hed_0022_modify_bugscache_and_bugjobmap.py | 1 - .../0002_add_bugjobmap_model_manager.py | 1 - .../0003_add_matcher_name_fields.py | 1 - .../0004_populate_matcher_name_fields.py | 1 - ..._use_matcher_name_for_unique_constraint.py | 1 - .../model/migrations/0006_drop_matcher_fks.py | 1 - ...n_classified_failures_and_failure_match.py | 1 - .../migrations/0008_remove_failure_match.py | 1 - .../0009_add_manager_to_push_and_job.py | 1 - .../migrations/0010_remove_runnable_job.py | 1 - .../migrations/0011_remove_matcher_table.py | 1 - .../model/migrations/0012_branch_maxlen.py | 1 - .../0013_add_index_to_push_revision.py | 1 - .../0015_add_repository_tc_root_url.py | 1 - treeherder/model/models.py | 43 +++++++------- .../auto_perf_sheriffing/backfill_reports.py | 38 ++++++------- .../perf/auto_perf_sheriffing/secretary.py | 3 +- .../perf/auto_perf_sheriffing/sherlock.py | 21 ++++--- treeherder/perf/email.py | 18 +++--- .../commands/compute_criteria_formulas.py | 3 +- 
.../management/commands/import_perf_data.py | 26 ++++----- .../perf/management/commands/perf_sheriff.py | 3 +- .../0001_squashed_0005_permit_github_links.py | 1 - .../0006_add_alert_summary_notes.py | 1 - .../migrations/0007_star_performancealert.py | 1 - .../migrations/0008_add_confirming_state.py | 1 - .../0009_non_nullable_issue_tracker.py | 1 - .../0010_fix_signature_uniqueness.py | 1 - .../0011_inc_extra_options_length.py | 1 - .../0012_rename_summary_last_updated.py | 1 - treeherder/perf/models.py | 38 ++++++------- .../sheriffing_criteria/bugzilla_formulas.py | 25 ++++----- .../sheriffing_criteria/criteria_tracking.py | 16 +++--- treeherder/perfalert/perfalert/__init__.py | 2 +- treeherder/push_health/tests.py | 8 +-- treeherder/push_health/usage.py | 2 +- treeherder/push_health/utils.py | 2 +- treeherder/services/pulse/consumers.py | 10 ++-- treeherder/services/taskcluster.py | 5 +- treeherder/utils/github.py | 16 +++--- treeherder/utils/http.py | 2 +- treeherder/utils/taskcluster.py | 6 +- treeherder/webapp/api/bugzilla.py | 2 - treeherder/webapp/api/infra_serializers.py | 2 +- treeherder/webapp/api/investigated_test.py | 20 ++----- treeherder/webapp/api/jobs.py | 18 +++--- treeherder/webapp/api/note.py | 6 +- treeherder/webapp/api/perfcompare_utils.py | 6 +- treeherder/webapp/api/performance_data.py | 9 ++- .../webapp/api/performance_serializers.py | 4 +- treeherder/webapp/api/push.py | 32 ++++------- treeherder/webapp/api/serializers.py | 4 +- 103 files changed, 320 insertions(+), 429 deletions(-) diff --git a/misc/compare_pushes.py b/misc/compare_pushes.py index 47853ee9543..c23844d660d 100755 --- a/misc/compare_pushes.py +++ b/misc/compare_pushes.py @@ -27,7 +27,7 @@ def main(args): # Support comma separated projects projects = args.projects.split(",") for _project in projects: - logger.info("Comparing {} against production.".format(_project)) + logger.info(f"Comparing {_project} against production.") # Remove properties that are irrelevant for the comparison pushes = compare_to_client.get_pushes(_project, count=50) for _push in sorted(pushes, key=lambda push: push["revision"]): diff --git a/pyproject.toml b/pyproject.toml index 8efc5091277..0feb7161416 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -37,6 +37,8 @@ select = [ "W", # pyflakes "F", + # pyupgrade + "UP", ] ignore = [ diff --git a/tests/autoclassify/utils.py b/tests/autoclassify/utils.py index 90c4d669ffd..3fc5241cef7 100644 --- a/tests/autoclassify/utils.py +++ b/tests/autoclassify/utils.py @@ -42,10 +42,10 @@ def create_failure_lines(job, failure_line_list, start_line=0): job_log = JobLog.objects.create( job=job, name="{}{}".format(base_data.get("test"), job.id), - url="bar{}".format(i), + url=f"bar{i}", status=1, ) - print("create jobLog for job id: {}".format(job.id)) + print(f"create jobLog for job id: {job.id}") failure_line.job_log = job_log failure_line.save() failure_lines.append(failure_line) diff --git a/tests/conftest.py b/tests/conftest.py index 7e7b4527df7..0b485d6e9a9 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -427,7 +427,7 @@ def eleven_job_blobs(sample_data, sample_push, test_repository, mock_log_parser) del blob["sources"] blob["revision"] = sample_push[push_index]["revision"] - blob["taskcluster_task_id"] = "V3SVuxO8TFy37En_6HcXL{}".format(task_id_index) + blob["taskcluster_task_id"] = f"V3SVuxO8TFy37En_6HcXL{task_id_index}" blob["taskcluster_retry_id"] = "0" blobs.append(blob) @@ -463,7 +463,7 @@ def eleven_job_blobs_new_date(sample_data, sample_push, test_repository, mock_lo 
del blob["sources"] blob["revision"] = sample_push[push_index]["revision"] - blob["taskcluster_task_id"] = "V3SVuxO8TFy37En_6HcX{:0>2}".format(task_id_index) + blob["taskcluster_task_id"] = f"V3SVuxO8TFy37En_6HcX{task_id_index:0>2}" blob["taskcluster_retry_id"] = "0" blob["job"]["revision"] = sample_push[push_index]["revision"] blob["job"]["submit_timestamp"] = sample_push[push_index]["push_timestamp"] @@ -843,7 +843,7 @@ def _fetch_data(self, project): % project ) files_bugzilla_data = None - file_name = "files_bugzilla_map_%s_%s.json" % (project, self.run_id) + file_name = f"files_bugzilla_map_{project}_{self.run_id}.json" exception = None try: tests_folder = os.path.dirname(__file__) @@ -1117,7 +1117,7 @@ def bug_data(eleven_jobs_stored, test_repository, test_push, bugs): bug_id = bugs[0].id job_id = jobs[0].id th_models.BugJobMap.create(job_id=job_id, bug_id=bug_id) - query_string = "?startday=2012-05-09&endday=2018-05-10&tree={}".format(test_repository.name) + query_string = f"?startday=2012-05-09&endday=2018-05-10&tree={test_repository.name}" return { "tree": test_repository.name, @@ -1270,7 +1270,7 @@ def __init__(self, *prior_dirs): def __call__(self, fixture_filename): fixture_path = join(*self._prior_dirs, fixture_filename) - with open(fixture_path, "r") as f: + with open(fixture_path) as f: return json.load(f) diff --git a/tests/e2e/test_job_ingestion.py b/tests/e2e/test_job_ingestion.py index 7dff6573c11..3c8264231a7 100644 --- a/tests/e2e/test_job_ingestion.py +++ b/tests/e2e/test_job_ingestion.py @@ -1,4 +1,4 @@ -from mock import MagicMock +from unittest.mock import MagicMock from tests.test_utils import add_log_response from treeherder.etl.jobs import store_job_data diff --git a/tests/etl/test_perf_data_load.py b/tests/etl/test_perf_data_load.py index 518af66550b..3aa459f725f 100644 --- a/tests/etl/test_perf_data_load.py +++ b/tests/etl/test_perf_data_load.py @@ -5,7 +5,6 @@ import time import pytest -from typing import List from django.core.management import call_command from django.db import IntegrityError @@ -87,7 +86,7 @@ def sample_perf_artifact() -> dict: @pytest.fixture -def sibling_perf_artifacts(sample_perf_artifact: dict) -> List[dict]: +def sibling_perf_artifacts(sample_perf_artifact: dict) -> list[dict]: """intended to belong to the same job""" artifacts = [copy.deepcopy(sample_perf_artifact) for _ in range(3)] diff --git a/tests/etl/test_pushlog.py b/tests/etl/test_pushlog.py index 8da2f658d8f..2cee7d84945 100644 --- a/tests/etl/test_pushlog.py +++ b/tests/etl/test_pushlog.py @@ -104,7 +104,7 @@ def test_ingest_hg_pushlog_cache_last_push(test_repository, test_base_dir, activ pushes = pushlog_dict["pushes"] max_push_id = max(int(k) for k in pushes.keys()) - cache_key = "{}:last_push_id".format(test_repository.name) + cache_key = f"{test_repository.name}:last_push_id" assert cache.get(cache_key) == max_push_id diff --git a/tests/etl/test_text.py b/tests/etl/test_text.py index 9046ae12c6d..62950c3df1b 100644 --- a/tests/etl/test_text.py +++ b/tests/etl/test_text.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- from treeherder.etl.text import astral_filter, filter_re diff --git a/tests/intermittents_commenter/test_commenter.py b/tests/intermittents_commenter/test_commenter.py index c32a0e2d60c..965521bf6b2 100644 --- a/tests/intermittents_commenter/test_commenter.py +++ b/tests/intermittents_commenter/test_commenter.py @@ -37,7 +37,7 @@ def test_intermittents_commenter(bug_data): comment_params = process.generate_bug_changes(startday, endday, alt_startday, alt_endday) - 
with open("tests/intermittents_commenter/expected_comment.text", "r") as comment: + with open("tests/intermittents_commenter/expected_comment.text") as comment: expected_comment = comment.read() print(len(expected_comment)) print(len(comment_params[0]["changes"]["comment"]["body"])) diff --git a/tests/log_parser/test_log_view_artifact_builder.py b/tests/log_parser/test_log_view_artifact_builder.py index 74b7160ce52..d1e345490fd 100644 --- a/tests/log_parser/test_log_view_artifact_builder.py +++ b/tests/log_parser/test_log_view_artifact_builder.py @@ -18,7 +18,7 @@ def do_test(log): result file with the same prefix. """ - url = add_log_response("{}.txt.gz".format(log)) + url = add_log_response(f"{log}.txt.gz") builder = LogViewerArtifactBuilder(url) lpc = ArtifactBuilderCollection(url, builders=builder) @@ -31,7 +31,7 @@ def do_test(log): # with open(SampleData().get_log_path("{0}.logview.json".format(log)), "w") as f: # f.write(json.dumps(act, indent=2)) - exp = test_utils.load_exp("{0}.logview.json".format(log)) + exp = test_utils.load_exp(f"{log}.logview.json") assert act == exp diff --git a/tests/log_parser/test_performance_parser.py b/tests/log_parser/test_performance_parser.py index 34944a36789..1c36e142a96 100644 --- a/tests/log_parser/test_performance_parser.py +++ b/tests/log_parser/test_performance_parser.py @@ -27,6 +27,6 @@ def test_performance_log_parsing_malformed_perfherder_data(): } ], } - parser.parse_line("PERFHERDER_DATA: {}".format(json.dumps(valid_perfherder_data)), 3) + parser.parse_line(f"PERFHERDER_DATA: {json.dumps(valid_perfherder_data)}", 3) assert parser.get_artifact() == [valid_perfherder_data] diff --git a/tests/perf/auto_perf_sheriffing/test_backfill_reports/test_backfill_report_maintainer.py b/tests/perf/auto_perf_sheriffing/test_backfill_reports/test_backfill_report_maintainer.py index 9729c58f394..682e23d07a4 100644 --- a/tests/perf/auto_perf_sheriffing/test_backfill_reports/test_backfill_report_maintainer.py +++ b/tests/perf/auto_perf_sheriffing/test_backfill_reports/test_backfill_report_maintainer.py @@ -1,6 +1,5 @@ import random import datetime -from typing import Tuple from treeherder.perf.auto_perf_sheriffing.backfill_reports import ( BackfillReportMaintainer, @@ -141,7 +140,7 @@ def test_reports_are_updated_after_alert_summaries_change( assert initial_records_timestamps != records_timestamps -def __fetch_report_timestamps(test_perf_alert_summary) -> Tuple: +def __fetch_report_timestamps(test_perf_alert_summary) -> tuple: report = BackfillReport.objects.get(summary=test_perf_alert_summary) report_timestamps = report.created, report.last_updated records_timestamps = [record.created for record in report.records.all()] diff --git a/tests/perf/auto_sheriffing_criteria/test_common_behaviour.py b/tests/perf/auto_sheriffing_criteria/test_common_behaviour.py index 994466fd9e4..d1c04998054 100644 --- a/tests/perf/auto_sheriffing_criteria/test_common_behaviour.py +++ b/tests/perf/auto_sheriffing_criteria/test_common_behaviour.py @@ -2,7 +2,7 @@ import pytest from django.conf import settings -from typing import List, Type, Callable +from typing import Callable from tests.perf.auto_sheriffing_criteria.conftest import CASSETTES_RECORDING_DATE from treeherder.config.settings import BZ_DATETIME_FORMAT @@ -18,15 +18,15 @@ pytestmark = [pytest.mark.freeze_time(CASSETTES_RECORDING_DATE, tick=True)] -def bugzilla_formula_instances() -> List[BugzillaFormula]: +def bugzilla_formula_instances() -> list[BugzillaFormula]: return [EngineerTractionFormula(), FixRatioFormula()] 
-def formula_instances() -> List[Callable]: +def formula_instances() -> list[Callable]: return bugzilla_formula_instances() + [TotalAlertsFormula()] -def concrete_formula_classes() -> List[Type[BugzillaFormula]]: +def concrete_formula_classes() -> list[type[BugzillaFormula]]: return [EngineerTractionFormula, FixRatioFormula] diff --git a/tests/perf/auto_sheriffing_criteria/test_criteria_tracker.py b/tests/perf/auto_sheriffing_criteria/test_criteria_tracker.py index 33d972fab38..6b49ff0d4ff 100644 --- a/tests/perf/auto_sheriffing_criteria/test_criteria_tracker.py +++ b/tests/perf/auto_sheriffing_criteria/test_criteria_tracker.py @@ -151,7 +151,7 @@ def should_take_more_than(seconds: float): @pytest.fixture def updatable_criteria_csv(tmp_path): updatable_csv = tmp_path / "updatable-criteria.csv" - with open(RECORD_TEST_PATH, "r") as file_: + with open(RECORD_TEST_PATH) as file_: updatable_csv.write_text(file_.read()) return updatable_csv diff --git a/tests/perf/auto_sheriffing_criteria/test_engineer_traction.py b/tests/perf/auto_sheriffing_criteria/test_engineer_traction.py index 530d693e4d9..dde39207d04 100644 --- a/tests/perf/auto_sheriffing_criteria/test_engineer_traction.py +++ b/tests/perf/auto_sheriffing_criteria/test_engineer_traction.py @@ -1,7 +1,6 @@ import pytest from datetime import datetime, timedelta -from typing import List from tests.perf.auto_sheriffing_criteria.conftest import CASSETTES_RECORDING_DATE from treeherder.config.settings import BZ_DATETIME_FORMAT @@ -44,7 +43,7 @@ def quantified_bugs(betamax_recorder) -> list: @pytest.fixture -def cooled_down_bugs(nonblock_session, quantified_bugs) -> List[dict]: +def cooled_down_bugs(nonblock_session, quantified_bugs) -> list[dict]: bugs = [] for bug in quantified_bugs: created_at = datetime.strptime(bug["creation_time"], BZ_DATETIME_FORMAT) diff --git a/tests/push_health/test_usage.py b/tests/push_health/test_usage.py index 04fd6bbaf04..09da19fbf2b 100644 --- a/tests/push_health/test_usage.py +++ b/tests/push_health/test_usage.py @@ -34,7 +34,7 @@ def test_get_usage(push_usage, test_repository): nrql = "SELECT%20max(needInvestigation)%20FROM%20push_health_need_investigation%20FACET%20revision%20SINCE%201%20DAY%20AGO%20TIMESERIES%20where%20repo%3D'{}'%20AND%20appName%3D'{}'".format( "try", "treeherder-prod" ) - new_relic_url = "{}?nrql={}".format(settings.NEW_RELIC_INSIGHTS_API_URL, nrql) + new_relic_url = f"{settings.NEW_RELIC_INSIGHTS_API_URL}?nrql={nrql}" responses.add( responses.GET, diff --git a/tests/sampledata.py b/tests/sampledata.py index 2296b67c331..99dda0f6d8d 100644 --- a/tests/sampledata.py +++ b/tests/sampledata.py @@ -5,89 +5,67 @@ class SampleData: @classmethod def get_perf_data(cls, filename): - with open( - "{0}/sample_data/artifacts/performance/{1}".format(os.path.dirname(__file__), filename) - ) as f: + with open(f"{os.path.dirname(__file__)}/sample_data/artifacts/performance/{filename}") as f: return json.load(f) def __init__(self): - self.job_data_file = "{0}/sample_data/job_data.txt".format(os.path.dirname(__file__)) + self.job_data_file = f"{os.path.dirname(__file__)}/sample_data/job_data.txt" - self.push_data_file = "{0}/sample_data/push_data.json".format(os.path.dirname(__file__)) + self.push_data_file = f"{os.path.dirname(__file__)}/sample_data/push_data.json" - self.logs_dir = "{0}/sample_data/logs".format(os.path.dirname(__file__)) + self.logs_dir = f"{os.path.dirname(__file__)}/sample_data/logs" - with open( - "{0}/sample_data/artifacts/text_log_summary.json".format(os.path.dirname(__file__)) - ) 
as f: + with open(f"{os.path.dirname(__file__)}/sample_data/artifacts/text_log_summary.json") as f: self.text_log_summary = json.load(f) with open( - "{0}/sample_data/pulse_consumer/taskcluster_pulse_messages.json".format( + "{}/sample_data/pulse_consumer/taskcluster_pulse_messages.json".format( os.path.dirname(__file__) ) ) as f: self.taskcluster_pulse_messages = json.load(f) with open( - "{0}/sample_data/pulse_consumer/taskcluster_tasks.json".format( - os.path.dirname(__file__) - ) + f"{os.path.dirname(__file__)}/sample_data/pulse_consumer/taskcluster_tasks.json" ) as f: self.taskcluster_tasks = json.load(f) with open( - "{0}/sample_data/pulse_consumer/taskcluster_transformed_jobs.json".format( + "{}/sample_data/pulse_consumer/taskcluster_transformed_jobs.json".format( os.path.dirname(__file__) ) ) as f: self.taskcluster_transformed_jobs = json.load(f) - with open( - "{0}/sample_data/pulse_consumer/job_data.json".format(os.path.dirname(__file__)) - ) as f: + with open(f"{os.path.dirname(__file__)}/sample_data/pulse_consumer/job_data.json") as f: self.pulse_jobs = json.load(f) with open( - "{0}/sample_data/pulse_consumer/transformed_job_data.json".format( - os.path.dirname(__file__) - ) + f"{os.path.dirname(__file__)}/sample_data/pulse_consumer/transformed_job_data.json" ) as f: self.transformed_pulse_jobs = json.load(f) - with open( - "{0}/sample_data/pulse_consumer/github_push.json".format(os.path.dirname(__file__)) - ) as f: + with open(f"{os.path.dirname(__file__)}/sample_data/pulse_consumer/github_push.json") as f: self.github_push = json.load(f) with open( - "{0}/sample_data/pulse_consumer/transformed_gh_push.json".format( - os.path.dirname(__file__) - ) + f"{os.path.dirname(__file__)}/sample_data/pulse_consumer/transformed_gh_push.json" ) as f: self.transformed_github_push = json.load(f) - with open( - "{0}/sample_data/pulse_consumer/github_pr.json".format(os.path.dirname(__file__)) - ) as f: + with open(f"{os.path.dirname(__file__)}/sample_data/pulse_consumer/github_pr.json") as f: self.github_pr = json.load(f) with open( - "{0}/sample_data/pulse_consumer/transformed_gh_pr.json".format( - os.path.dirname(__file__) - ) + f"{os.path.dirname(__file__)}/sample_data/pulse_consumer/transformed_gh_pr.json" ) as f: self.transformed_github_pr = json.load(f) - with open( - "{0}/sample_data/pulse_consumer/hg_push.json".format(os.path.dirname(__file__)) - ) as f: + with open(f"{os.path.dirname(__file__)}/sample_data/pulse_consumer/hg_push.json") as f: self.hg_push = json.load(f) with open( - "{0}/sample_data/pulse_consumer/transformed_hg_push.json".format( - os.path.dirname(__file__) - ) + f"{os.path.dirname(__file__)}/sample_data/pulse_consumer/transformed_hg_push.json" ) as f: self.transformed_hg_push = json.load(f) @@ -106,4 +84,4 @@ def initialize_data(self): def get_log_path(self, name): """Returns the full path to a log file""" - return "{0}/{1}".format(self.logs_dir, name) + return f"{self.logs_dir}/{name}" diff --git a/tests/services/test_taskcluster.py b/tests/services/test_taskcluster.py index d0244cb067b..3b55fe455ce 100644 --- a/tests/services/test_taskcluster.py +++ b/tests/services/test_taskcluster.py @@ -54,15 +54,15 @@ def test_filter_relevant_actions(self, actions_json, original_task, expected_act def test_task_in_context(self): # match - tag_set_list, task_tags = [ + tag_set_list, task_tags = ( load_json_fixture(f) for f in ("matchingTagSetList.json", "matchingTaskTags.json") - ] + ) assert TaskclusterModelImpl._task_in_context(tag_set_list, task_tags) is True # mismatch - 
tag_set_list, task_tags = [ + tag_set_list, task_tags = ( load_json_fixture(f) for f in ("mismatchingTagSetList.json", "mismatchingTaskTags.json") - ] + ) assert TaskclusterModelImpl._task_in_context(tag_set_list, task_tags) is False def test_get_action(self, actions_json, expected_backfill_task): diff --git a/tests/test_dockerflow.py b/tests/test_dockerflow.py index f8362c54e91..497fe3a9a45 100644 --- a/tests/test_dockerflow.py +++ b/tests/test_dockerflow.py @@ -9,7 +9,7 @@ def test_get_version(client): response = client.get("/__version__") assert response.status_code == 200 - with open(f"{settings.BASE_DIR}/version.json", "r") as version_file: + with open(f"{settings.BASE_DIR}/version.json") as version_file: assert response.json() == json.loads(version_file.read()) diff --git a/tests/test_utils.py b/tests/test_utils.py index 81042a789f1..69eedc54d42 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -213,7 +213,7 @@ def add_log_response(filename): Set up responses for a local gzipped log and return the url for it. """ log_path = SampleData().get_log_path(filename) - log_url = "http://my-log.mozilla.org/{}".format(filename) + log_url = f"http://my-log.mozilla.org/{filename}" with open(log_path, "rb") as log_file: content = log_file.read() diff --git a/tests/webapp/api/test_bug_job_map_api.py b/tests/webapp/api/test_bug_job_map_api.py index a937b8b25f6..b7d8fadd748 100644 --- a/tests/webapp/api/test_bug_job_map_api.py +++ b/tests/webapp/api/test_bug_job_map_api.py @@ -95,7 +95,7 @@ def test_bug_job_map_detail(client, eleven_jobs_stored, test_repository, test_us user=test_user, ) - pk = "{0}-{1}".format(job.id, bug.id) + pk = f"{job.id}-{bug.id}" resp = client.get( reverse("bug-job-map-detail", kwargs={"project": test_repository.name, "pk": pk}) @@ -130,7 +130,7 @@ def test_bug_job_map_delete( if not test_no_auth: client.force_authenticate(user=test_user) - pk = "{0}-{1}".format(job.id, bug.id) + pk = f"{job.id}-{bug.id}" resp = client.delete( reverse("bug-job-map-detail", kwargs={"project": test_repository.name, "pk": pk}) diff --git a/tests/webapp/api/test_bugzilla.py b/tests/webapp/api/test_bugzilla.py index c02ca9fcf88..7f6d09f5508 100644 --- a/tests/webapp/api/test_bugzilla.py +++ b/tests/webapp/api/test_bugzilla.py @@ -1,5 +1,3 @@ -# coding: utf-8 - import json import responses diff --git a/tests/webapp/api/test_jobs_api.py b/tests/webapp/api/test_jobs_api.py index 4a2e453a7e7..7b1fb6e3b35 100644 --- a/tests/webapp/api/test_jobs_api.py +++ b/tests/webapp/api/test_jobs_api.py @@ -18,11 +18,9 @@ def test_job_list(client, eleven_jobs_stored, test_repository, offset, count, ex endpoint. """ url = reverse("jobs-list", kwargs={"project": test_repository.name}) - params = "&".join( - ["{}={}".format(k, v) for k, v in [("offset", offset), ("count", count)] if v] - ) + params = "&".join([f"{k}={v}" for k, v in [("offset", offset), ("count", count)] if v]) if params: - url += "?{}".format(params) + url += f"?{params}" resp = client.get(url) assert resp.status_code == 200 response_dict = resp.json() @@ -143,7 +141,7 @@ def test_job_list_filter_fields(client, eleven_jobs_stored, test_repository, fie to make this test easy. 
""" url = reverse("jobs-list", kwargs={"project": test_repository.name}) - final_url = url + "?{}={}".format(fieldname, expected) + final_url = url + f"?{fieldname}={expected}" resp = client.get(final_url) assert resp.status_code == 200 first = resp.json()["results"][0] @@ -245,11 +243,9 @@ def test_list_similar_jobs(client, eleven_jobs_stored, offset, count, expected_n job = Job.objects.get(id=1) url = reverse("jobs-similar-jobs", kwargs={"project": job.repository.name, "pk": job.id}) - params = "&".join( - ["{}={}".format(k, v) for k, v in [("offset", offset), ("count", count)] if v] - ) + params = "&".join([f"{k}={v}" for k, v in [("offset", offset), ("count", count)] if v]) if params: - url += "?{}".format(params) + url += f"?{params}" resp = client.get(url) assert resp.status_code == 200 @@ -288,7 +284,7 @@ def test_last_modified( pass url = reverse("jobs-list", kwargs={"project": test_repository.name}) - final_url = url + ("?{}={}".format(lm_key, lm_value)) + final_url = url + (f"?{lm_key}={lm_value}") resp = client.get(final_url) assert resp.status_code == exp_status diff --git a/tests/webapp/api/test_performance_alerts_api.py b/tests/webapp/api/test_performance_alerts_api.py index aeb87ce93d0..6c92f63a3f8 100644 --- a/tests/webapp/api/test_performance_alerts_api.py +++ b/tests/webapp/api/test_performance_alerts_api.py @@ -673,4 +673,4 @@ def dump(an_alert): for alert in alerts: dump(alert) for perf_datum in perf_data: - pprint("PerfData(id={0.push_id}, push_timestamp={0.push_timestamp})".format(perf_datum)) + pprint(f"PerfData(id={perf_datum.push_id}, push_timestamp={perf_datum.push_timestamp})") diff --git a/tests/webapp/api/test_performance_alertsummary_api.py b/tests/webapp/api/test_performance_alertsummary_api.py index 32b31b314e3..46c6e00fd24 100644 --- a/tests/webapp/api/test_performance_alertsummary_api.py +++ b/tests/webapp/api/test_performance_alertsummary_api.py @@ -31,7 +31,7 @@ def test_perf_alert_summary_onhold(test_repository_onhold, test_perf_framework): for i in range(2): Push.objects.create( repository=test_repository_onhold, - revision="1234abcd{}".format(i), + revision=f"1234abcd{i}", author="foo@bar.com", time=datetime.now(), ) diff --git a/tests/webapp/api/test_performance_bug_template_api.py b/tests/webapp/api/test_performance_bug_template_api.py index 089853dfbc3..08aced1258b 100644 --- a/tests/webapp/api/test_performance_bug_template_api.py +++ b/tests/webapp/api/test_performance_bug_template_api.py @@ -9,12 +9,12 @@ def test_perf_bug_template_api(client, test_perf_framework): template_dicts = [] for framework, i in zip((test_perf_framework, framework2), range(2)): dict = { - "keywords": "keyword{}".format(i), - "status_whiteboard": "sw{}".format(i), - "default_component": "dfcom{}".format(i), - "default_product": "dfprod{}".format(i), - "cc_list": "foo{}@bar.com".format(i), - "text": "my great text {}".format(i), + "keywords": f"keyword{i}", + "status_whiteboard": f"sw{i}", + "default_component": f"dfcom{i}", + "default_product": f"dfprod{i}", + "cc_list": f"foo{i}@bar.com", + "text": f"my great text {i}", } PerformanceBugTemplate.objects.create(framework=framework, **dict) dict["framework"] = framework.id @@ -27,7 +27,7 @@ def test_perf_bug_template_api(client, test_perf_framework): # test that we can get just one (the usual case, probably) resp = client.get( - reverse("performance-bug-template-list") + "?framework={}".format(test_perf_framework.id) + reverse("performance-bug-template-list") + f"?framework={test_perf_framework.id}" ) assert 
resp.status_code == 200 assert resp.json() == [template_dicts[0]] diff --git a/tests/webapp/api/test_performance_data_api.py b/tests/webapp/api/test_performance_data_api.py index 126a04d7698..8bc29282e4f 100644 --- a/tests/webapp/api/test_performance_data_api.py +++ b/tests/webapp/api/test_performance_data_api.py @@ -102,7 +102,7 @@ def test_performance_platforms_expired_test(client, test_perf_signature): "performance-signatures-platforms-list", kwargs={"project": test_perf_signature.repository.name}, ) - + "?interval={}".format(86400) + + "?interval=86400" ) assert resp.status_code == 200 assert resp.json() == [] @@ -140,7 +140,7 @@ def test_performance_platforms_framework_filtering(client, test_perf_signature): "performance-signatures-platforms-list", kwargs={"project": test_perf_signature.repository.name}, ) - + "?framework={}".format(framework2.id) + + f"?framework={framework2.id}" ) assert resp.status_code == 200 assert resp.json() == ["win7-a"] @@ -259,7 +259,7 @@ def test_filter_data_by_no_retriggers( resp = client.get( reverse("performance-data-list", kwargs={"project": test_repository.name}) - + "?signatures={}&no_retriggers=true".format(test_perf_signature.signature_hash) + + f"?signatures={test_perf_signature.signature_hash}&no_retriggers=true" ) assert resp.status_code == 200 datums = resp.data[test_perf_signature.signature_hash] @@ -316,9 +316,7 @@ def test_filter_data_by_framework( # Filtering by second framework resp = client.get( reverse("performance-data-list", kwargs={"project": test_repository.name}) - + "?signatures={}&framework={}".format( - test_perf_signature.signature_hash, signature2.framework.id - ) + + f"?signatures={test_perf_signature.signature_hash}&framework={signature2.framework.id}" ) assert resp.status_code == 200 datums = resp.data[test_perf_signature.signature_hash] @@ -332,7 +330,7 @@ def test_filter_signatures_by_interval(client, test_perf_signature): reverse( "performance-signatures-list", kwargs={"project": test_perf_signature.repository.name} ) - + "?interval={}".format(86400) + + "?interval=86400" ) assert resp.status_code == 200 assert len(resp.json().keys()) == 1 @@ -354,7 +352,7 @@ def test_filter_signatures_by_range( reverse( "performance-signatures-list", kwargs={"project": test_perf_signature.repository.name} ) - + "?start_date={}&end_date={}".format(start_date, end_date) + + f"?start_date={start_date}&end_date={end_date}" ) assert resp.status_code == 200 assert len(resp.json().keys()) == exp_count @@ -387,7 +385,7 @@ def test_filter_data_by_interval( # going back interval of 1 day, should find 1 item resp = client.get( reverse("performance-data-list", kwargs={"project": test_repository.name}) - + "?signature_id={}&interval={}".format(test_perf_signature.id, interval) + + f"?signature_id={test_perf_signature.id}&interval={interval}" ) assert resp.status_code == 200 @@ -424,9 +422,7 @@ def test_filter_data_by_range( resp = client.get( reverse("performance-data-list", kwargs={"project": test_repository.name}) - + "?signature_id={}&start_date={}&end_date={}".format( - test_perf_signature.id, start_date, end_date - ) + + f"?signature_id={test_perf_signature.id}&start_date={start_date}&end_date={end_date}" ) assert resp.status_code == 200 @@ -472,7 +468,7 @@ def test_filter_data_by_signature( ]: resp = client.get( reverse("performance-data-list", kwargs={"project": test_repository.name}) - + "?{}={}".format(param, value) + + f"?{param}={value}" ) assert resp.status_code == 200 assert len(resp.data.keys()) == 1 @@ -719,7 +715,7 @@ def 
test_alert_summary_tasks_get(client, test_perf_alert_summary, test_perf_data status=PerformanceAlert.REASSIGNED, ) resp = client.get( - reverse("performance-alertsummary-tasks") + "?id={}".format(test_perf_alert_summary.id) + reverse("performance-alertsummary-tasks") + f"?id={test_perf_alert_summary.id}" ) assert resp.status_code == 200 assert resp.json() == { @@ -737,9 +733,7 @@ def test_alert_summary_tasks_get_failure(client, test_perf_alert_summary): # verify that we fail if PerformanceAlertSummary does not exist not_exist_summary_id = test_perf_alert_summary.id test_perf_alert_summary.delete() - resp = client.get( - reverse("performance-alertsummary-tasks") + "?id={}".format(not_exist_summary_id) - ) + resp = client.get(reverse("performance-alertsummary-tasks") + f"?id={not_exist_summary_id}") assert resp.status_code == 400 assert resp.json() == {"message": ["PerformanceAlertSummary does not exist."]} diff --git a/tests/webapp/api/test_version.py b/tests/webapp/api/test_version.py index 62d38c8d1a9..e093ba75c94 100644 --- a/tests/webapp/api/test_version.py +++ b/tests/webapp/api/test_version.py @@ -26,7 +26,7 @@ def test_unsupported_version(): def test_correct_version(): view = RequestVersionView.as_view() version = settings.REST_FRAMEWORK["ALLOWED_VERSIONS"][0] - request = factory.get("/endpoint/", HTTP_ACCEPT="application/json; version={0}".format(version)) + request = factory.get("/endpoint/", HTTP_ACCEPT=f"application/json; version={version}") response = view(request) assert response.data == {"version": version} diff --git a/treeherder/changelog/models.py b/treeherder/changelog/models.py index 51626ac3b77..62b7d925b44 100644 --- a/treeherder/changelog/models.py +++ b/treeherder/changelog/models.py @@ -19,7 +19,7 @@ class Meta: unique_together = ("id", "remote_id", "type") def __str__(self): - return "[%s] %s by %s" % (self.id, self.message, self.author) + return f"[{self.id}] {self.message} by {self.author}" class ChangelogFile(models.Model): diff --git a/treeherder/client/setup.py b/treeherder/client/setup.py index 71cedb709c4..302cd38a5ea 100644 --- a/treeherder/client/setup.py +++ b/treeherder/client/setup.py @@ -1,4 +1,3 @@ -import io import os import re @@ -7,7 +6,7 @@ def read(*names, **kwargs): # Taken from https://packaging.python.org/en/latest/single_source_version.html - with io.open( + with open( os.path.join(os.path.dirname(__file__), *names), encoding=kwargs.get("encoding", "utf8") ) as fp: return fp.read() diff --git a/treeherder/client/thclient/client.py b/treeherder/client/thclient/client.py index 890ef1214cd..69a798acb72 100644 --- a/treeherder/client/thclient/client.py +++ b/treeherder/client/thclient/client.py @@ -17,8 +17,8 @@ class TreeherderClient: API_VERSION = "1.1" REQUEST_HEADERS = { - "Accept": "application/json; version={}".format(API_VERSION), - "User-Agent": "treeherder-pyclient/{}".format(__version__), + "Accept": f"application/json; version={API_VERSION}", + "User-Agent": f"treeherder-pyclient/{__version__}", } PUSH_ENDPOINT = "push" @@ -43,9 +43,9 @@ def __init__(self, server_url="https://treeherder.mozilla.org", timeout=30): def _get_endpoint_url(self, endpoint, project=None): if project: - return "{}/api/project/{}/{}/".format(self.server_url, project, endpoint) + return f"{self.server_url}/api/project/{project}/{endpoint}/" - return "{}/api/{}/".format(self.server_url, endpoint) + return f"{self.server_url}/api/{endpoint}/" def _get_json_list(self, endpoint, project=None, **params): if "count" in params and (params["count"] is None or 
params["count"] > self.MAX_COUNT): diff --git a/treeherder/etl/management/commands/ingest.py b/treeherder/etl/management/commands/ingest.py index f86c75b7c14..366d0042cfc 100644 --- a/treeherder/etl/management/commands/ingest.py +++ b/treeherder/etl/management/commands/ingest.py @@ -38,7 +38,7 @@ conn_sem = BoundedSemaphore(50) -class Connection(object): +class Connection: def __enter__(self): conn_sem.acquire() @@ -51,15 +51,15 @@ def ingest_pr(pr_url, root_url): _, _, _, org, repo, _, pull_number, _ = pr_url.split("/", 7) pulse = { "exchange": "exchange/taskcluster-github/v1/pull-request", - "routingKey": "primary.{}.{}.synchronize".format(org, repo), + "routingKey": f"primary.{org}.{repo}.synchronize", "payload": { "repository": repo, "organization": org, "action": "synchronize", "details": { "event.pullNumber": pull_number, - "event.base.repo.url": "https://github.com/{}/{}.git".format(org, repo), - "event.head.repo.url": "https://github.com/{}/{}.git".format(org, repo), + "event.base.repo.url": f"https://github.com/{org}/{repo}.git", + "event.head.repo.url": f"https://github.com/{org}/{repo}.git", }, }, } @@ -233,10 +233,10 @@ def process_job_with_threads(pulse_job, root_url): def find_task_id(index_path, root_url): - index_url = liburls.api(root_url, "index", "v1", "task/{}".format(index_path)) + index_url = liburls.api(root_url, "index", "v1", f"task/{index_path}") response = requests.get(index_url) if response.status_code == 404: - raise Exception("Index URL {} not found".format(index_url)) + raise Exception(f"Index URL {index_url} not found") return response.json()["taskId"] @@ -248,7 +248,7 @@ def get_decision_task_id(project, revision, root_url): def repo_meta(project): _repo = Repository.objects.filter(name=project)[0] - assert _repo, "The project {} you specified is incorrect".format(project) + assert _repo, f"The project {project} you specified is incorrect" splitUrl = _repo.url.split("/") return { "url": _repo.url, @@ -388,9 +388,7 @@ def ingest_git_pushes(project, dry_run=False): oldest_parent_revision = info["parents"][0]["sha"] push_to_date[oldest_parent_revision] = info["commit"]["committer"]["date"] logger.info( - "Push: {} - Date: {}".format( - oldest_parent_revision, push_to_date[oldest_parent_revision] - ) + f"Push: {oldest_parent_revision} - Date: {push_to_date[oldest_parent_revision]}" ) push_revision.append(_commit["sha"]) diff --git a/treeherder/etl/management/commands/publish_to_pulse.py b/treeherder/etl/management/commands/publish_to_pulse.py index 33e3c54a32c..58ddb8254e4 100644 --- a/treeherder/etl/management/commands/publish_to_pulse.py +++ b/treeherder/etl/management/commands/publish_to_pulse.py @@ -33,13 +33,13 @@ def handle(self, *args, **options): userid = urlparse(connection_url).username payload_file = options["payload_file"] - exchange_name = "exchange/{}/jobs".format(userid) + exchange_name = f"exchange/{userid}/jobs" connection = Connection(connection_url) exchange = Exchange(exchange_name, type="topic") producer = Producer(connection, exchange, routing_key=routing_key, auto_declare=True) - self.stdout.write("Published to exchange: {}".format(exchange_name)) + self.stdout.write(f"Published to exchange: {exchange_name}") with open(payload_file) as f: body = f.read() diff --git a/treeherder/etl/management/commands/pulse_listener.py b/treeherder/etl/management/commands/pulse_listener.py index 372027a3e53..34eafd9af6e 100644 --- a/treeherder/etl/management/commands/pulse_listener.py +++ 
b/treeherder/etl/management/commands/pulse_listener.py @@ -41,7 +41,7 @@ def handle(self, *args, **options): ], ) - listener_params = (JointConsumer, pulse_sources, [lambda key: "#.{}".format(key), None]) + listener_params = (JointConsumer, pulse_sources, [lambda key: f"#.{key}", None]) consumer = prepare_joint_consumers(listener_params) try: diff --git a/treeherder/etl/management/commands/pulse_listener_tasks.py b/treeherder/etl/management/commands/pulse_listener_tasks.py index 000321189a2..68f30d5a797 100644 --- a/treeherder/etl/management/commands/pulse_listener_tasks.py +++ b/treeherder/etl/management/commands/pulse_listener_tasks.py @@ -36,7 +36,7 @@ def handle(self, *args, **options): consumers = prepare_consumers( TaskConsumer, task_sources, - lambda key: "#.{}".format(key), + lambda key: f"#.{key}", ) try: diff --git a/treeherder/etl/management/commands/pulse_listener_tasks_classification.py b/treeherder/etl/management/commands/pulse_listener_tasks_classification.py index e0768515264..a61ccee1829 100644 --- a/treeherder/etl/management/commands/pulse_listener_tasks_classification.py +++ b/treeherder/etl/management/commands/pulse_listener_tasks_classification.py @@ -38,7 +38,7 @@ def handle(self, *args, **options): consumers = prepare_consumers( MozciClassificationConsumer, classification_sources, - lambda key: "#.{}".format(key), + lambda key: f"#.{key}", ) try: diff --git a/treeherder/etl/perf.py b/treeherder/etl/perf.py index 182f943a6c8..fef40e2a309 100644 --- a/treeherder/etl/perf.py +++ b/treeherder/etl/perf.py @@ -2,7 +2,7 @@ import logging from datetime import datetime from hashlib import sha1 -from typing import List, Optional, Tuple +from typing import Optional import simplejson as json @@ -51,7 +51,7 @@ def _get_signature_hash(signature_properties): return sha.hexdigest() -def _order_and_concat(words: List) -> str: +def _order_and_concat(words: list) -> str: return " ".join(sorted(words)) @@ -76,7 +76,7 @@ def _create_or_update_signature(repository, signature_hash, framework, applicati return signature -def _deduce_push_timestamp(perf_datum: dict, job_push_time: datetime) -> Tuple[datetime, bool]: +def _deduce_push_timestamp(perf_datum: dict, job_push_time: datetime) -> tuple[datetime, bool]: is_multi_commit = False if not settings.PERFHERDER_ENABLE_MULTIDATA_INGESTION: # the old way of ingestion @@ -119,7 +119,7 @@ def _test_should_alert_based_on( def _test_should_gather_replicates_based_on( - repository: Repository, suite_name: str, replicates: Optional[List] = None + repository: Repository, suite_name: str, replicates: Optional[list] = None ) -> bool: """ Determine if we should gather/ingest replicates. 
Currently, it's diff --git a/treeherder/etl/push_loader.py b/treeherder/etl/push_loader.py index 4d64b419f15..e41687c19b9 100644 --- a/treeherder/etl/push_loader.py +++ b/treeherder/etl/push_loader.py @@ -58,7 +58,7 @@ def get_transformer_class(self, exchange): return GithubPullRequestTransformer elif "/hgpushes/" in exchange: return HgPushTransformer - raise PulsePushError("Unsupported push exchange: {}".format(exchange)) + raise PulsePushError(f"Unsupported push exchange: {exchange}") class GithubTransformer: @@ -156,7 +156,7 @@ def get_branch(self): if self.message_body["details"].get("event.head.tag"): return "tag" - return super(GithubPushTransformer, self).get_branch() + return super().get_branch() def transform(self, repository): push_data = compare_shas( diff --git a/treeherder/etl/pushlog.py b/treeherder/etl/pushlog.py index 49e703bca6f..2ff234bb805 100644 --- a/treeherder/etl/pushlog.py +++ b/treeherder/etl/pushlog.py @@ -53,14 +53,14 @@ def transform_push(self, push): } def run(self, source_url, repository_name, changeset=None, last_push_id=None): - cache_key = "{}:last_push_id".format(repository_name) + cache_key = f"{repository_name}:last_push_id" if not last_push_id: # get the last object seen from cache. this will # reduce the number of pushes processed every time last_push_id = cache.get(cache_key) if not changeset and last_push_id: - startid_url = "{}&startID={}".format(source_url, last_push_id) + startid_url = f"{source_url}&startID={last_push_id}" logger.debug( "Extracted last push for '%s', '%s', from cache, " "attempting to get changes only from that point at: %s", diff --git a/treeherder/etl/taskcluster_pulse/handler.py b/treeherder/etl/taskcluster_pulse/handler.py index 57f9944f862..a2cfc15692c 100644 --- a/treeherder/etl/taskcluster_pulse/handler.py +++ b/treeherder/etl/taskcluster_pulse/handler.py @@ -77,7 +77,7 @@ def parseRouteInfo(prefix, taskId, routes, task): raise PulseHandlerError( "Could not determine Treeherder route. Either there is no route, " + "or more than one matching route exists." - + "Task ID: {taskId} Routes: {routes}".format(taskId=taskId, routes=routes) + + f"Task ID: {taskId} Routes: {routes}" ) parsedRoute = parseRoute(matchingRoutes[0]) @@ -156,7 +156,7 @@ def ignore_task(task, taskId, rootUrl, project): break if ignore: - logger.debug("Task to be ignored ({})".format(taskId)) + logger.debug(f"Task to be ignored ({taskId})") return ignore @@ -225,7 +225,7 @@ def buildMessage(pushInfo, task, runId, payload): job = { "buildSystem": "taskcluster", "owner": task["metadata"]["owner"], - "taskId": "{taskId}/{runId}".format(taskId=slugid.decode(taskId), runId=runId), + "taskId": f"{slugid.decode(taskId)}/{runId}", "retryId": runId, "isRetried": False, "display": { @@ -397,7 +397,7 @@ async def addArtifactUploadedLinks(root_url, taskId, runId, job, session): seen[name] = [artifact["name"]] else: seen[name].append(artifact["name"]) - name = "{name} ({length})".format(name=name, length=len(seen[name]) - 1) + name = f"{name} ({len(seen[name]) - 1})" links.append( { diff --git a/treeherder/etl/text.py b/treeherder/etl/text.py index e327600f372..10dd2a0292d 100644 --- a/treeherder/etl/text.py +++ b/treeherder/etl/text.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- import re # Regexp that matches all non-BMP unicode characters. 
@@ -19,7 +18,7 @@ def convert_unicode_character_to_ascii_repr(match_obj): hex_value = hex_code_point.zfill(6).upper() - return "<U+{}>".format(hex_value) + return f"<U+{hex_value}>" def astral_filter(text): diff --git a/treeherder/intermittents_commenter/commenter.py b/treeherder/intermittents_commenter/commenter.py index c7c597c6a83..5a1e9aacb65 100644 --- a/treeherder/intermittents_commenter/commenter.py +++ b/treeherder/intermittents_commenter/commenter.py @@ -175,7 +175,7 @@ def print_or_submit_changes(self, all_bug_changes): ) def open_file(self, filename, load): - with open("treeherder/intermittents_commenter/{}".format(filename), "r") as myfile: + with open(f"treeherder/intermittents_commenter/{filename}") as myfile: if load: return json.load(myfile) else: @@ -212,7 +212,7 @@ def new_request(self): # Use a custom HTTP adapter, so we can set a non-zero max_retries value. session.mount("https://", requests.adapters.HTTPAdapter(max_retries=3)) session.headers = { - "User-Agent": "treeherder/{}".format(settings.SITE_HOSTNAME), + "User-Agent": f"treeherder/{settings.SITE_HOSTNAME}", "x-bugzilla-api-key": settings.COMMENTER_API_KEY, "Accept": "application/json", } @@ -233,7 +233,7 @@ def fetch_bug_details(self, bug_ids): ) response.raise_for_status() except RequestException as e: - logger.warning("error fetching bugzilla metadata for bugs due to {}".format(e)) + logger.warning(f"error fetching bugzilla metadata for bugs due to {e}") return None if response.headers["Content-Type"] == "text/html; charset=UTF-8": @@ -246,12 +246,12 @@ def fetch_bug_details(self, bug_ids): return data["bugs"] def submit_bug_changes(self, changes, bug_id): - url = "{}/rest/bug/{}".format(settings.BZ_API_URL, str(bug_id)) + url = f"{settings.BZ_API_URL}/rest/bug/{str(bug_id)}" try: response = self.session.put(url, headers=self.session.headers, json=changes, timeout=30) response.raise_for_status() except RequestException as e: - logger.error("error posting comment to bugzilla for bug {} due to {}".format(bug_id, e)) + logger.error(f"error posting comment to bugzilla for bug {bug_id} due to {e}") def get_test_runs(self, startday, endday): """Returns an aggregate of pushes for specified date range and diff --git a/treeherder/log_parser/failureline.py b/treeherder/log_parser/failureline.py index e72d0a660c9..4171620765f 100644 --- a/treeherder/log_parser/failureline.py +++ b/treeherder/log_parser/failureline.py @@ -53,7 +53,7 @@ def write_failure_lines(job_log, log_iter): try: failure_lines = create(job_log, log_list) except DataError as e: - logger.warning("Got DataError inserting failure_line: {}".format(e.args)) + logger.warning(f"Got DataError inserting failure_line: {e.args}") except OperationalError as e: logger.warning("Got OperationalError inserting failure_line") # Retry iff this error is the "incorrect String Value" error diff --git a/treeherder/log_parser/management/commands/test_parse_log.py b/treeherder/log_parser/management/commands/test_parse_log.py index 534f84fba90..6623682a5ce 100644 --- a/treeherder/log_parser/management/commands/test_parse_log.py +++ b/treeherder/log_parser/management/commands/test_parse_log.py @@ -40,7 +40,7 @@ def handle(self, *args, **options): if not options["profile"]: for name, artifact in artifact_bc.artifacts.items(): - print("%s, %s" % (name, json.dumps(artifact, indent=2))) + print(f"{name}, {json.dumps(artifact, indent=2)}") if options["profile"]: print("Timings: %s" % times) diff --git a/treeherder/log_parser/parsers.py b/treeherder/log_parser/parsers.py index 2c9e5c18509..08406da5387
100644 --- a/treeherder/log_parser/parsers.py +++ b/treeherder/log_parser/parsers.py @@ -64,27 +64,23 @@ class ErrorParser(ParserBase): ) RE_ERR_MATCH = re.compile( - ( - r"^g?make(?:\[\d+\])?: \*\*\*" - r"|^[A-Za-z.]+Error: " - r"|^[A-Za-z.]*Exception: " - r"|^\[ FAILED \] " - r"|^remoteFailed:" - r"|^rm: cannot " - r"|^abort:" - r"|^\[taskcluster\] Error:" - r"|^\[[\w._-]+:(?:error|exception)\]" - ) + r"^g?make(?:\[\d+\])?: \*\*\*" + r"|^[A-Za-z.]+Error: " + r"|^[A-Za-z.]*Exception: " + r"|^\[ FAILED \] " + r"|^remoteFailed:" + r"|^rm: cannot " + r"|^abort:" + r"|^\[taskcluster\] Error:" + r"|^\[[\w._-]+:(?:error|exception)\]" ) RE_ERR_SEARCH = re.compile( - ( - r" error\(\d*\):" - r"|:\d+: error:" - r"| error R?C\d*:" - r"|ERROR [45]\d\d:" - r"|mozmake\.(?:exe|EXE)(?:\[\d+\])?: \*\*\*" - ) + r" error\(\d*\):" + r"|:\d+: error:" + r"| error R?C\d*:" + r"|ERROR [45]\d\d:" + r"|mozmake\.(?:exe|EXE)(?:\[\d+\])?: \*\*\*" ) RE_EXCLUDE_1_SEARCH = re.compile(r"TEST-(?:INFO|PASS) ") diff --git a/treeherder/log_parser/utils.py b/treeherder/log_parser/utils.py index b81fe765edd..833287a9279 100644 --- a/treeherder/log_parser/utils.py +++ b/treeherder/log_parser/utils.py @@ -21,7 +21,7 @@ def validate_perf_data(performance_data: dict): for suite in performance_data["suites"]: # allow only one extraOption longer than 45 if len(_long_options(_extra_options(suite), *expected_range)) > 1: - raise ValidationError("Too many extra options longer than {}".format(SECOND_MAX_LENGTH)) + raise ValidationError(f"Too many extra options longer than {SECOND_MAX_LENGTH}") def _long_options(all_extra_options: list, second_max: int, first_max: int): diff --git a/treeherder/model/data_cycling/cyclers.py b/treeherder/model/data_cycling/cyclers.py index a4ef1599177..04346df86f0 100644 --- a/treeherder/model/data_cycling/cyclers.py +++ b/treeherder/model/data_cycling/cyclers.py @@ -1,7 +1,6 @@ import logging from abc import ABC, abstractmethod from datetime import timedelta, datetime -from typing import List from django.db import OperationalError, connection from django.db.backends.utils import CursorWrapper @@ -69,9 +68,9 @@ def cycle(self): rs_deleted = Job.objects.cycle_data( self.cycle_interval, self.chunk_size, self.sleep_time ) - logger.warning("Deleted {} jobs".format(rs_deleted)) + logger.warning(f"Deleted {rs_deleted} jobs") except OperationalError as e: - logger.error("Error running cycle_data: {}".format(e)) + logger.error(f"Error running cycle_data: {e}") self._remove_leftovers() @@ -79,17 +78,17 @@ def _remove_leftovers(self): logger.warning("Pruning ancillary data: job types, groups and machines") def prune(reference_model, id_name, model): - logger.warning("Pruning {}s".format(model.__name__)) + logger.warning(f"Pruning {model.__name__}s") used_ids = ( reference_model.objects.only(id_name).values_list(id_name, flat=True).distinct() ) unused_ids = model.objects.exclude(id__in=used_ids).values_list("id", flat=True) - logger.warning("Removing {} records from {}".format(len(unused_ids), model.__name__)) + logger.warning(f"Removing {len(unused_ids)} records from {model.__name__}") while len(unused_ids): delete_ids = unused_ids[: self.chunk_size] - logger.warning("deleting {} of {}".format(len(delete_ids), len(unused_ids))) + logger.warning(f"deleting {len(delete_ids)} of {len(unused_ids)}") model.objects.filter(id__in=delete_ids).delete() unused_ids = unused_ids[self.chunk_size :] @@ -111,7 +110,7 @@ def __init__( sleep_time: int, is_debug: bool = None, days: int = None, - strategies: List[RemovalStrategy] = 
None, + strategies: list[RemovalStrategy] = None, **kwargs, ): super().__init__(chunk_size, sleep_time, is_debug) @@ -223,9 +222,7 @@ def _delete_in_chunks(self, strategy: RemovalStrategy): break # either finished removing all expired data or failed else: any_successful_attempt = True - logger.debug( - "Successfully deleted {} performance datum rows".format(deleted_rows) - ) + logger.debug(f"Successfully deleted {deleted_rows} performance datum rows") def __handle_chunk_removal_exception( self, exception, cursor: CursorWrapper, any_successful_attempt: bool diff --git a/treeherder/model/data_cycling/removal_strategies.py b/treeherder/model/data_cycling/removal_strategies.py index 2f9fc6469cb..4470bb1c537 100644 --- a/treeherder/model/data_cycling/removal_strategies.py +++ b/treeherder/model/data_cycling/removal_strategies.py @@ -4,7 +4,6 @@ from abc import ABC, abstractmethod from datetime import timedelta, datetime from itertools import cycle -from typing import List from django.conf import settings from django.db.backends.utils import CursorWrapper @@ -48,7 +47,7 @@ def name(self) -> str: pass @staticmethod - def fabricate_all_strategies(*args, **kwargs) -> List[RemovalStrategy]: + def fabricate_all_strategies(*args, **kwargs) -> list[RemovalStrategy]: return [ MainRemovalStrategy(*args, **kwargs), TryDataRemoval(*args, **kwargs), @@ -364,7 +363,7 @@ def target_signature(self) -> PerformanceSignature: return self._target_signature @property - def removable_signatures(self) -> List[PerformanceSignature]: + def removable_signatures(self) -> list[PerformanceSignature]: if self._removable_signatures is None: self._removable_signatures = list( PerformanceSignature.objects.filter(last_updated__lte=self._max_timestamp).order_by( diff --git a/treeherder/model/data_cycling/signature_remover.py b/treeherder/model/data_cycling/signature_remover.py index 605f764d8be..46ca24e3e41 100644 --- a/treeherder/model/data_cycling/signature_remover.py +++ b/treeherder/model/data_cycling/signature_remover.py @@ -1,5 +1,4 @@ import logging -from typing import List import taskcluster from django.conf import settings @@ -85,7 +84,7 @@ def _delete(chunk_of_signatures): def _send_email(self): self._notify.email(self._email_writer.email) - def __delete_and_notify(self, signatures: List[PerformanceSignature]) -> bool: + def __delete_and_notify(self, signatures: list[PerformanceSignature]) -> bool: """ Atomically deletes perf signatures & notifies about this. @return: whether atomic operation was successful or not @@ -104,5 +103,5 @@ def __delete_and_notify(self, signatures: List[PerformanceSignature]) -> bool: return True - def _prepare_notification(self, signatures: List[PerformanceSignature]): + def _prepare_notification(self, signatures: list[PerformanceSignature]): self._email_writer.prepare_new_email(signatures) diff --git a/treeherder/model/error_summary.py b/treeherder/model/error_summary.py index 00b42671515..5c6278a05de 100644 --- a/treeherder/model/error_summary.py +++ b/treeherder/model/error_summary.py @@ -32,7 +32,7 @@ def get_error_summary(job, queryset=None): Caches the results if there are any. 
""" - cache_key = "error-summary-{}".format(job.id) + cache_key = f"error-summary-{job.id}" cached_error_summary = cache.get(cache_key) if cached_error_summary is not None: return cached_error_summary diff --git a/treeherder/model/management/commands/cache_failure_history.py b/treeherder/model/management/commands/cache_failure_history.py index 7925da70990..429c2f3f0e8 100644 --- a/treeherder/model/management/commands/cache_failure_history.py +++ b/treeherder/model/management/commands/cache_failure_history.py @@ -37,7 +37,7 @@ def handle(self, *args, **options): self.is_debug = options["debug"] days = options["days"] - self.debug("Fetching {} sets of history...".format(days)) + self.debug(f"Fetching {days} sets of history...") option_map = OptionCollection.objects.get_option_collection_map() repository_ids = REPO_GROUPS["trunk"] diff --git a/treeherder/model/migrations/0001_squashed_0022_modify_bugscache_and_bugjobmap.py b/treeherder/model/migrations/0001_squashed_0022_modify_bugscache_and_bugjobmap.py index 919060e0d8d..703156e6d78 100644 --- a/treeherder/model/migrations/0001_squashed_0022_modify_bugscache_and_bugjobmap.py +++ b/treeherder/model/migrations/0001_squashed_0022_modify_bugscache_and_bugjobmap.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Generated by Django 1.11.11 on 2018-03-08 11:41 import django.core.validators import django.db.models.deletion diff --git a/treeherder/model/migrations/0002_add_bugjobmap_model_manager.py b/treeherder/model/migrations/0002_add_bugjobmap_model_manager.py index 6d6ed0465f3..66da6c03dad 100644 --- a/treeherder/model/migrations/0002_add_bugjobmap_model_manager.py +++ b/treeherder/model/migrations/0002_add_bugjobmap_model_manager.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Generated by Django 1.11.12 on 2018-04-30 16:50 from django.db import migrations import django.db.models.manager diff --git a/treeherder/model/migrations/0003_add_matcher_name_fields.py b/treeherder/model/migrations/0003_add_matcher_name_fields.py index 50c7a63b070..650da8dc06a 100644 --- a/treeherder/model/migrations/0003_add_matcher_name_fields.py +++ b/treeherder/model/migrations/0003_add_matcher_name_fields.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Generated by Django 1.11.13 on 2018-05-18 08:11 from django.db import migrations, models diff --git a/treeherder/model/migrations/0004_populate_matcher_name_fields.py b/treeherder/model/migrations/0004_populate_matcher_name_fields.py index 793039c1111..07491ddad77 100644 --- a/treeherder/model/migrations/0004_populate_matcher_name_fields.py +++ b/treeherder/model/migrations/0004_populate_matcher_name_fields.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Generated by Django 1.11.13 on 2018-05-18 08:11 from django.db import migrations diff --git a/treeherder/model/migrations/0005_use_matcher_name_for_unique_constraint.py b/treeherder/model/migrations/0005_use_matcher_name_for_unique_constraint.py index 9ff2d120510..75edc1183a5 100644 --- a/treeherder/model/migrations/0005_use_matcher_name_for_unique_constraint.py +++ b/treeherder/model/migrations/0005_use_matcher_name_for_unique_constraint.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Generated by Django 1.11.13 on 2018-05-18 08:23 from django.db import migrations diff --git a/treeherder/model/migrations/0006_drop_matcher_fks.py b/treeherder/model/migrations/0006_drop_matcher_fks.py index e9362a6a8fc..b84a93e39fe 100644 --- a/treeherder/model/migrations/0006_drop_matcher_fks.py +++ b/treeherder/model/migrations/0006_drop_matcher_fks.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # 
Generated by Django 1.11.13 on 2018-05-18 08:30 from django.db import migrations diff --git a/treeherder/model/migrations/0007_remove_m2m_between_classified_failures_and_failure_match.py b/treeherder/model/migrations/0007_remove_m2m_between_classified_failures_and_failure_match.py index e48926ad9a5..f9da764698e 100644 --- a/treeherder/model/migrations/0007_remove_m2m_between_classified_failures_and_failure_match.py +++ b/treeherder/model/migrations/0007_remove_m2m_between_classified_failures_and_failure_match.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Generated by Django 1.11.13 on 2018-06-05 09:29 from django.db import migrations diff --git a/treeherder/model/migrations/0008_remove_failure_match.py b/treeherder/model/migrations/0008_remove_failure_match.py index 8d1f456a7a3..98a05119bc2 100644 --- a/treeherder/model/migrations/0008_remove_failure_match.py +++ b/treeherder/model/migrations/0008_remove_failure_match.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Generated by Django 1.11.13 on 2018-06-05 09:40 from django.db import migrations diff --git a/treeherder/model/migrations/0009_add_manager_to_push_and_job.py b/treeherder/model/migrations/0009_add_manager_to_push_and_job.py index 2eab91e4568..06fff69d146 100644 --- a/treeherder/model/migrations/0009_add_manager_to_push_and_job.py +++ b/treeherder/model/migrations/0009_add_manager_to_push_and_job.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Generated by Django 1.11.15 on 2018-09-18 18:21 from django.db import migrations import django.db.models.manager diff --git a/treeherder/model/migrations/0010_remove_runnable_job.py b/treeherder/model/migrations/0010_remove_runnable_job.py index 6e1531cfe15..71141c13094 100644 --- a/treeherder/model/migrations/0010_remove_runnable_job.py +++ b/treeherder/model/migrations/0010_remove_runnable_job.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Generated by Django 1.11.15 on 2018-09-26 21:21 from django.db import migrations diff --git a/treeherder/model/migrations/0011_remove_matcher_table.py b/treeherder/model/migrations/0011_remove_matcher_table.py index a3bb74630d5..315c8d4a46a 100644 --- a/treeherder/model/migrations/0011_remove_matcher_table.py +++ b/treeherder/model/migrations/0011_remove_matcher_table.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Generated by Django 1.11.13 on 2018-06-06 09:25 from django.db import migrations diff --git a/treeherder/model/migrations/0012_branch_maxlen.py b/treeherder/model/migrations/0012_branch_maxlen.py index b68eb90920a..06052e1c25d 100644 --- a/treeherder/model/migrations/0012_branch_maxlen.py +++ b/treeherder/model/migrations/0012_branch_maxlen.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Generated by Django 1.11.16 on 2018-12-13 20:29 from django.db import migrations, models diff --git a/treeherder/model/migrations/0013_add_index_to_push_revision.py b/treeherder/model/migrations/0013_add_index_to_push_revision.py index d6aaf1e2609..ef5f3e16a19 100644 --- a/treeherder/model/migrations/0013_add_index_to_push_revision.py +++ b/treeherder/model/migrations/0013_add_index_to_push_revision.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Generated by Django 1.11.17 on 2019-01-02 23:34 from django.db import migrations, models diff --git a/treeherder/model/migrations/0015_add_repository_tc_root_url.py b/treeherder/model/migrations/0015_add_repository_tc_root_url.py index 3adc6eb2a11..e9f10afb0fc 100644 --- a/treeherder/model/migrations/0015_add_repository_tc_root_url.py +++ b/treeherder/model/migrations/0015_add_repository_tc_root_url.py @@ -1,4 +1,3 @@ -# -*- 
coding: utf-8 -*- # Generated by Django 2.2.4 on 2019-08-28 17:39 from django.db import migrations, models diff --git a/treeherder/model/models.py b/treeherder/model/models.py index 4adfe7baab9..0aa283097d4 100644 --- a/treeherder/model/models.py +++ b/treeherder/model/models.py @@ -4,7 +4,6 @@ import re import time from hashlib import sha1 -from typing import List import warnings @@ -80,7 +79,7 @@ class Meta: unique_together = ("os_name", "platform", "architecture") def __str__(self): - return "{0} {1} {2}".format(self.os_name, self.platform, self.architecture) + return f"{self.os_name} {self.platform} {self.architecture}" class Option(NamedModel): @@ -117,11 +116,11 @@ class Meta: verbose_name_plural = "repositories" @classmethod - def fetch_all_names(cls) -> List[str]: + def fetch_all_names(cls) -> list[str]: return cls.objects.values_list("name", flat=True) def __str__(self): - return "{0} {1}".format(self.name, self.repository_group) + return f"{self.name} {self.repository_group}" class Push(models.Model): @@ -145,7 +144,7 @@ class Meta: unique_together = ("repository", "revision") def __str__(self): - return "{0} {1}".format(self.repository.name, self.revision) + return f"{self.repository.name} {self.revision}" def total_jobs(self, job_type, result): return self.jobs.filter(job_type=job_type, result=result).count() @@ -194,7 +193,7 @@ class Meta: unique_together = ("push", "revision") def __str__(self): - return "{0} {1}".format(self.push.repository.name, self.revision) + return f"{self.push.repository.name} {self.revision}" class MachinePlatform(models.Model): @@ -208,7 +207,7 @@ class Meta: unique_together = ("os_name", "platform", "architecture") def __str__(self): - return "{0} {1} {2}".format(self.os_name, self.platform, self.architecture) + return f"{self.os_name} {self.platform} {self.architecture}" class Bugscache(models.Model): @@ -232,7 +231,7 @@ class Meta: ] def __str__(self): - return "{0}".format(self.id) + return f"{self.id}" @classmethod def sanitized_search_term(self, search_term): @@ -322,7 +321,7 @@ class Meta: unique_together = ("product", "component") def __str__(self): - return "{0} :: {1}".format(self.product, self.component) + return f"{self.product} :: {self.component}" class FilesBugzillaMap(models.Model): @@ -335,7 +334,7 @@ class Meta: verbose_name_plural = "files_bugzilla_components" def __str__(self): - return "{0}".format(self.path) + return f"{self.path}" class BugzillaSecurityGroup(models.Model): @@ -363,7 +362,7 @@ class Meta: unique_together = ("name", "symbol") def __str__(self): - return "{0} ({1})".format(self.name, self.symbol) + return f"{self.name} ({self.symbol})" class OptionCollectionManager(models.Manager): @@ -413,7 +412,7 @@ class Meta: unique_together = ("option_collection_hash", "option") def __str__(self): - return "{0}".format(self.option) + return f"{self.option}" class JobType(models.Model): @@ -427,7 +426,7 @@ class Meta: unique_together = (("name", "symbol"),) def __str__(self): - return "{0} ({1})".format(self.name, self.symbol) + return f"{self.name} ({self.symbol})" class FailureClassification(NamedModel): @@ -602,7 +601,7 @@ def tier_is_sheriffable(self) -> bool: return self.tier < 3 def __str__(self): - return "{0} {1} {2}".format(self.id, self.repository, self.guid) + return f"{self.id} {self.repository} {self.guid}" def get_platform_option(self, option_collection_map=None): if not hasattr(self, "platform_option"): @@ -723,7 +722,7 @@ class Meta: unique_together = ("job", "name", "url") def __str__(self): - return "{0} {1} 
{2} {3}".format(self.id, self.job.guid, self.name, self.status) + return f"{self.id} {self.job.guid} {self.name} {self.status}" def update_status(self, status): self.status = status @@ -793,7 +792,7 @@ def create(cls, job_id, bug_id, user=None): return bug_map def __str__(self): - return "{0} {1} {2} {3}".format(self.id, self.job.guid, self.bug_id, self.user) + return f"{self.id} {self.job.guid} {self.bug_id} {self.user}" class JobNote(models.Model): @@ -899,9 +898,7 @@ def delete(self, *args, **kwargs): self._ensure_classification() def __str__(self): - return "{0} {1} {2} {3}".format( - self.id, self.job.guid, self.failure_classification, self.who - ) + return f"{self.id} {self.job.guid} {self.failure_classification} {self.who}" class FailureLine(models.Model): @@ -959,7 +956,7 @@ class Meta: unique_together = ("job_log", "line") def __str__(self): - return "{0} {1}".format(self.id, Job.objects.get(guid=self.job_guid).id) + return f"{self.id} {Job.objects.get(guid=self.job_guid).id}" @property def error(self): @@ -1116,7 +1113,7 @@ class ClassifiedFailure(models.Model): modified = models.DateTimeField(auto_now=True) def __str__(self): - return "{0} {1}".format(self.id, self.bug_number) + return f"{self.id} {self.bug_number}" def bug(self): # Putting this here forces one query per object; there should be a way @@ -1256,7 +1253,7 @@ class Meta: unique_together = (("step", "line_number"), ("job", "line_number")) def __str__(self): - return "{0} {1}".format(self.id, self.job.id) + return f"{self.id} {self.job.id}" @property def metadata(self): @@ -1387,7 +1384,7 @@ class Meta: unique_together = ("text_log_error", "classified_failure", "matcher_name") def __str__(self): - return "{0} {1}".format(self.text_log_error.id, self.classified_failure.id) + return f"{self.text_log_error.id} {self.classified_failure.id}" class InvestigatedTests(models.Model): diff --git a/treeherder/perf/auto_perf_sheriffing/backfill_reports.py b/treeherder/perf/auto_perf_sheriffing/backfill_reports.py index f80c79c6308..5bf0a061c01 100644 --- a/treeherder/perf/auto_perf_sheriffing/backfill_reports.py +++ b/treeherder/perf/auto_perf_sheriffing/backfill_reports.py @@ -1,7 +1,7 @@ import logging from datetime import timedelta, datetime from itertools import zip_longest, groupby -from typing import Tuple, List, Optional +from typing import Optional import simplejson as json from django.db.models import QuerySet, Q, F @@ -24,7 +24,7 @@ class AlertsPicker: """ def __init__( - self, max_alerts: int, max_improvements: int, platforms_of_interest: Tuple[str, ...] + self, max_alerts: int, max_improvements: int, platforms_of_interest: tuple[str, ...] 
): """ :param max_alerts: the maximum number of selected alerts @@ -49,7 +49,7 @@ def __init__( self.max_improvements = max_improvements self.ordered_platforms_of_interest = platforms_of_interest - def extract_important_alerts(self, alerts: Tuple[PerformanceAlert, ...]): + def extract_important_alerts(self, alerts: tuple[PerformanceAlert, ...]): if any(not isinstance(alert, PerformanceAlert) for alert in alerts): raise ValueError("Provided parameter does not contain only PerformanceAlert objects.") relevant_alerts = self._extract_by_relevant_platforms(alerts) @@ -57,7 +57,7 @@ def extract_important_alerts(self, alerts: Tuple[PerformanceAlert, ...]): sorted_alerts = self._multi_criterion_sort(alerts_with_distinct_jobs) return self._ensure_alerts_variety(sorted_alerts) - def _ensure_alerts_variety(self, sorted_alerts: List[PerformanceAlert]): + def _ensure_alerts_variety(self, sorted_alerts: list[PerformanceAlert]): """ The alerts container must be sorted before being passed to this function. The returned list must contain regressions and (if present) improvements. @@ -81,12 +81,12 @@ def _ensure_alerts_variety(self, sorted_alerts: List[PerformanceAlert]): : self.max_improvements if improvements_only else self.max_alerts ] - def _ensure_distinct_jobs(self, alerts: List[PerformanceAlert]) -> List[PerformanceAlert]: + def _ensure_distinct_jobs(self, alerts: list[PerformanceAlert]) -> list[PerformanceAlert]: def initial_culprit_job(alert): return alert.initial_culprit_job def parent_or_sibling_from( - alert_group: List[PerformanceAlert], + alert_group: list[PerformanceAlert], ) -> Optional[PerformanceAlert]: if len(alert_group) == 0: return None @@ -105,8 +105,8 @@ def parent_or_sibling_from( return list(filter(None, alerts)) def _ensure_platform_variety( - self, sorted_all_alerts: List[PerformanceAlert] - ) -> List[PerformanceAlert]: + self, sorted_all_alerts: list[PerformanceAlert] + ) -> list[PerformanceAlert]: """ Note: Ensure that the sorted_all_alerts container has only platforms of interest (example: 'windows10', 'windows7', 'linux', 'osx', 'android'). 
@@ -191,7 +191,7 @@ def __init__(self, max_data_points: int, time_interval: timedelta, logger=None): self._time_interval = time_interval self.log = logger or logging.getLogger(self.__class__.__name__) - def __call__(self, alert: PerformanceAlert) -> List[dict]: + def __call__(self, alert: PerformanceAlert) -> list[dict]: """ Main method """ @@ -238,7 +238,7 @@ def _fetch_suspect_data_points(self, alert: PerformanceAlert) -> QuerySet: ) return annotated_data_points - def _one_data_point_per_push(self, annotated_data_points: QuerySet) -> List[dict]: + def _one_data_point_per_push(self, annotated_data_points: QuerySet) -> list[dict]: seen_push_ids = set() seen_add = seen_push_ids.add return [ @@ -247,7 +247,7 @@ def _one_data_point_per_push(self, annotated_data_points: QuerySet) -> List[dict if not (data_point["push_id"] in seen_push_ids or seen_add(data_point["push_id"])) ] - def _find_push_id_index(self, push_id: int, flattened_data_points: List[dict]) -> int: + def _find_push_id_index(self, push_id: int, flattened_data_points: list[dict]) -> int: for index, data_point in enumerate(flattened_data_points): if data_point["push_id"] == push_id: return index @@ -261,7 +261,7 @@ def __compute_window_slices(self, center_index: int) -> slice: return slice(left_margin, right_margin) - def _glance_over_retrigger_range(self, data_points_to_retrigger: List[dict]): + def _glance_over_retrigger_range(self, data_points_to_retrigger: list[dict]): retrigger_range = len(data_points_to_retrigger) if retrigger_range < self._range_width: self.log.warning( @@ -286,12 +286,12 @@ def __init__( self.log = logger or logging.getLogger(self.__class__.__name__) def provide_updated_reports( - self, since: datetime, frameworks: List[str], repositories: List[str] - ) -> List[BackfillReport]: + self, since: datetime, frameworks: list[str], repositories: list[str] + ) -> list[BackfillReport]: alert_summaries = self.__fetch_summaries_to_retrigger(since, frameworks, repositories) return self.compile_reports_for(alert_summaries) - def compile_reports_for(self, summaries_to_retrigger: QuerySet) -> List[BackfillReport]: + def compile_reports_for(self, summaries_to_retrigger: QuerySet) -> list[BackfillReport]: reports = [] for summary in summaries_to_retrigger: @@ -317,12 +317,12 @@ def compile_reports_for(self, summaries_to_retrigger: QuerySet) -> List[Backfill def _pick_important_alerts( self, from_summary: PerformanceAlertSummary - ) -> List[PerformanceAlert]: + ) -> list[PerformanceAlert]: return self.alerts_picker.extract_important_alerts( from_summary.alerts.filter(status=PerformanceAlert.UNTRIAGED) ) - def _provide_records(self, backfill_report: BackfillReport, alert_context_map: List[Tuple]): + def _provide_records(self, backfill_report: BackfillReport, alert_context_map: list[tuple]): for alert, retrigger_context in alert_context_map: BackfillRecord.objects.create( alert=alert, @@ -331,7 +331,7 @@ def _provide_records(self, backfill_report: BackfillReport, alert_context_map: L ) def __fetch_summaries_to_retrigger( - self, since: datetime, frameworks: List[str], repositories: List[str] + self, since: datetime, frameworks: list[str], repositories: list[str] ) -> QuerySet: no_reports_yet = Q(last_updated__gte=since, backfill_report__isnull=True) with_outdated_reports = Q(last_updated__gt=F("backfill_report__last_updated")) @@ -348,7 +348,7 @@ def __fetch_summaries_to_retrigger( .filter(filters) ) - def _associate_retrigger_context(self, important_alerts: List[PerformanceAlert]) -> List[Tuple]: + def 
_associate_retrigger_context(self, important_alerts: list[PerformanceAlert]) -> list[tuple]: retrigger_map = [] incomplete_mapping = False diff --git a/treeherder/perf/auto_perf_sheriffing/secretary.py b/treeherder/perf/auto_perf_sheriffing/secretary.py index 8c4558049b6..f8c5b5b493d 100644 --- a/treeherder/perf/auto_perf_sheriffing/secretary.py +++ b/treeherder/perf/auto_perf_sheriffing/secretary.py @@ -1,6 +1,5 @@ import logging from datetime import datetime, timedelta -from typing import List import simplejson as json from django.conf import settings as django_settings @@ -22,7 +21,7 @@ class Secretary: """ def __init__( - self, outcome_checker: OutcomeChecker = None, supported_platforms: List[str] = None + self, outcome_checker: OutcomeChecker = None, supported_platforms: list[str] = None ): self.outcome_checker = outcome_checker or OutcomeChecker() self.supported_platforms = supported_platforms or django_settings.SUPPORTED_PLATFORMS diff --git a/treeherder/perf/auto_perf_sheriffing/sherlock.py b/treeherder/perf/auto_perf_sheriffing/sherlock.py index dcaa5cb6efd..77e4e387f62 100644 --- a/treeherder/perf/auto_perf_sheriffing/sherlock.py +++ b/treeherder/perf/auto_perf_sheriffing/sherlock.py @@ -2,7 +2,6 @@ from datetime import datetime, timedelta from json import JSONDecodeError from logging import INFO, WARNING -from typing import List, Tuple from django.conf import settings from django.db.models import QuerySet @@ -35,7 +34,7 @@ def __init__( backfill_tool: BackfillTool, secretary: Secretary, max_runtime: timedelta = None, - supported_platforms: List[str] = None, + supported_platforms: list[str] = None, ): self.report_maintainer = report_maintainer self.backfill_tool = backfill_tool @@ -45,7 +44,7 @@ def __init__( self.supported_platforms = supported_platforms or settings.SUPPORTED_PLATFORMS self._wake_up_time = datetime.now() - def sheriff(self, since: datetime, frameworks: List[str], repositories: List[str]): + def sheriff(self, since: datetime, frameworks: list[str], repositories: list[str]): logger.info("Sherlock: Validating settings...") self.secretary.validate_settings() @@ -76,15 +75,15 @@ def assert_can_run(self): raise MaxRuntimeExceeded("Sherlock: Max runtime exceeded.") def _report( - self, since: datetime, frameworks: List[str], repositories: List[str] - ) -> List[BackfillReport]: + self, since: datetime, frameworks: list[str], repositories: list[str] + ) -> list[BackfillReport]: return self.report_maintainer.provide_updated_reports(since, frameworks, repositories) - def _backfill(self, frameworks: List[str], repositories: List[str]): + def _backfill(self, frameworks: list[str], repositories: list[str]): for platform in self.supported_platforms: self.__backfill_on(platform, frameworks, repositories) - def __backfill_on(self, platform: str, frameworks: List[str], repositories: List[str]): + def __backfill_on(self, platform: str, frameworks: list[str], repositories: list[str]): left = self.secretary.backfills_left(on_platform=platform) total_consumed = 0 @@ -110,7 +109,7 @@ def __backfill_on(self, platform: str, frameworks: List[str], repositories: List @staticmethod def __fetch_records_requiring_backfills_on( - platform: str, frameworks: List[str], repositories: List[str] + platform: str, frameworks: list[str], repositories: list[str] ) -> QuerySet: records_to_backfill = BackfillRecord.objects.select_related( "alert", @@ -126,7 +125,7 @@ def __fetch_records_requiring_backfills_on( ) return records_to_backfill - def _backfill_record(self, record: BackfillRecord, left: int) 
-> Tuple[int, int]: + def _backfill_record(self, record: BackfillRecord, left: int) -> tuple[int, int]: consumed = 0 try: @@ -160,7 +159,7 @@ def _backfill_record(self, record: BackfillRecord, left: int) -> Tuple[int, int] @staticmethod def _note_backfill_outcome( record: BackfillRecord, to_backfill: int, actually_backfilled: int - ) -> Tuple[bool, str]: + ) -> tuple[bool, str]: success = False record.total_actions_triggered = actually_backfilled @@ -200,7 +199,7 @@ def _is_queue_overloaded(provisioner_id: str, worker_type: str, acceptable_limit return pending_tasks_count > acceptable_limit @staticmethod - def __get_data_points_to_backfill(context: List[dict]) -> List[dict]: + def __get_data_points_to_backfill(context: list[dict]) -> list[dict]: context_len = len(context) start = None diff --git a/treeherder/perf/email.py b/treeherder/perf/email.py index f959e7a14d5..cd41f53bc3e 100644 --- a/treeherder/perf/email.py +++ b/treeherder/perf/email.py @@ -11,7 +11,7 @@ from abc import ABC, abstractmethod import urllib.parse -from typing import List, Union, Optional +from typing import Union, Optional from django.conf import settings from treeherder.perf.models import ( @@ -40,7 +40,7 @@ class EmailWriter(ABC): def __init__(self): self._email = Email() - def prepare_new_email(self, must_mention: Union[List[object], object]) -> dict: + def prepare_new_email(self, must_mention: Union[list[object], object]) -> dict: """ Template method """ @@ -64,12 +64,12 @@ def _write_subject(self): pass # pragma: no cover @abstractmethod - def _write_content(self, must_mention: List[object]): + def _write_content(self, must_mention: list[object]): pass # pragma: no cover @staticmethod - def __ensure_its_list(must_mention) -> List[object]: - if not isinstance(must_mention, List): + def __ensure_its_list(must_mention) -> list[object]: + if not isinstance(must_mention, list): must_mention = [must_mention] return must_mention @@ -90,7 +90,7 @@ class BackfillReportContent: def __init__(self): self._raw_content = None - def include_records(self, records: List[BackfillRecord]): + def include_records(self, records: list[BackfillRecord]): self._initialize_report_intro() for record in records: @@ -216,7 +216,7 @@ def _write_address(self): def _write_subject(self): self._email.subject = "Automatic Backfilling Report" - def _write_content(self, must_mention: List[BackfillRecord]): + def _write_content(self, must_mention: list[BackfillRecord]): content = BackfillReportContent() content.include_records(must_mention) @@ -238,7 +238,7 @@ class DeletionReportContent: def __init__(self): self._raw_content = None - def include_signatures(self, signatures: List[PerformanceSignature]): + def include_signatures(self, signatures: list[PerformanceSignature]): self._initialize_report_intro() for signature in signatures: @@ -287,7 +287,7 @@ def _write_address(self): def _write_subject(self): self._email.subject = "Summary of deleted Performance Signatures" - def _write_content(self, must_mention: List[PerformanceSignature]): + def _write_content(self, must_mention: list[PerformanceSignature]): content = DeletionReportContent() content.include_signatures(must_mention) diff --git a/treeherder/perf/management/commands/compute_criteria_formulas.py b/treeherder/perf/management/commands/compute_criteria_formulas.py index aaf22ba7bb3..5abdce99437 100644 --- a/treeherder/perf/management/commands/compute_criteria_formulas.py +++ b/treeherder/perf/management/commands/compute_criteria_formulas.py @@ -1,7 +1,6 @@ import time from datetime import 
timedelta -from typing import List from treeherder.config import settings from treeherder.perf.sheriffing_criteria import ( @@ -15,7 +14,7 @@ from django.core.management.base import BaseCommand -def pretty_enumerated(formulas: List[str]) -> str: +def pretty_enumerated(formulas: list[str]) -> str: comma = ", " return " & ".join(comma.join(formulas).rsplit(comma, maxsplit=1)) diff --git a/treeherder/perf/management/commands/import_perf_data.py b/treeherder/perf/management/commands/import_perf_data.py index 607c671f7ae..4b0a5e88f53 100644 --- a/treeherder/perf/management/commands/import_perf_data.py +++ b/treeherder/perf/management/commands/import_perf_data.py @@ -50,14 +50,14 @@ def progress_notifier( tabs_no=0, ): total_items = len(iterable) - print("{0}Fetching {1} {2} item(s)...".format("\t" * tabs_no, total_items, item_name)) + print("{}Fetching {} {} item(s)...".format("\t" * tabs_no, total_items, item_name)) prev_percentage = None for idx, item in enumerate(iterable): item_processor(item) percentage = int((idx + 1) * 100 / total_items) if percentage % 10 == 0 and percentage != prev_percentage: - print("{0}Fetched {1}% of {2} item(s)".format("\t" * tabs_no, percentage, item_name)) + print("{}Fetched {}% of {} item(s)".format("\t" * tabs_no, percentage, item_name)) prev_percentage = percentage @@ -86,14 +86,14 @@ def fillup_target(self, **filters): def show_progress(self, queryset, map, table_name): total_rows = int(queryset.count()) - print("Fetching {0} {1}(s)...".format(total_rows, table_name)) + print(f"Fetching {total_rows} {table_name}(s)...") prev_percentage = None for idx, obj in enumerate(list(queryset)): map(obj) percentage = int((idx + 1) * 100 / total_rows) if percentage % 10 == 0 and percentage != prev_percentage: - print("Fetched {0}% of alert summaries".format(percentage)) + print(f"Fetched {percentage}% of alert summaries") prev_percentage = percentage @@ -112,19 +112,19 @@ class DecentSizedData(Data): def delete_local_data(self): for model in self.DECENT_SIZED_TABLES: - print("Removing elements from {0} table... ".format(model._meta.db_table)) + print(f"Removing elements from {model._meta.db_table} table... ") model.objects.using(self.target).all().delete() def save_local_data(self): for model in self.DECENT_SIZED_TABLES: - print("Fetching from {0} table...".format(model._meta.db_table)) + print(f"Fetching from {model._meta.db_table} table...") model.objects.using(self.target).bulk_create(model.objects.using(self.source).all()) def fillup_target(self, **filters): print("Fetching all affordable data...\n") # TODO: JSON dump the list print( - "From tables {0}".format( + "From tables {}".format( ", ".join([model._meta.db_table for model in self.DECENT_SIZED_TABLES]) ) ) @@ -224,7 +224,7 @@ def __init__( def delete_local_data(self): for model in self.BIG_SIZED_TABLES: - print("Removing elements from {0} table... ".format(model._meta.db_table)) + print(f"Removing elements from {model._meta.db_table} table... 
") model.objects.using(self.target).all().delete() def save_local_data(self): @@ -233,7 +233,7 @@ def save_local_data(self): ) for table_name, properties in priority_dict.items(): - print("Saving {0} data...".format(table_name)) + print(f"Saving {table_name} data...") model_values = ( properties["model"] .objects.using(self.source) @@ -257,7 +257,7 @@ def fillup_target(self, **filters): # fetch all alert summaries & alerts # with only a subset of the datum & jobs oldest_day = datetime.datetime.now() - self.time_window - print("\nFetching data subset no older than {0}...".format(str(oldest_day))) + print(f"\nFetching data subset no older than {str(oldest_day)}...") self.delete_local_data() alert_summaries = list(self.query_set) @@ -293,7 +293,7 @@ def fillup_target(self, **filters): self.save_local_data() def db_worker(self, process_no, alert_summaries): - print("Process no {0} up and running...".format(process_no)) + print(f"Process no {process_no} up and running...") self.progress_notifier(self.bring_in_alert_summary, alert_summaries, "alert summary", 1) def bring_in_alert_summary(self, alert_summary): @@ -314,7 +314,7 @@ def bring_in_alert(self, alert): if alert.id in self.models_instances["performance_alert"]: return - print("{0}Fetching alert #{1}...".format("\t" * 2, alert.id)) + print("{}Fetching alert #{}...".format("\t" * 2, alert.id)) if alert.related_summary: if alert.related_summary not in self.models_instances["performance_alert_summary"]: # if the alert summary identified isn't registered yet @@ -365,7 +365,7 @@ def bring_in_job(self, job): if job.id in self.models_instances["job"]: return - occasional_log("{0}Fetching job #{1}".format("\t" * 4, job.id)) + occasional_log("{}Fetching job #{}".format("\t" * 4, job.id)) self.update_list("reference_data_signature", job.signature) self.update_list("build_platform", job.build_platform) diff --git a/treeherder/perf/management/commands/perf_sheriff.py b/treeherder/perf/management/commands/perf_sheriff.py index eff2eba1df9..2bb80788770 100644 --- a/treeherder/perf/management/commands/perf_sheriff.py +++ b/treeherder/perf/management/commands/perf_sheriff.py @@ -1,6 +1,5 @@ import logging from datetime import datetime, timedelta -from typing import List, Tuple from django.core.management.base import BaseCommand @@ -65,7 +64,7 @@ def handle(self, *args, **options): logging.info("Sherlock: Going back to sleep.") - def _parse_args(self, **options) -> Tuple[List, List, datetime, timedelta]: + def _parse_args(self, **options) -> tuple[list, list, datetime, timedelta]: return ( options["frameworks"], options["repositories"], diff --git a/treeherder/perf/migrations/0001_squashed_0005_permit_github_links.py b/treeherder/perf/migrations/0001_squashed_0005_permit_github_links.py index 2c335120074..ed21986b48e 100644 --- a/treeherder/perf/migrations/0001_squashed_0005_permit_github_links.py +++ b/treeherder/perf/migrations/0001_squashed_0005_permit_github_links.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Generated by Django 1.11.11 on 2018-03-08 13:19 import django.core.validators import django.db.models.deletion diff --git a/treeherder/perf/migrations/0006_add_alert_summary_notes.py b/treeherder/perf/migrations/0006_add_alert_summary_notes.py index 96044127567..2066690b6c2 100644 --- a/treeherder/perf/migrations/0006_add_alert_summary_notes.py +++ b/treeherder/perf/migrations/0006_add_alert_summary_notes.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Generated by Django 1.11.10 on 2018-03-08 14:53 from django.db import migrations, models diff 
--git a/treeherder/perf/migrations/0007_star_performancealert.py b/treeherder/perf/migrations/0007_star_performancealert.py index cb19f0e5b25..bcc725e2bd0 100644 --- a/treeherder/perf/migrations/0007_star_performancealert.py +++ b/treeherder/perf/migrations/0007_star_performancealert.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Generated by Django 1.11.10 on 2018-04-19 09:25 from django.db import migrations, models diff --git a/treeherder/perf/migrations/0008_add_confirming_state.py b/treeherder/perf/migrations/0008_add_confirming_state.py index f15b4de23e9..af529a4ac6c 100644 --- a/treeherder/perf/migrations/0008_add_confirming_state.py +++ b/treeherder/perf/migrations/0008_add_confirming_state.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Generated by Django 1.11.13 on 2018-05-14 11:40 from django.db import migrations, models diff --git a/treeherder/perf/migrations/0009_non_nullable_issue_tracker.py b/treeherder/perf/migrations/0009_non_nullable_issue_tracker.py index bf5aa84c5e3..f87344b66e8 100644 --- a/treeherder/perf/migrations/0009_non_nullable_issue_tracker.py +++ b/treeherder/perf/migrations/0009_non_nullable_issue_tracker.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Generated by Django 1.11.13 on 2018-05-23 08:07 from django.db import migrations, models import django.db.models.deletion diff --git a/treeherder/perf/migrations/0010_fix_signature_uniqueness.py b/treeherder/perf/migrations/0010_fix_signature_uniqueness.py index 135906db1bf..1f08d9810fc 100644 --- a/treeherder/perf/migrations/0010_fix_signature_uniqueness.py +++ b/treeherder/perf/migrations/0010_fix_signature_uniqueness.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Generated by Django 1.11.15 on 2018-09-28 11:41 from django.db import migrations diff --git a/treeherder/perf/migrations/0011_inc_extra_options_length.py b/treeherder/perf/migrations/0011_inc_extra_options_length.py index 34b39843d0d..5549922fd78 100644 --- a/treeherder/perf/migrations/0011_inc_extra_options_length.py +++ b/treeherder/perf/migrations/0011_inc_extra_options_length.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Generated by Django 1.11.15 on 2018-11-06 08:20 from django.db import migrations, models diff --git a/treeherder/perf/migrations/0012_rename_summary_last_updated.py b/treeherder/perf/migrations/0012_rename_summary_last_updated.py index 369c7b79b61..ca3ff0302fc 100644 --- a/treeherder/perf/migrations/0012_rename_summary_last_updated.py +++ b/treeherder/perf/migrations/0012_rename_summary_last_updated.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Generated by Django 1.11.16 on 2019-02-20 15:02 from django.db import migrations diff --git a/treeherder/perf/models.py b/treeherder/perf/models.py index 16a2176ce10..e146b2e0088 100644 --- a/treeherder/perf/models.py +++ b/treeherder/perf/models.py @@ -1,7 +1,7 @@ import logging from datetime import datetime import json -from typing import List, Tuple, Optional +from typing import Optional from functools import reduce from django.contrib.auth.models import User @@ -35,7 +35,7 @@ class Meta: db_table = "performance_framework" @classmethod - def fetch_all_names(cls) -> List[str]: + def fetch_all_names(cls) -> list[str]: return cls.objects.values_list("name", flat=True) def __str__(self): @@ -183,11 +183,11 @@ class Meta: def __str__(self): name = self.suite if self.test: - name += " {}".format(self.test) + name += f" {self.test}" else: name += " summary" - return "{} {} {} {}".format(self.signature_hash, name, self.platform, self.last_updated) + return f"{self.signature_hash} {name} 
{self.platform} {self.last_updated}" class PerformanceDatum(models.Model): @@ -224,7 +224,7 @@ def save(self, *args, **kwargs): self.signature.save() def __str__(self): - return "{} {}".format(self.value, self.push_timestamp) + return f"{self.value} {self.push_timestamp}" class PerformanceDatumReplicate(models.Model): @@ -254,7 +254,7 @@ class Meta: db_table = "issue_tracker" def __str__(self): - return "{} (tasks via {})".format(self.name, self.task_base_url) + return f"{self.name} (tasks via {self.task_base_url})" class PerformanceAlertSummary(models.Model): @@ -317,7 +317,7 @@ class PerformanceAlertSummary(models.Model): issue_tracker = models.ForeignKey(IssueTracker, on_delete=models.PROTECT, default=1) # Bugzilla def __init__(self, *args, **kwargs): - super(PerformanceAlertSummary, self).__init__(*args, **kwargs) + super().__init__(*args, **kwargs) # allows updating timestamps only on new values self.__prev_bug_number = self.bug_number @@ -333,7 +333,7 @@ def save(self, *args, **kwargs): self.triage_due_date = triage_due if self.bug_due_date != bug_due: self.bug_due_date = bug_due - super(PerformanceAlertSummary, self).save(*args, **kwargs) + super().save(*args, **kwargs) self.__prev_bug_number = self.bug_number def update_status(self, using=None): @@ -418,9 +418,7 @@ class Meta: unique_together = ("repository", "framework", "prev_push", "push") def __str__(self): - return "{} {} {}-{}".format( - self.framework, self.repository, self.prev_push.revision, self.push.revision - ) + return f"{self.framework} {self.repository} {self.prev_push.revision}-{self.push.revision}" class PerformanceAlert(models.Model): @@ -582,7 +580,7 @@ class Meta: unique_together = ("summary", "series_signature") def __str__(self): - return "{} {} {}%".format(self.summary, self.series_signature, self.amount_pct) + return f"{self.summary} {self.series_signature} {self.amount_pct}%" class PerformanceTag(models.Model): @@ -615,7 +613,7 @@ class Meta: db_table = "performance_bug_template" def __str__(self): - return "{} bug template".format(self.framework.name) + return f"{self.framework.name} bug template" # TODO: we actually need this name for the Sherlock' s hourly report @@ -649,9 +647,7 @@ class Meta: db_table = "backfill_report" def __str__(self): - return "BackfillReport(summary #{}, last update {})".format( - self.summary.id, self.last_updated - ) + return f"BackfillReport(summary #{self.summary.id}, last update {self.last_updated})" class BackfillRecord(models.Model): @@ -750,7 +746,7 @@ def __remember_job_properties(self, job: Job): self.job_platform_option = job.get_platform_option() self.save() - def get_context_border_info(self, context_property: str) -> Tuple[str, str]: + def get_context_border_info(self, context_property: str) -> tuple[str, str]: """ Provides border(first and last) information from context based on the property """ @@ -760,7 +756,7 @@ def get_context_border_info(self, context_property: str) -> Tuple[str, str]: return from_info, to_info - def get_pushes_in_context_range(self) -> List[Push]: + def get_pushes_in_context_range(self) -> list[Push]: from_time, to_time = self.get_context_border_info("push_timestamp") return Push.objects.filter( @@ -779,10 +775,10 @@ def get_job_search_str(self) -> str: return ",".join(search_terms) - def get_context(self) -> List[dict]: + def get_context(self) -> list[dict]: return json.loads(self.context) - def set_context(self, value: List[dict]): + def set_context(self, value: list[dict]): self.context = json.dumps(value, default=str) def 
set_log_details(self, value: dict): @@ -801,7 +797,7 @@ class Meta: db_table = "backfill_record" def __str__(self): - return "BackfillRecord(alert #{}, from {})".format(self.alert.id, self.report) + return f"BackfillRecord(alert #{self.alert.id}, from {self.report})" class BackfillNotificationRecord(models.Model): diff --git a/treeherder/perf/sheriffing_criteria/bugzilla_formulas.py b/treeherder/perf/sheriffing_criteria/bugzilla_formulas.py index bb3f4cca95b..0c529c6e88f 100644 --- a/treeherder/perf/sheriffing_criteria/bugzilla_formulas.py +++ b/treeherder/perf/sheriffing_criteria/bugzilla_formulas.py @@ -1,7 +1,6 @@ from abc import ABC, abstractmethod from copy import deepcopy from datetime import timedelta, datetime -from typing import Tuple, List import requests from django.conf import settings @@ -32,7 +31,7 @@ def __init__(self, referer=None): # IP when making many queries with this self.headers = { "Referer": f"{referer}", - "User-Agent": "treeherder/{}".format(settings.SITE_HOSTNAME), + "User-Agent": f"treeherder/{settings.SITE_HOSTNAME}", "Accept": "application/json", } @@ -91,7 +90,7 @@ def __call__(self, framework: str, suite: str, test: str = None) -> float: return result - def breakdown(self) -> Tuple[list, list]: + def breakdown(self) -> tuple[list, list]: breakdown_items = (self._denominator_bugs, self._numerator_bugs) if None in breakdown_items: raise RuntimeError("Cannot breakdown results without running calculus first") @@ -107,11 +106,11 @@ def has_cooled_down(self, bug: dict) -> bool: return creation_time <= datetime.now() - self._bug_cooldown @abstractmethod - def _filter_numerator_bugs(self, all_filed_bugs: List[dict]) -> List[dict]: + def _filter_numerator_bugs(self, all_filed_bugs: list[dict]) -> list[dict]: pass @abstractmethod - def _filter_denominator_bugs(self, all_filed_bugs: List[dict]) -> List[dict]: + def _filter_denominator_bugs(self, all_filed_bugs: list[dict]) -> list[dict]: pass def _create_default_session(self) -> NonBlockableSession: @@ -120,12 +119,12 @@ def _create_default_session(self) -> NonBlockableSession: """ return NonBlockableSession() - def __fetch_cooled_down_bugs(self, framework: str, suite: str, test: str = None) -> List[dict]: + def __fetch_cooled_down_bugs(self, framework: str, suite: str, test: str = None) -> list[dict]: quantified_bugs = self.__fetch_quantified_bugs(framework, suite, test) cooled_bugs = self.__filter_cooled_down_bugs(quantified_bugs) return cooled_bugs - def __fetch_quantified_bugs(self, framework: str, suite: str, test: str = None) -> List[dict]: + def __fetch_quantified_bugs(self, framework: str, suite: str, test: str = None) -> list[dict]: test_moniker = " ".join(filter(None, (suite, test))) test_id_fragments = filter(None, [framework, test_moniker]) creation_time = datetime.strftime(self.oldest_timestamp, BZ_DATETIME_FORMAT) @@ -153,7 +152,7 @@ def __fetch_quantified_bugs(self, framework: str, suite: str, test: str = None) else: return bugs_resp.json()["bugs"] - def __filter_cooled_down_bugs(self, bugs: List[dict]) -> List[dict]: + def __filter_cooled_down_bugs(self, bugs: list[dict]) -> list[dict]: return [bug for bug in bugs if self.has_cooled_down(bug)] def __reset_breakdown(self): @@ -165,7 +164,7 @@ def __get_datetime(self, datetime_: str) -> datetime: class EngineerTractionFormula(BugzillaFormula): - def _filter_numerator_bugs(self, cooled_bugs: List[dict]) -> List[dict]: + def _filter_numerator_bugs(self, cooled_bugs: list[dict]) -> list[dict]: tracted_bugs = [] for bug in cooled_bugs: bug_history = 
self._fetch_history(bug["id"]) @@ -177,7 +176,7 @@ def _filter_numerator_bugs(self, cooled_bugs: List[dict]) -> List[dict]: return tracted_bugs - def _filter_denominator_bugs(self, all_filed_bugs: List[dict]) -> List[dict]: + def _filter_denominator_bugs(self, all_filed_bugs: list[dict]) -> list[dict]: return all_filed_bugs def _fetch_history(self, bug_id: int) -> list: @@ -193,7 +192,7 @@ def _fetch_history(self, bug_id: int) -> list: body = history_resp.json() return body["bugs"][0]["history"] - def _notice_any_status_change_in(self, bug_history: List[dict], up_to: datetime) -> bool: + def _notice_any_status_change_in(self, bug_history: list[dict], up_to: datetime) -> bool: def during_interval(change: dict) -> bool: when = datetime.strptime(change["when"], BZ_DATETIME_FORMAT) return when <= up_to @@ -213,7 +212,7 @@ def _create_default_session(self) -> NonBlockableSession: class FixRatioFormula(BugzillaFormula): - def _filter_numerator_bugs(self, all_filed_bugs: List[dict]) -> List[dict]: + def _filter_numerator_bugs(self, all_filed_bugs: list[dict]) -> list[dict]: # select only RESOLVED - FIXED bugs return [ bug @@ -221,7 +220,7 @@ def _filter_numerator_bugs(self, all_filed_bugs: List[dict]) -> List[dict]: if bug.get("status") == "RESOLVED" and bug.get("resolution") == "FIXED" ] - def _filter_denominator_bugs(self, all_filed_bugs: List[dict]) -> List[dict]: + def _filter_denominator_bugs(self, all_filed_bugs: list[dict]) -> list[dict]: # select RESOLVED bugs, no matter what resolution they have return [bug for bug in all_filed_bugs if bug.get("status") == "RESOLVED"] diff --git a/treeherder/perf/sheriffing_criteria/criteria_tracking.py b/treeherder/perf/sheriffing_criteria/criteria_tracking.py index ddd449f4ab6..73019967a52 100644 --- a/treeherder/perf/sheriffing_criteria/criteria_tracking.py +++ b/treeherder/perf/sheriffing_criteria/criteria_tracking.py @@ -4,7 +4,7 @@ from multiprocessing import cpu_count from multiprocessing.pool import Pool, ThreadPool, AsyncResult import time -from typing import Tuple, Dict, Union, List +from typing import Union from datetime import datetime, timedelta @@ -49,7 +49,7 @@ def __post_init__(self): class RecordComputer: def __init__( self, - formula_map: Dict[str, BugzillaFormula], + formula_map: dict[str, BugzillaFormula], time_until_expires: timedelta, webservice_rest_time: timedelta, logger=None, @@ -162,7 +162,7 @@ def __init__(self, check_interval, timeout_after: timedelta, logger=None): self.__last_change = 0 self.__since_last_change = timedelta(seconds=0) - def wait_for_results(self, results: List[AsyncResult]): + def wait_for_results(self, results: list[AsyncResult]): self.__reset_change_track() while True: @@ -180,7 +180,7 @@ def wait_for_results(self, results: List[AsyncResult]): f"Haven't computed updates for all records yet (only {len(ready)} out of {len(results)}). Still waiting..." 
) - def __updates_stagnated(self, results: List[AsyncResult], last_check_on: float) -> bool: + def __updates_stagnated(self, results: list[AsyncResult], last_check_on: float) -> bool: ready_amount = len([r for r in results if r.ready()]) total_results = len(results) new_change = total_results - ready_amount @@ -213,7 +213,7 @@ class CriteriaTracker: def __init__( self, - formula_map: Dict[str, BugzillaFormula] = None, + formula_map: dict[str, BugzillaFormula] = None, record_path: str = None, webservice_rest_time: timedelta = None, multiprocessed: bool = False, @@ -236,7 +236,7 @@ def __init__( if not callable(formula): raise TypeError("Must provide callable as sheriffing criteria formula") - def get_test_moniker(self, record: CriteriaRecord) -> Tuple[str, str, str]: + def get_test_moniker(self, record: CriteriaRecord) -> tuple[str, str, str]: return record.Framework, record.Suite, record.Test def __iter__(self): @@ -247,7 +247,7 @@ def load_records(self): self.log.info(f"Loading records from {self._record_path}...") self._records_map = {} # reset them - with open(self._record_path, "r") as csv_file: + with open(self._record_path) as csv_file: reader = csv.DictReader(csv_file) for row in reader: test_moniker = row.get("Framework"), row.get("Suite"), row.get("Test") @@ -283,7 +283,7 @@ def compute_record_update(self, record: CriteriaRecord) -> CriteriaRecord: record = self._computer.apply_formulas(record) return record - def create_formula_map(self) -> Dict[str, BugzillaFormula]: + def create_formula_map(self) -> dict[str, BugzillaFormula]: return { self.ENGINEER_TRACTION: EngineerTractionFormula(), self.FIX_RATIO: FixRatioFormula(), diff --git a/treeherder/perfalert/perfalert/__init__.py b/treeherder/perfalert/perfalert/__init__.py index a6d1dbe75f8..22f2fe7d3ec 100644 --- a/treeherder/perfalert/perfalert/__init__.py +++ b/treeherder/perfalert/perfalert/__init__.py @@ -107,7 +107,7 @@ def __lt__(self, o): def __repr__(self): values_str = "[ %s ]" % ", ".join(["%.3f" % value for value in self.values]) - return "<%s: %s, %s, %.3f, %s>" % ( + return "<{}: {}, {}, {:.3f}, {}>".format( self.push_timestamp, self.push_id, values_str, diff --git a/treeherder/push_health/tests.py b/treeherder/push_health/tests.py index 6ad411e9893..d7b11a4a597 100644 --- a/treeherder/push_health/tests.py +++ b/treeherder/push_health/tests.py @@ -103,12 +103,10 @@ def get_current_test_failures(push, option_map, jobs, investigatedTests=None): job_symbol = job.job_type.symbol job_group = job.job_group.name job_group_symbol = job.job_group.symbol - job.job_key = "{}{}{}{}".format(config, platform, job_name, job_group) + job.job_key = f"{config}{platform}{job_name}{job_group}" all_failed_jobs[job.id] = job # The 't' ensures the key starts with a character, as required for a query selector - test_key = re.sub( - r"\W+", "", "t{}{}{}{}{}".format(test_name, config, platform, job_name, job_group) - ) + test_key = re.sub(r"\W+", "", f"t{test_name}{config}{platform}{job_name}{job_group}") isClassifiedIntermittent = any( job["failure_classification_id"] == 4 for job in jobs[job_name] ) @@ -215,7 +213,7 @@ def get_test_failures( jobs, result_status=set(), ): - logger.debug("Getting test failures for push: {}".format(push.id)) + logger.debug(f"Getting test failures for push: {push.id}") # query for jobs for the last two weeks excluding today # find tests that have failed in the last 14 days # this is very cache-able for reuse on other pushes. 
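The string changes above are Ruff's UP032 fix: a positional str.format() call whose arguments are plain names or attribute lookups becomes an f-string. A standalone sketch with invented values (not Treeherder data), mirroring the test_key construction rewritten in push_health/tests.py above:

import re

config, platform, job_name = "opt", "linux1804-64", "mochitest-1"
# Pre-pyupgrade spelling:
#   test_key = re.sub(r"\W+", "", "t{}{}{}".format(config, platform, job_name))
test_key = re.sub(r"\W+", "", f"t{config}{platform}{job_name}")
print(test_key)  # toptlinux180464mochitest1

Calls where an argument is repeated, reordered, or a full expression are left as .format() or %-style (as in perfalert's values_str above), since the automatic conversion only fires when it is provably equivalent.
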
diff --git a/treeherder/push_health/usage.py b/treeherder/push_health/usage.py index 8fe14445b7b..c1167f82237 100644 --- a/treeherder/push_health/usage.py +++ b/treeherder/push_health/usage.py @@ -37,7 +37,7 @@ def get_usage(): nrql = "SELECT%20max(needInvestigation)%20FROM%20push_health_need_investigation%20FACET%20revision%20SINCE%201%20DAY%20AGO%20TIMESERIES%20where%20repo%3D'{}'%20AND%20appName%3D'{}'".format( "try", "treeherder-prod" ) - new_relic_url = "{}?nrql={}".format(settings.NEW_RELIC_INSIGHTS_API_URL, nrql) + new_relic_url = f"{settings.NEW_RELIC_INSIGHTS_API_URL}?nrql={nrql}" headers = { "Accept": "application/json", "Content-Type": "application/json", diff --git a/treeherder/push_health/utils.py b/treeherder/push_health/utils.py index 132af648867..0eac39f1964 100644 --- a/treeherder/push_health/utils.py +++ b/treeherder/push_health/utils.py @@ -31,7 +31,7 @@ def clean_test(test, signature, message): elif clean_name.startswith("http://10.0"): left = "/tests/".join(left.split("/tests/")[1:]) right = "/tests/".join(right.split("/tests/")[1:]) - clean_name = "%s%s%s" % (left, splitter, right) + clean_name = f"{left}{splitter}{right}" if "test_end for" in clean_name: clean_name = clean_name.split()[2] diff --git a/treeherder/services/pulse/consumers.py b/treeherder/services/pulse/consumers.py index 8d176d0bce5..ff3b2a02d1b 100644 --- a/treeherder/services/pulse/consumers.py +++ b/treeherder/services/pulse/consumers.py @@ -59,7 +59,7 @@ def __init__(self, source, build_routing_key): self.connection = Connection(source["pulse_url"], virtual_host=source.get("vhost", "/")) self.consumers = [] self.queue = None - self.queue_name = "queue/{}/{}".format(self.connection.userid, self.queue_suffix) + self.queue_name = f"queue/{self.connection.userid}/{self.queue_suffix}" self.root_url = source["root_url"] self.source = source self.build_routing_key = build_routing_key @@ -110,7 +110,7 @@ def bind_to(self, exchange, routing_key): # get the binding key for this consumer binding = self.get_binding_str(exchange.name, routing_key) - logger.info("Pulse queue {} bound to: {}".format(self.queue_name, binding)) + logger.info(f"Pulse queue {self.queue_name} bound to: {binding}") return binding @@ -146,11 +146,11 @@ def prune_bindings(self, new_bindings): def get_binding_str(self, exchange, routing_key): """Use consistent string format for binding comparisons""" - return "{} {}".format(exchange, routing_key) + return f"{exchange} {routing_key}" def get_bindings(self, queue_name): """Get list of bindings from the pulse API""" - return fetch_json("{}queue/{}/bindings".format(PULSE_GUARDIAN_URL, queue_name)) + return fetch_json(f"{PULSE_GUARDIAN_URL}queue/{queue_name}/bindings") class TaskConsumer(PulseConsumer): @@ -227,7 +227,7 @@ class JointConsumer(PulseConsumer): thread, so we use multiple threads, one per consumer. 
""" - queue_suffix = env("PULSE_QUEUE_NAME", default="queue_{}".format(socket.gethostname())) + queue_suffix = env("PULSE_QUEUE_NAME", default=f"queue_{socket.gethostname()}") def bindings(self): rv = [] diff --git a/treeherder/services/taskcluster.py b/treeherder/services/taskcluster.py index 98e3865ff24..c24736b1ee4 100644 --- a/treeherder/services/taskcluster.py +++ b/treeherder/services/taskcluster.py @@ -1,7 +1,6 @@ import logging import uuid from abc import ABC, abstractmethod -from typing import List, Tuple import requests import jsone @@ -169,7 +168,7 @@ def _get_action(action_array: list, action_name: str) -> str: ) @classmethod - def _task_in_context(cls, context: List[dict], task_tags: dict) -> bool: + def _task_in_context(cls, context: list[dict], task_tags: dict) -> bool: """ A task (as defined by its tags) is said to match a tag-set if its tags are a super-set of the tag-set. A tag-set is a set of key-value pairs. @@ -254,7 +253,7 @@ def notify_client_factory( return NotifyNullObject() -def autofind_unprovided(access_token, client_id) -> Tuple[str, str]: +def autofind_unprovided(access_token, client_id) -> tuple[str, str]: client_id = client_id or settings.NOTIFY_CLIENT_ID access_token = access_token or settings.NOTIFY_ACCESS_TOKEN return client_id, access_token diff --git a/treeherder/utils/github.py b/treeherder/utils/github.py index e57a839957a..8207eee0d24 100644 --- a/treeherder/utils/github.py +++ b/treeherder/utils/github.py @@ -4,31 +4,31 @@ def fetch_api(path, params=None): if GITHUB_TOKEN: - headers = {"Authorization": "token {}".format(GITHUB_TOKEN)} + headers = {"Authorization": f"token {GITHUB_TOKEN}"} else: headers = {} - return fetch_json("https://api.github.com/{}".format(path), params, headers) + return fetch_json(f"https://api.github.com/{path}", params, headers) def get_releases(owner, repo, params=None): - return fetch_api("repos/{}/{}/releases".format(owner, repo), params) + return fetch_api(f"repos/{owner}/{repo}/releases", params) def get_repo(owner, repo, params=None): - return fetch_api("repos/{}/{}".format(owner, repo), params) + return fetch_api(f"repos/{owner}/{repo}", params) def compare_shas(owner, repo, base, head): - return fetch_api("repos/{}/{}/compare/{}...{}".format(owner, repo, base, head)) + return fetch_api(f"repos/{owner}/{repo}/compare/{base}...{head}") def get_all_commits(owner, repo, params=None): - return fetch_api("repos/{}/{}/commits".format(owner, repo), params) + return fetch_api(f"repos/{owner}/{repo}/commits", params) def get_commit(owner, repo, sha, params=None): - return fetch_api("repos/{}/{}/commits/{}".format(owner, repo, sha), params) + return fetch_api(f"repos/{owner}/{repo}/commits/{sha}", params) def get_pull_request(owner, repo, sha, params=None): - return fetch_api("repos/{}/{}/pulls/{}/commits".format(owner, repo, sha), params) + return fetch_api(f"repos/{owner}/{repo}/pulls/{sha}/commits", params) diff --git a/treeherder/utils/http.py b/treeherder/utils/http.py index 455bb59daec..f7326451694 100644 --- a/treeherder/utils/http.py +++ b/treeherder/utils/http.py @@ -6,7 +6,7 @@ def make_request(url, method="GET", headers=None, timeout=30, **kwargs): """A wrapper around requests to set defaults & call raise_for_status().""" headers = headers or {} - headers["User-Agent"] = "treeherder/{}".format(settings.SITE_HOSTNAME) + headers["User-Agent"] = f"treeherder/{settings.SITE_HOSTNAME}" response = requests.request(method, url, headers=headers, timeout=timeout, **kwargs) if response.history: params = { diff --git 
a/treeherder/utils/taskcluster.py b/treeherder/utils/taskcluster.py index 4f15423170a..97ad7695c9d 100644 --- a/treeherder/utils/taskcluster.py +++ b/treeherder/utils/taskcluster.py @@ -5,7 +5,7 @@ def get_task_definition(root_url, task_id): - task_url = taskcluster_urls.api(root_url, "queue", "v1", "task/{}".format(task_id)) + task_url = taskcluster_urls.api(root_url, "queue", "v1", f"task/{task_id}") return fetch_json(task_url) @@ -16,9 +16,7 @@ def download_artifact(root_url, task_id, path): Returns either the parsed json, the parsed yaml or the plain response. """ - artifact_url = taskcluster_urls.api( - root_url, "queue", "v1", "task/{}/artifacts/{}".format(task_id, path) - ) + artifact_url = taskcluster_urls.api(root_url, "queue", "v1", f"task/{task_id}/artifacts/{path}") if path.endswith(".json"): return fetch_json(artifact_url) diff --git a/treeherder/webapp/api/bugzilla.py b/treeherder/webapp/api/bugzilla.py index 62fe545b4cd..6ce39aad259 100644 --- a/treeherder/webapp/api/bugzilla.py +++ b/treeherder/webapp/api/bugzilla.py @@ -1,5 +1,3 @@ -# coding: utf-8 - import requests from django.conf import settings from rest_framework import viewsets diff --git a/treeherder/webapp/api/infra_serializers.py b/treeherder/webapp/api/infra_serializers.py index f2fc9cf5cc2..af80d785020 100644 --- a/treeherder/webapp/api/infra_serializers.py +++ b/treeherder/webapp/api/infra_serializers.py @@ -37,6 +37,6 @@ def validate_repository(self, project): Repository.objects.get(name=project) except ObjectDoesNotExist: - raise serializers.ValidationError("{} does not exist.".format(project)) + raise serializers.ValidationError(f"{project} does not exist.") return project diff --git a/treeherder/webapp/api/investigated_test.py b/treeherder/webapp/api/investigated_test.py index 1857810681a..580750428e3 100644 --- a/treeherder/webapp/api/investigated_test.py +++ b/treeherder/webapp/api/investigated_test.py @@ -26,14 +26,10 @@ def get_queryset(self): return queryset except Push.DoesNotExist: - return Response( - "No push with revision: {0}".format(revision), status=HTTP_404_NOT_FOUND - ) + return Response(f"No push with revision: {revision}", status=HTTP_404_NOT_FOUND) except InvestigatedTests.DoesNotExist: - return Response( - "No push with revision: {0}".format(revision), status=HTTP_404_NOT_FOUND - ) + return Response(f"No push with revision: {revision}", status=HTTP_404_NOT_FOUND) def create(self, request, *args, **kwargs): project = kwargs["project"] @@ -52,19 +48,13 @@ def create(self, request, *args, **kwargs): return Response(serializer.data, status=status.HTTP_201_CREATED) except IntegrityError: - return Response( - "{0} already marked investigated".format(test), status=HTTP_400_BAD_REQUEST - ) + return Response(f"{test} already marked investigated", status=HTTP_400_BAD_REQUEST) except Push.DoesNotExist: - return Response( - "No push with revision: {0}".format(revision), status=HTTP_404_NOT_FOUND - ) + return Response(f"No push with revision: {revision}", status=HTTP_404_NOT_FOUND) except JobType.DoesNotExist: - return Response( - "No JobType with job name: {0}".format(jobName), status=HTTP_404_NOT_FOUND - ) + return Response(f"No JobType with job name: {jobName}", status=HTTP_404_NOT_FOUND) def destroy(self, request, project, pk=None): try: diff --git a/treeherder/webapp/api/jobs.py b/treeherder/webapp/api/jobs.py index f17da7d86ca..6eac51608ab 100644 --- a/treeherder/webapp/api/jobs.py +++ b/treeherder/webapp/api/jobs.py @@ -279,7 +279,7 @@ def retrieve(self, request, project, pk=None): 
repository__name=project, id=pk ) except Job.DoesNotExist: - return Response("No job with id: {0}".format(pk), status=HTTP_404_NOT_FOUND) + return Response(f"No job with id: {pk}", status=HTTP_404_NOT_FOUND) resp = serializers.JobProjectSerializer(job, read_only=True).data @@ -333,7 +333,7 @@ def list(self, request, project): parser.parse(param_value) except ValueError: return Response( - "Invalid date value for `last_modified`: {}".format(param_value), + f"Invalid date value for `last_modified`: {param_value}", status=HTTP_400_BAD_REQUEST, ) filter_params[param_key] = param_value @@ -349,14 +349,14 @@ def list(self, request, project): return_type = filter_params.get("return_type", "dict").lower() if count > MAX_JOBS_COUNT: - msg = "Specified count exceeds API MAX_JOBS_COUNT value: {}".format(MAX_JOBS_COUNT) + msg = f"Specified count exceeds API MAX_JOBS_COUNT value: {MAX_JOBS_COUNT}" return Response({"detail": msg}, status=HTTP_400_BAD_REQUEST) try: repository = Repository.objects.get(name=project) except Repository.DoesNotExist: return Response( - {"detail": "No project with name {}".format(project)}, status=HTTP_404_NOT_FOUND + {"detail": f"No project with name {project}"}, status=HTTP_404_NOT_FOUND ) jobs = JobFilter( {k: v for (k, v) in filter_params.items()}, @@ -379,7 +379,7 @@ def text_log_steps(self, request, project, pk=None): try: job = Job.objects.get(repository__name=project, id=pk) except ObjectDoesNotExist: - return Response("No job with id: {0}".format(pk), status=HTTP_404_NOT_FOUND) + return Response(f"No job with id: {pk}", status=HTTP_404_NOT_FOUND) textlog_steps = ( TextLogStep.objects.filter(job=job) @@ -398,7 +398,7 @@ def text_log_errors(self, request, project, pk=None): try: job = Job.objects.get(repository__name=project, id=pk) except Job.DoesNotExist: - return Response("No job with id: {0}".format(pk), status=HTTP_404_NOT_FOUND) + return Response(f"No job with id: {pk}", status=HTTP_404_NOT_FOUND) textlog_errors = ( TextLogError.objects.filter(job=job) .select_related("_metadata", "_metadata__failure_line") @@ -417,7 +417,7 @@ def bug_suggestions(self, request, project, pk=None): try: job = Job.objects.get(repository__name=project, id=pk) except ObjectDoesNotExist: - return Response("No job with id: {0}".format(pk), status=HTTP_404_NOT_FOUND) + return Response(f"No job with id: {pk}", status=HTTP_404_NOT_FOUND) return Response(get_error_summary(job)) @@ -430,13 +430,13 @@ def similar_jobs(self, request, project, pk=None): repository = Repository.objects.get(name=project) except Repository.DoesNotExist: return Response( - {"detail": "No project with name {}".format(project)}, status=HTTP_404_NOT_FOUND + {"detail": f"No project with name {project}"}, status=HTTP_404_NOT_FOUND ) try: job = Job.objects.get(repository=repository, id=pk) except ObjectDoesNotExist: - return Response("No job with id: {0}".format(pk), status=HTTP_404_NOT_FOUND) + return Response(f"No job with id: {pk}", status=HTTP_404_NOT_FOUND) filter_params = request.query_params.copy() diff --git a/treeherder/webapp/api/note.py b/treeherder/webapp/api/note.py index 94c6d40a04e..2ba421fb258 100644 --- a/treeherder/webapp/api/note.py +++ b/treeherder/webapp/api/note.py @@ -34,7 +34,7 @@ def retrieve(self, request, project, pk=None): serializer = JobNoteSerializer(JobNote.objects.get(id=pk)) return Response(serializer.data) except JobNote.DoesNotExist: - return Response("No note with id: {0}".format(pk), status=HTTP_404_NOT_FOUND) + return Response(f"No note with id: {pk}", status=HTTP_404_NOT_FOUND) def 
diff --git a/treeherder/webapp/api/perfcompare_utils.py b/treeherder/webapp/api/perfcompare_utils.py
index 89d3d3ad80f..697faf867c4 100644
--- a/treeherder/webapp/api/perfcompare_utils.py
+++ b/treeherder/webapp/api/perfcompare_utils.py
@@ -30,16 +30,16 @@


 def get_test_suite(suite, test):
-    return suite if test == "" or test == suite else "{} {}".format(suite, test)
+    return suite if test == "" or test == suite else f"{suite} {test}"


 def get_header_name(extra_options, option_name, test_suite):
-    name = "{} {} {}".format(test_suite, option_name, extra_options)
+    name = f"{test_suite} {option_name} {extra_options}"
    return name


 def get_sig_identifier(header, platform):
-    return "{} {}".format(header, platform)
+    return f"{header} {platform}"


 def get_option_collection_map():
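The perfcompare_utils.py rewrites are behavior-preserving: for bare `{}` fields, both `str.format` and an f-string interpolate each argument via `format(value, "")`, so the output is identical, and the f-string avoids the method-call overhead at runtime. A quick equivalence check, reusing the patched `get_sig_identifier` shape with made-up sample values:

    def get_sig_identifier(header, platform):
        return f"{header} {platform}"

    # Both spellings interpolate via format(value, ""), so they agree
    assert get_sig_identifier("a11yr opt", "linux1804-64") == "{} {}".format(
        "a11yr opt", "linux1804-64"
    )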
diff --git a/treeherder/webapp/api/performance_data.py b/treeherder/webapp/api/performance_data.py
index 58a646bd2ed..df3aa06af26 100644
--- a/treeherder/webapp/api/performance_data.py
+++ b/treeherder/webapp/api/performance_data.py
@@ -1,7 +1,6 @@
 import datetime
 import time
 from collections import defaultdict
-from typing import List
 from urllib.parse import urlencode

 import django_filters
@@ -819,7 +818,7 @@ def list(self, request):
        return Response(data=serialized_data)

    @staticmethod
-    def _filter_out_retriggers(serialized_data: List[dict]) -> List[dict]:
+    def _filter_out_retriggers(serialized_data):
        """
        Removes data points resulted from retriggers
        """
@@ -889,7 +888,7 @@ def list(self, request):
            new_push = models.Push.objects.get(revision=new_rev, repository__name=new_repo_name)
        except models.Push.DoesNotExist:
            return Response(
-                "No new push with revision {} from repo {}.".format(new_rev, new_repo_name),
+                f"No new push with revision {new_rev} from repo {new_repo_name}.",
                status=HTTP_400_BAD_REQUEST,
            )

@@ -910,7 +909,7 @@ def list(self, request):
                end_day = new_push.time
        except models.Push.DoesNotExist:
            return Response(
-                "No base push with revision {} from repo {}.".format(base_rev, base_repo_name),
+                f"No base push with revision {base_rev} from repo {base_repo_name}.",
                status=HTTP_400_BAD_REQUEST,
            )

@@ -1179,7 +1178,7 @@ def _get_filtered_signatures_by_interval(signatures, interval):
        )

    @staticmethod
-    def _get_signatures_values(signatures: List[PerformanceSignature]):
+    def _get_signatures_values(signatures):
        return signatures.values(
            "framework_id",
            "id",
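performance_data.py drops the `typing.List` annotations outright rather than modernizing them, which is what lets the `from typing import List` import go. Had the annotations been kept, PEP 585 (Python 3.9+) allows subscripting the builtin `list` directly; a sketch of what `_filter_out_retriggers` would look like in that style (not what this patch does):

    @staticmethod
    def _filter_out_retriggers(serialized_data: list[dict]) -> list[dict]:
        """
        Removes data points resulted from retriggers
        """
        ...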
diff --git a/treeherder/webapp/api/performance_serializers.py b/treeherder/webapp/api/performance_serializers.py
index cbf422c9ed3..5e56e1601eb 100644
--- a/treeherder/webapp/api/performance_serializers.py
+++ b/treeherder/webapp/api/performance_serializers.py
@@ -387,7 +387,7 @@ def validate_repository(self, repository):
            Repository.objects.get(name=repository)

        except ObjectDoesNotExist:
-            raise serializers.ValidationError("{} does not exist.".format(repository))
+            raise serializers.ValidationError(f"{repository} does not exist.")

        return repository
@@ -445,7 +445,7 @@ class Meta:
    def get_name(self, value):
        test = value["test"]
        suite = value["suite"]
-        test_suite = suite if test == "" or test == suite else "{} {}".format(suite, test)
+        test_suite = suite if test == "" or test == suite else f"{suite} {test}"
        return "{} {} {}".format(test_suite, value["option_name"], value["extra_options"])

diff --git a/treeherder/webapp/api/push.py b/treeherder/webapp/api/push.py
index 6b1dbb8d87e..212e14f006c 100644
--- a/treeherder/webapp/api/push.py
+++ b/treeherder/webapp/api/push.py
@@ -66,7 +66,7 @@ def list(self, request, project):
            repository = Repository.objects.get(name=project)
        except Repository.DoesNotExist:
            return Response(
-                {"detail": "No project with name {}".format(project)}, status=HTTP_404_NOT_FOUND
+                {"detail": f"No project with name {project}"}, status=HTTP_404_NOT_FOUND
            )

        pushes = pushes.filter(repository=repository)
@@ -125,7 +125,7 @@ def list(self, request, project):
                value = datetime.datetime.fromtimestamp(float(filter_params.get(param)))
            except ValueError:
                return Response(
-                    {"detail": "Invalid timestamp specified for {}".format(param)},
+                    {"detail": f"Invalid timestamp specified for {param}"},
                    status=HTTP_400_BAD_REQUEST,
                )
            pushes = pushes.filter(**{param.replace("push_timestamp", "time"): value})
@@ -135,7 +135,7 @@ def list(self, request, project):
                value = int(filter_params.get(param, 0))
            except ValueError:
                return Response(
-                    {"detail": "Invalid timestamp specified for {}".format(param)},
+                    {"detail": f"Invalid timestamp specified for {param}"},
                    status=HTTP_400_BAD_REQUEST,
                )
            if value:
@@ -168,7 +168,7 @@ def list(self, request, project):
            return Response({"detail": "Valid count value required"}, status=HTTP_400_BAD_REQUEST)

        if count > MAX_PUSH_COUNT:
-            msg = "Specified count exceeds api limit: {}".format(MAX_PUSH_COUNT)
+            msg = f"Specified count exceeds api limit: {MAX_PUSH_COUNT}"
            return Response({"detail": msg}, status=HTTP_400_BAD_REQUEST)

        # we used to have a "full" parameter for this endpoint so you could
@@ -196,7 +196,7 @@ def retrieve(self, request, project, pk=None):
            serializer = PushSerializer(push)
            return Response(serializer.data)
        except Push.DoesNotExist:
-            return Response("No push with id: {0}".format(pk), status=HTTP_404_NOT_FOUND)
+            return Response(f"No push with id: {pk}", status=HTTP_404_NOT_FOUND)

    @action(detail=True)
    def status(self, request, project, pk=None):
@@ -207,7 +207,7 @@ def status(self, request, project, pk=None):
        try:
            push = Push.objects.get(id=pk)
        except Push.DoesNotExist:
-            return Response("No push with id: {0}".format(pk), status=HTTP_404_NOT_FOUND)
+            return Response(f"No push with id: {pk}", status=HTTP_404_NOT_FOUND)
        return Response(push.get_status())

    @action(detail=False)
@@ -228,9 +228,7 @@ def health_summary(self, request, project):
                    revision__in=revision.split(","), repository__name=project
                )
            except Push.DoesNotExist:
-                return Response(
-                    "No push with revision: {0}".format(revision), status=HTTP_404_NOT_FOUND
-                )
+                return Response(f"No push with revision: {revision}", status=HTTP_404_NOT_FOUND)
        else:
            try:
                pushes = (
@@ -246,9 +244,7 @@ def health_summary(self, request, project):

                pushes = pushes[: int(count)]
            except Push.DoesNotExist:
-                return Response(
-                    "No pushes found for author: {0}".format(author), status=HTTP_404_NOT_FOUND
-                )
+                return Response(f"No pushes found for author: {author}", status=HTTP_404_NOT_FOUND)

        data = []
        commit_history = None
@@ -337,9 +333,7 @@ def health(self, request, project):
            repository = Repository.objects.get(name=project)
            push = Push.objects.get(revision=revision, repository=repository)
        except Push.DoesNotExist:
-            return Response(
-                "No push with revision: {0}".format(revision), status=HTTP_404_NOT_FOUND
-            )
+            return Response(f"No push with revision: {revision}", status=HTTP_404_NOT_FOUND)

        commit_history_details = None
        result_status, jobs = get_test_failure_jobs(push)
@@ -448,10 +442,10 @@ def decisiontask(self, request, project):
                for job in decision_jobs
            }
        )
-        logger.error("/decisiontask/ found no decision jobs for {}".format(push_ids))
+        logger.error(f"/decisiontask/ found no decision jobs for {push_ids}")
        self.get_decision_jobs.invalidate(push_ids)
        return Response(
-            "No decision tasks found for pushes: {}".format(push_ids), status=HTTP_404_NOT_FOUND
+            f"No decision tasks found for pushes: {push_ids}", status=HTTP_404_NOT_FOUND
        )

    # TODO: Remove when we no longer support short revisions: Bug 1306707
@@ -473,9 +467,7 @@ def group_results(self, request, project):
            repository = Repository.objects.get(name=project)
            push = Push.objects.get(revision=revision, repository=repository)
        except Push.DoesNotExist:
-            return Response(
-                "No push with revision: {0}".format(revision), status=HTTP_404_NOT_FOUND
-            )
+            return Response(f"No push with revision: {revision}", status=HTTP_404_NOT_FOUND)

        groups = get_group_results(push)
        return Response(groups)
diff --git a/treeherder/webapp/api/serializers.py b/treeherder/webapp/api/serializers.py
index 47228f60702..cc1e43fd371 100644
--- a/treeherder/webapp/api/serializers.py
+++ b/treeherder/webapp/api/serializers.py
@@ -343,7 +343,7 @@ def to_representation(self, value):
        build_type = value["build_type"]
        platform = value["job__machine_platform__platform"]
        test_suite = value["job__signature__job_type_name"]
-        new_string = test_suite.replace("test-{}".format(platform), "")
+        new_string = test_suite.replace(f"test-{platform}", "")
        new_test_suite = new_string.replace(build_type, "")
        return re.sub(r"^.(/|-)|(/|-)$", "", new_test_suite)

@@ -402,7 +402,7 @@ def validate_tree(self, tree):
            models.Repository.objects.get(name=tree)

        except ObjectDoesNotExist:
-            raise serializers.ValidationError("{} does not exist.".format(tree))
+            raise serializers.ValidationError(f"{tree} does not exist.")

        return tree
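A last pattern worth noting: several push.py hunks above shrink (for example `@@ -228,9 +228,7 @@`) because the f-string is shorter than the `.format()` call it replaces, so a `Response(...)` call that previously had to be wrapped across three lines now fits the project's configured line-length limit on one:

    # Before: wrapped because the .format() call overflowed the line limit
    return Response(
        "No push with revision: {0}".format(revision), status=HTTP_404_NOT_FOUND
    )
    # After: short enough to sit on a single line
    return Response(f"No push with revision: {revision}", status=HTTP_404_NOT_FOUND)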