diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 2c2047da67e10..43564d067e24c 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -177,6 +177,11 @@ repos: - id: fix-encoding-pragma args: - --remove + - repo: https://github.com/asottile/pyupgrade + rev: v2.7.3 + hooks: + - id: pyupgrade + args: ["--py36-plus"] - repo: https://github.com/pre-commit/pygrep-hooks rev: v1.6.0 hooks: diff --git a/BREEZE.rst b/BREEZE.rst index d0d5b1cb6654f..3e84e77d43b32 100644 --- a/BREEZE.rst +++ b/BREEZE.rst @@ -2005,8 +2005,8 @@ This is the current syntax for `./breeze <./breeze>`_: helm-lint incorrect-use-of-LoggingMixin insert-license isort language-matters lint-dockerfile lint-openapi mermaid mixed-line-ending mypy mypy-helm no-relative-imports pre-commit-descriptions provide-create-sessions pydevd - pydocstyle pylint pylint-tests python-no-log-warn restrict-start_date rst-backticks - setup-order setup-installation shellcheck sort-in-the-wild stylelint + pydocstyle pylint pylint-tests python-no-log-warn pyupgrade restrict-start_date + rst-backticks setup-order setup-installation shellcheck sort-in-the-wild stylelint trailing-whitespace update-breeze-file update-extras update-local-yml-file update-setup-cfg-file yamllint diff --git a/STATIC_CODE_CHECKS.rst b/STATIC_CODE_CHECKS.rst index 990852ca84119..b525394aba0b2 100644 --- a/STATIC_CODE_CHECKS.rst +++ b/STATIC_CODE_CHECKS.rst @@ -94,6 +94,8 @@ require Breeze Docker images to be installed locally: ----------------------------------- ---------------------------------------------------------------- ------------ ``fix-encoding-pragma`` Removes encoding header from python files. ----------------------------------- ---------------------------------------------------------------- ------------ +``pyupgrade`` Runs PyUpgrade +----------------------------------- ---------------------------------------------------------------- ------------ ``flake8`` Runs flake8. * ----------------------------------- ---------------------------------------------------------------- ------------ ``forbid-tabs`` Fails if tabs are used in the project. 
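For readers unfamiliar with the hook added above: pyupgrade with --py36-plus mechanically rewrites older string and I/O idioms into the Python 3.6+ forms that make up the rest of this diff (str.format() on simple names becomes an f-string, u-prefixes and redundant open(..., 'r') modes are dropped, and {0}-style indices become {}). A minimal before/after sketch follows; the file and variable names in it are hypothetical and not part of this change, and the hook itself is exercised with the standard `pre-commit run pyupgrade --all-files` invocation.

    # sketch_pyupgrade.py -- illustrative only; names here are hypothetical
    dag_id = "example_dag"

    # Before: the patterns this patch removes across the codebase.
    old_message = "Dag id {} not found".format(dag_id)  # str.format() with a simple name
    old_label = u"manual run"                           # redundant u-prefix

    # After pyupgrade --py36-plus rewrites the module:
    new_message = f"Dag id {dag_id} not found"          # rewritten to an f-string
    new_label = "manual run"                            # u-prefix dropped

    # The rewrites are behavior-preserving.
    assert old_message == new_message and old_label == new_label

Note that the tool only rewrites .format() calls whose arguments are plain names or attributes, which is why a few .format() calls with more complex arguments remain unchanged in the hunks below.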
diff --git a/airflow/api/auth/backend/kerberos_auth.py b/airflow/api/auth/backend/kerberos_auth.py index d3474840b7448..41042dec32c01 100644 --- a/airflow/api/auth/backend/kerberos_auth.py +++ b/airflow/api/auth/backend/kerberos_auth.py @@ -79,7 +79,7 @@ def init_app(app): service = 'airflow' - _KERBEROS_SERVICE.service_name = "{}@{}".format(service, hostname) + _KERBEROS_SERVICE.service_name = f"{service}@{hostname}" if 'KRB5_KTNAME' not in os.environ: os.environ['KRB5_KTNAME'] = conf.get('kerberos', 'keytab') diff --git a/airflow/api/client/json_client.py b/airflow/api/client/json_client.py index efcfc297f8253..c17307f3f1508 100644 --- a/airflow/api/client/json_client.py +++ b/airflow/api/client/json_client.py @@ -43,7 +43,7 @@ def _request(self, url, method='GET', json=None): return resp.json() def trigger_dag(self, dag_id, run_id=None, conf=None, execution_date=None): - endpoint = '/api/experimental/dags/{}/dag_runs'.format(dag_id) + endpoint = f'/api/experimental/dags/{dag_id}/dag_runs' url = urljoin(self._api_base_url, endpoint) data = self._request(url, method='POST', json={ @@ -54,13 +54,13 @@ def trigger_dag(self, dag_id, run_id=None, conf=None, execution_date=None): return data['message'] def delete_dag(self, dag_id): - endpoint = '/api/experimental/dags/{}/delete_dag'.format(dag_id) + endpoint = f'/api/experimental/dags/{dag_id}/delete_dag' url = urljoin(self._api_base_url, endpoint) data = self._request(url, method='DELETE') return data['message'] def get_pool(self, name): - endpoint = '/api/experimental/pools/{}'.format(name) + endpoint = f'/api/experimental/pools/{name}' url = urljoin(self._api_base_url, endpoint) pool = self._request(url) return pool['pool'], pool['slots'], pool['description'] @@ -83,7 +83,7 @@ def create_pool(self, name, slots, description): return pool['pool'], pool['slots'], pool['description'] def delete_pool(self, name): - endpoint = '/api/experimental/pools/{}'.format(name) + endpoint = f'/api/experimental/pools/{name}' url = urljoin(self._api_base_url, endpoint) pool = self._request(url, method='DELETE') return pool['pool'], pool['slots'], pool['description'] diff --git a/airflow/api/client/local_client.py b/airflow/api/client/local_client.py index ec4ff0d9f018c..5c08f1ab3eb13 100644 --- a/airflow/api/client/local_client.py +++ b/airflow/api/client/local_client.py @@ -30,11 +30,11 @@ def trigger_dag(self, dag_id, run_id=None, conf=None, execution_date=None): run_id=run_id, conf=conf, execution_date=execution_date) - return "Created {}".format(dag_run) + return f"Created {dag_run}" def delete_dag(self, dag_id): count = delete_dag.delete_dag(dag_id) - return "Removed {} record(s)".format(count) + return f"Removed {count} record(s)" def get_pool(self, name): the_pool = pool.get_pool(name=name) diff --git a/airflow/api/common/experimental/__init__.py b/airflow/api/common/experimental/__init__.py index 447cbba313b23..ebaea5e658322 100644 --- a/airflow/api/common/experimental/__init__.py +++ b/airflow/api/common/experimental/__init__.py @@ -27,7 +27,7 @@ def check_and_get_dag(dag_id: str, task_id: Optional[str] = None) -> DagModel: """Checks that DAG exists and in case it is specified that Task exist""" dag_model = DagModel.get_current(dag_id) if dag_model is None: - raise DagNotFound("Dag id {} not found in DagModel".format(dag_id)) + raise DagNotFound(f"Dag id {dag_id} not found in DagModel") dagbag = DagBag( dag_folder=dag_model.fileloc, @@ -35,10 +35,10 @@ def check_and_get_dag(dag_id: str, task_id: Optional[str] = None) -> DagModel: ) dag = 
dagbag.get_dag(dag_id) if not dag: - error_message = "Dag id {} not found".format(dag_id) + error_message = f"Dag id {dag_id} not found" raise DagNotFound(error_message) if task_id and not dag.has_task(task_id): - error_message = 'Task {} not found in dag {}'.format(task_id, dag_id) + error_message = f'Task {task_id} not found in dag {dag_id}' raise TaskNotFound(error_message) return dag diff --git a/airflow/api/common/experimental/delete_dag.py b/airflow/api/common/experimental/delete_dag.py index 970309f0d4ee0..ee50c7d60090e 100644 --- a/airflow/api/common/experimental/delete_dag.py +++ b/airflow/api/common/experimental/delete_dag.py @@ -42,7 +42,7 @@ def delete_dag(dag_id: str, keep_records_in_log: bool = True, session=None) -> i log.info("Deleting DAG: %s", dag_id) dag = session.query(DagModel).filter(DagModel.dag_id == dag_id).first() if dag is None: - raise DagNotFound("Dag id {} not found".format(dag_id)) + raise DagNotFound(f"Dag id {dag_id} not found") # Scheduler removes DAGs without files from serialized_dag table every dag_dir_list_interval. # There may be a lag, so explicitly removes serialized DAG here. diff --git a/airflow/api/common/experimental/mark_tasks.py b/airflow/api/common/experimental/mark_tasks.py index fe3af16ef06dc..3f1cb8363e486 100644 --- a/airflow/api/common/experimental/mark_tasks.py +++ b/airflow/api/common/experimental/mark_tasks.py @@ -94,11 +94,11 @@ def set_state( return [] if not timezone.is_localized(execution_date): - raise ValueError("Received non-localized date {}".format(execution_date)) + raise ValueError(f"Received non-localized date {execution_date}") task_dags = {task.dag for task in tasks} if len(task_dags) > 1: - raise ValueError("Received tasks from multiple DAGs: {}".format(task_dags)) + raise ValueError(f"Received tasks from multiple DAGs: {task_dags}") dag = next(iter(task_dags)) if dag is None: raise ValueError("Received tasks with no DAG") @@ -247,7 +247,7 @@ def get_execution_dates(dag, execution_date, future, past): """Returns dates of DAG execution""" latest_execution_date = dag.get_latest_execution_date() if latest_execution_date is None: - raise ValueError("Received non-localized date {}".format(execution_date)) + raise ValueError(f"Received non-localized date {execution_date}") # determine date range of dag runs and tasks to consider end_date = latest_execution_date if future else execution_date if 'start_date' in dag.default_args: diff --git a/airflow/api/common/experimental/trigger_dag.py b/airflow/api/common/experimental/trigger_dag.py index ed4d22ac3914c..e1d3ceebff242 100644 --- a/airflow/api/common/experimental/trigger_dag.py +++ b/airflow/api/common/experimental/trigger_dag.py @@ -48,7 +48,7 @@ def _trigger_dag( dag = dag_bag.get_dag(dag_id) # prefetch dag if it is stored serialized if dag_id not in dag_bag.dags: - raise DagNotFound("Dag id {} not found".format(dag_id)) + raise DagNotFound(f"Dag id {dag_id} not found") execution_date = execution_date if execution_date else timezone.utcnow() @@ -62,7 +62,7 @@ def _trigger_dag( min_dag_start_date = dag.default_args["start_date"] if min_dag_start_date and execution_date < min_dag_start_date: raise ValueError( - "The execution_date [{0}] should be >= start_date [{1}] from DAG's default_args".format( + "The execution_date [{}] should be >= start_date [{}] from DAG's default_args".format( execution_date.isoformat(), min_dag_start_date.isoformat())) @@ -112,7 +112,7 @@ def trigger_dag( """ dag_model = DagModel.get_current(dag_id) if dag_model is None: - raise DagNotFound("Dag id 
{} not found in DagModel".format(dag_id)) + raise DagNotFound(f"Dag id {dag_id} not found in DagModel") dagbag = DagBag(dag_folder=dag_model.fileloc, read_dags_from_db=True) triggers = _trigger_dag( diff --git a/airflow/api_connexion/endpoints/task_instance_endpoint.py b/airflow/api_connexion/endpoints/task_instance_endpoint.py index e23dff91d8302..37e9b02cdc4f6 100644 --- a/airflow/api_connexion/endpoints/task_instance_endpoint.py +++ b/airflow/api_connexion/endpoints/task_instance_endpoint.py @@ -245,7 +245,7 @@ def post_clear_task_instances(dag_id: str, session=None): dag = current_app.dag_bag.get_dag(dag_id) if not dag: - error_message = "Dag id {} not found".format(dag_id) + error_message = f"Dag id {dag_id} not found" raise NotFound(error_message) reset_dag_runs = data.pop('reset_dag_runs') task_instances = dag.clear(get_tis=True, **data) @@ -287,7 +287,7 @@ def post_set_task_instances_state(dag_id, session): except ValidationError as err: raise BadRequest(detail=str(err.messages)) - error_message = "Dag ID {} not found".format(dag_id) + error_message = f"Dag ID {dag_id} not found" try: dag = current_app.dag_bag.get_dag(dag_id) if not dag: @@ -300,7 +300,7 @@ def post_set_task_instances_state(dag_id, session): task = dag.task_dict.get(task_id) if not task: - error_message = "Task ID {} not found".format(task_id) + error_message = f"Task ID {task_id} not found" raise NotFound(error_message) tis = set_state( diff --git a/airflow/api_connexion/schemas/common_schema.py b/airflow/api_connexion/schemas/common_schema.py index 7874d27fdff3a..f8d6e48296d83 100644 --- a/airflow/api_connexion/schemas/common_schema.py +++ b/airflow/api_connexion/schemas/common_schema.py @@ -124,7 +124,7 @@ def get_obj_type(self, obj): elif isinstance(obj, CronExpression): return "CronExpression" else: - raise Exception("Unknown object type: {}".format(obj.__class__.__name__)) + raise Exception(f"Unknown object type: {obj.__class__.__name__}") class ColorField(fields.String): diff --git a/airflow/cli/commands/connection_command.py b/airflow/cli/commands/connection_command.py index caee57587863d..3a8d31cd5d155 100644 --- a/airflow/cli/commands/connection_command.py +++ b/airflow/cli/commands/connection_command.py @@ -182,7 +182,7 @@ def connections_add(args): missing_args.append('conn-uri or conn-type') if missing_args: msg = ('The following args are required to add a connection:' + - ' {missing!r}'.format(missing=missing_args)) + f' {missing_args!r}') raise SystemExit(msg) if invalid_args: msg = ('The following args are not compatible with the ' + diff --git a/airflow/cli/commands/dag_command.py b/airflow/cli/commands/dag_command.py index 5b4da82143b81..456b5b9838ddf 100644 --- a/airflow/cli/commands/dag_command.py +++ b/airflow/cli/commands/dag_command.py @@ -110,10 +110,10 @@ def dag_backfill(args, dag=None): run_conf = json.loads(args.conf) if args.dry_run: - print("Dry run of DAG {0} on {1}".format(args.dag_id, - args.start_date)) + print("Dry run of DAG {} on {}".format(args.dag_id, + args.start_date)) for task in dag.tasks: - print("Task {0}".format(task.task_id)) + print(f"Task {task.task_id}") ti = TaskInstance(task, args.start_date) ti.dry_run() else: @@ -239,7 +239,7 @@ def _display_dot_via_imgcat(dot: Dot): def _save_dot_to_file(dot: Dot, filename: str): filename_without_ext, _, ext = filename.rpartition('.') dot.render(filename=filename_without_ext, format=ext, cleanup=True) - print("File {} saved".format(filename)) + print(f"File {filename} saved") @cli_utils.action_logging @@ -319,7 +319,7 @@ def 
dag_list_jobs(args, dag=None): dagbag = DagBag() if args.dag_id not in dagbag.dags: - error_message = "Dag id {} not found".format(args.dag_id) + error_message = f"Dag id {args.dag_id} not found" raise AirflowException(error_message) queries.append(BaseJob.dag_id == args.dag_id) @@ -350,7 +350,7 @@ def dag_list_dag_runs(args, dag=None): dagbag = DagBag() if args.dag_id is not None and args.dag_id not in dagbag.dags: - error_message = "Dag id {} not found".format(args.dag_id) + error_message = f"Dag id {args.dag_id} not found" raise AirflowException(error_message) state = args.state.lower() if args.state else None @@ -363,7 +363,7 @@ def dag_list_dag_runs(args, dag=None): ) if not dag_runs: - print('No dag runs for {dag_id}'.format(dag_id=args.dag_id)) + print(f'No dag runs for {args.dag_id}') return dag_runs.sort(key=lambda x: x.execution_date, reverse=True) diff --git a/airflow/cli/commands/pool_command.py b/airflow/cli/commands/pool_command.py index bb3a07e0730fa..d824bd40ee16b 100644 --- a/airflow/cli/commands/pool_command.py +++ b/airflow/cli/commands/pool_command.py @@ -85,7 +85,7 @@ def pool_import_helper(filepath): """Helps import pools from the json file""" api_client = get_current_api_client() - with open(filepath, 'r') as poolfile: + with open(filepath) as poolfile: data = poolfile.read() try: # pylint: disable=too-many-nested-blocks pools_json = json.loads(data) diff --git a/airflow/cli/commands/task_command.py b/airflow/cli/commands/task_command.py index d4bc9619603ea..46f670b5742c0 100644 --- a/airflow/cli/commands/task_command.py +++ b/airflow/cli/commands/task_command.py @@ -143,7 +143,7 @@ def task_run(args, dag=None): """Runs a single task instance""" # Load custom airflow config if args.cfg_path: - with open(args.cfg_path, 'r') as conf_file: + with open(args.cfg_path) as conf_file: conf_dict = json.load(conf_file) if os.path.exists(args.cfg_path): @@ -238,7 +238,7 @@ def task_failed_deps(args): if failed_deps: print("Task instance dependencies not met:") for dep in failed_deps: - print("{}: {}".format(dep.dep_name, dep.reason)) + print(f"{dep.dep_name}: {dep.reason}") else: print("Task instance dependencies are all met.") diff --git a/airflow/cli/commands/user_command.py b/airflow/cli/commands/user_command.py index 883f349c7ab7c..90a8f2a1f35eb 100644 --- a/airflow/cli/commands/user_command.py +++ b/airflow/cli/commands/user_command.py @@ -48,7 +48,7 @@ def users_create(args): role = appbuilder.sm.find_role(args.role) if not role: valid_roles = appbuilder.sm.get_all_roles() - raise SystemExit('{} is not a valid role. Valid roles are: {}'.format(args.role, valid_roles)) + raise SystemExit(f'{args.role} is not a valid role. 
Valid roles are: {valid_roles}') if args.use_random_password: password = ''.join(random.choice(string.printable) for _ in range(16)) @@ -61,12 +61,12 @@ def users_create(args): raise SystemExit('Passwords did not match!') if appbuilder.sm.find_user(args.username): - print('{} already exist in the db'.format(args.username)) + print(f'{args.username} already exist in the db') return user = appbuilder.sm.add_user(args.username, args.firstname, args.lastname, args.email, role, password) if user: - print('{} user {} created.'.format(args.role, args.username)) + print(f'{args.role} user {args.username} created.') else: raise SystemExit('Failed to create user.') @@ -80,10 +80,10 @@ def users_delete(args): user = next(u for u in appbuilder.sm.get_all_users() if u.username == args.username) except StopIteration: - raise SystemExit('{} is not a valid user.'.format(args.username)) + raise SystemExit(f'{args.username} is not a valid user.') if appbuilder.sm.del_register_user(user): - print('User {} deleted.'.format(args.username)) + print(f'User {args.username} deleted.') else: raise SystemExit('Failed to delete user.') @@ -108,7 +108,7 @@ def users_manage_role(args, remove=False): role = appbuilder.sm.find_role(args.role) if not role: valid_roles = appbuilder.sm.get_all_roles() - raise SystemExit('{} is not a valid role. Valid roles are: {}'.format(args.role, valid_roles)) + raise SystemExit(f'{args.role} is not a valid role. Valid roles are: {valid_roles}') if remove: if role in user.roles: @@ -167,10 +167,10 @@ def users_import(args): users_list = None # pylint: disable=redefined-outer-name try: - with open(json_file, 'r') as file: + with open(json_file) as file: users_list = json.loads(file.read()) except ValueError as e: - print("File '{}' is not valid JSON. Error: {}".format(json_file, e)) + print(f"File '{json_file}' is not valid JSON. Error: {e}") sys.exit(1) users_created, users_updated = _import_users(users_list) @@ -194,7 +194,7 @@ def _import_users(users_list): # pylint: disable=redefined-outer-name role = appbuilder.sm.find_role(rolename) if not role: valid_roles = appbuilder.sm.get_all_roles() - print("Error: '{}' is not a valid role. Valid roles are: {}".format(rolename, valid_roles)) + print(f"Error: '{rolename}' is not a valid role. 
Valid roles are: {valid_roles}") sys.exit(1) else: roles.append(role) diff --git a/airflow/cli/commands/variable_command.py b/airflow/cli/commands/variable_command.py index f3a2d513253f5..1781b3667625a 100644 --- a/airflow/cli/commands/variable_command.py +++ b/airflow/cli/commands/variable_command.py @@ -82,7 +82,7 @@ def variables_export(args): def _import_helper(filepath): """Helps import variables from the file""" - with open(filepath, 'r') as varfile: + with open(filepath) as varfile: data = varfile.read() try: @@ -101,7 +101,7 @@ def _import_helper(filepath): suc_count += 1 print("{} of {} variables successfully updated.".format(suc_count, len(var_json))) if fail_count: - print("{} variable(s) failed to be updated.".format(fail_count)) + print(f"{fail_count} variable(s) failed to be updated.") def _variable_export_helper(filepath): diff --git a/airflow/cli/commands/webserver_command.py b/airflow/cli/commands/webserver_command.py index 63f9df585ea56..5a4f7e24f986b 100644 --- a/airflow/cli/commands/webserver_command.py +++ b/airflow/cli/commands/webserver_command.py @@ -153,7 +153,7 @@ def _wait_until_true(self, fn, timeout: int = 0) -> None: while not fn(): if 0 < timeout <= time.time() - start_time: raise AirflowWebServerTimeout( - "No response from gunicorn master within {0} seconds".format(timeout) + f"No response from gunicorn master within {timeout} seconds" ) sleep(0.1) @@ -328,7 +328,7 @@ def webserver(args): if args.debug: print( - "Starting the web server on port {0} and host {1}.".format( + "Starting the web server on port {} and host {}.".format( args.port, args.hostname)) app = create_app(testing=conf.getboolean('core', 'unit_test_mode')) app.run(debug=True, use_reloader=not app.config['TESTING'], diff --git a/airflow/configuration.py b/airflow/configuration.py index daee52e4bdbec..bc4b041a3e50e 100644 --- a/airflow/configuration.py +++ b/airflow/configuration.py @@ -268,7 +268,7 @@ def _create_future_warning(name, section, current_value, new_value, version): @staticmethod def _env_var_name(section, key): - return 'AIRFLOW__{S}__{K}'.format(S=section.upper(), K=key.upper()) + return f'AIRFLOW__{section.upper()}__{key.upper()}' def _get_env_var_option(self, section, key): # must have format AIRFLOW__{SECTION}__{KEY} (note double underscore) @@ -506,7 +506,7 @@ def getsection(self, section: str) -> Optional[Dict[str, Union[str, int, float, if section in self._sections: # type: ignore _section.update(copy.deepcopy(self._sections[section])) # type: ignore - section_prefix = 'AIRFLOW__{S}__'.format(S=section.upper()) + section_prefix = f'AIRFLOW__{section.upper()}__' for env_var in sorted(os.environ.keys()): if env_var.startswith(section_prefix): key = env_var.replace(section_prefix, '') diff --git a/airflow/example_dags/example_skip_dag.py b/airflow/example_dags/example_skip_dag.py index e5ffdd6e9b14d..7a0cc675911c3 100644 --- a/airflow/example_dags/example_skip_dag.py +++ b/airflow/example_dags/example_skip_dag.py @@ -46,10 +46,10 @@ def create_test_pipeline(suffix, trigger_rule, dag_): :param str trigger_rule: TriggerRule for the join task :param DAG dag_: The DAG to run the operators on """ - skip_operator = DummySkipOperator(task_id='skip_operator_{}'.format(suffix), dag=dag_) - always_true = DummyOperator(task_id='always_true_{}'.format(suffix), dag=dag_) + skip_operator = DummySkipOperator(task_id=f'skip_operator_{suffix}', dag=dag_) + always_true = DummyOperator(task_id=f'always_true_{suffix}', dag=dag_) join = DummyOperator(task_id=trigger_rule, dag=dag_, 
trigger_rule=trigger_rule) - final = DummyOperator(task_id='final_{}'.format(suffix), dag=dag_) + final = DummyOperator(task_id=f'final_{suffix}', dag=dag_) skip_operator >> join always_true >> join diff --git a/airflow/example_dags/subdags/subdag.py b/airflow/example_dags/subdags/subdag.py index 44f6aa6bf98bc..e65a5c98eaa49 100644 --- a/airflow/example_dags/subdags/subdag.py +++ b/airflow/example_dags/subdags/subdag.py @@ -35,7 +35,7 @@ def subdag(parent_dag_name, child_dag_name, args): :rtype: airflow.models.DAG """ dag_subdag = DAG( - dag_id='%s.%s' % (parent_dag_name, child_dag_name), + dag_id=f'{parent_dag_name}.{child_dag_name}', default_args=args, start_date=days_ago(2), schedule_interval="@daily", @@ -43,7 +43,7 @@ def subdag(parent_dag_name, child_dag_name, args): for i in range(5): DummyOperator( - task_id='%s-task-%s' % (child_dag_name, i + 1), + task_id='{}-task-{}'.format(child_dag_name, i + 1), default_args=args, dag=dag_subdag, ) diff --git a/airflow/example_dags/tutorial_decorated_etl_dag.py b/airflow/example_dags/tutorial_decorated_etl_dag.py index 1091ec627d533..0f78940d824e3 100644 --- a/airflow/example_dags/tutorial_decorated_etl_dag.py +++ b/airflow/example_dags/tutorial_decorated_etl_dag.py @@ -67,7 +67,7 @@ def extract(): A simple Extract task to get data ready for the rest of the data pipeline. In this case, getting data is simulated by reading from a hardcoded JSON string. """ - data_string = u'{"1001": 301.27, "1002": 433.21, "1003": 502.22}' + data_string = '{"1001": 301.27, "1002": 433.21, "1003": 502.22}' order_data_dict = json.loads(data_string) return order_data_dict diff --git a/airflow/example_dags/tutorial_etl_dag.py b/airflow/example_dags/tutorial_etl_dag.py index 431ab9709787a..4a4405e0d2c18 100644 --- a/airflow/example_dags/tutorial_etl_dag.py +++ b/airflow/example_dags/tutorial_etl_dag.py @@ -61,7 +61,7 @@ # [START extract_function] def extract(**kwargs): ti = kwargs['ti'] - data_string = u'{"1001": 301.27, "1002": 433.21, "1003": 502.22}' + data_string = '{"1001": 301.27, "1002": 433.21, "1003": 502.22}' ti.xcom_push('order_data', data_string) # [END extract_function] diff --git a/airflow/executors/celery_executor.py b/airflow/executors/celery_executor.py index 9872c5f67c605..716d1ed973873 100644 --- a/airflow/executors/celery_executor.py +++ b/airflow/executors/celery_executor.py @@ -161,7 +161,7 @@ def send_task_to_executor(task_tuple: TaskInstanceInCelery) \ with timeout(seconds=OPERATION_TIMEOUT): result = task_to_run.apply_async(args=[command], queue=queue) except Exception as e: # pylint: disable=broad-except - exception_traceback = "Celery Task ID: {}\n{}".format(key, traceback.format_exc()) + exception_traceback = f"Celery Task ID: {key}\n{traceback.format_exc()}" result = ExceptionWithTraceback(e, exception_traceback) return key, command, result diff --git a/airflow/executors/kubernetes_executor.py b/airflow/executors/kubernetes_executor.py index f5c95afcdfa86..79b75b38b18a2 100644 --- a/airflow/executors/kubernetes_executor.py +++ b/airflow/executors/kubernetes_executor.py @@ -187,7 +187,7 @@ def _run(self, ) watcher = watch.Watch() - kwargs = {'label_selector': 'airflow-worker={}'.format(scheduler_job_id)} + kwargs = {'label_selector': f'airflow-worker={scheduler_job_id}'} if resource_version: kwargs['resource_version'] = resource_version if kube_config.kube_client_request_args: @@ -571,7 +571,7 @@ def _create_or_update_secret(secret_name, secret_path): return self.kube_client.create_namespaced_secret( self.kube_config.executor_namespace, 
kubernetes.client.V1Secret( data={ - 'key.json': base64.b64encode(open(secret_path, 'r').read())}, + 'key.json': base64.b64encode(open(secret_path).read())}, metadata=kubernetes.client.V1ObjectMeta(name=secret_name)), **self.kube_config.kube_client_request_args) except ApiException as e: @@ -580,7 +580,7 @@ def _create_or_update_secret(secret_name, secret_path): secret_name, self.kube_config.executor_namespace, kubernetes.client.V1Secret( data={'key.json': base64.b64encode( - open(secret_path, 'r').read())}, + open(secret_path).read())}, metadata=kubernetes.client.V1ObjectMeta(name=secret_name)), **self.kube_config.kube_client_request_args) self.log.exception( diff --git a/airflow/hooks/dbapi_hook.py b/airflow/hooks/dbapi_hook.py index 3a803a15969ac..fde2463409df0 100644 --- a/airflow/hooks/dbapi_hook.py +++ b/airflow/hooks/dbapi_hook.py @@ -85,7 +85,7 @@ def get_uri(self) -> str: login = '{conn.login}:{conn.password}@'.format(conn=conn) host = conn.host if conn.port is not None: - host += ':{port}'.format(port=conn.port) + host += f':{conn.port}' uri = '{conn.conn_type}://{login}{host}/'.format( conn=conn, login=login, host=host) if conn.schema: @@ -242,7 +242,7 @@ def _generate_insert_sql(table, values, target_fields, replace, **kwargs): if target_fields: target_fields = ", ".join(target_fields) - target_fields = "({})".format(target_fields) + target_fields = f"({target_fields})" else: target_fields = '' @@ -250,7 +250,7 @@ def _generate_insert_sql(table, values, target_fields, replace, **kwargs): sql = "INSERT INTO " else: sql = "REPLACE INTO " - sql += "{0} {1} VALUES ({2})".format( + sql += "{} {} VALUES ({})".format( table, target_fields, ",".join(placeholders)) diff --git a/airflow/jobs/backfill_job.py b/airflow/jobs/backfill_job.py index f594b26068552..ad7ba436e5f6b 100644 --- a/airflow/jobs/backfill_job.py +++ b/airflow/jobs/backfill_job.py @@ -566,13 +566,13 @@ def _per_task_process(key, ti, session=None): # pylint: disable=too-many-return .filter(models.Pool.pool == task.pool) \ .first() if not pool: - raise PoolNotFound('Unknown pool: {}'.format(task.pool)) + raise PoolNotFound(f'Unknown pool: {task.pool}') open_slots = pool.open_slots(session=session) if open_slots <= 0: raise NoAvailablePoolSlot( "Not scheduling since there are " - "{0} open slots in pool {1}".format( + "{} open slots in pool {}".format( open_slots, task.pool)) num_running_task_instances_in_dag = DAG.get_num_task_instances( diff --git a/airflow/jobs/scheduler_job.py b/airflow/jobs/scheduler_job.py index a014a9524cb69..38a79ffa51fb2 100644 --- a/airflow/jobs/scheduler_job.py +++ b/airflow/jobs/scheduler_job.py @@ -159,7 +159,7 @@ def _run_file_processor( del parent_channel set_context(log, file_path) - setproctitle("airflow scheduler - DagFileProcessor {}".format(file_path)) + setproctitle(f"airflow scheduler - DagFileProcessor {file_path}") try: # redirect stdout/stderr to log @@ -212,10 +212,10 @@ def start(self) -> None: self.file_path, self._pickle_dags, self._dag_ids, - "DagFileProcessor{}".format(self._instance_id), + f"DagFileProcessor{self._instance_id}", self._callback_requests ), - name="DagFileProcessor{}-Process".format(self._instance_id) + name=f"DagFileProcessor{self._instance_id}-Process" ) self._process = process self._start_time = timezone.utcnow() diff --git a/airflow/kubernetes/pod_launcher.py b/airflow/kubernetes/pod_launcher.py index 8a38716337fdc..e528e2c4ae3a0 100644 --- a/airflow/kubernetes/pod_launcher.py +++ b/airflow/kubernetes/pod_launcher.py @@ -168,7 +168,7 @@ def 
parse_log_line(self, line: str) -> Tuple[str, str]: """ split_at = line.find(' ') if split_at == -1: - raise Exception('Log not in "{{timestamp}} {{log}}" format. Got: {}'.format(line)) + raise Exception(f'Log not in "{{timestamp}} {{log}}" format. Got: {line}') timestamp = line[:split_at] message = line[split_at + 1:].rstrip() return timestamp, message @@ -229,7 +229,7 @@ def read_pod_logs(self, ) except BaseHTTPError as e: raise AirflowException( - 'There was an error reading the kubernetes API: {}'.format(e) + f'There was an error reading the kubernetes API: {e}' ) @tenacity.retry( @@ -242,11 +242,11 @@ def read_pod_events(self, pod): try: return self._client.list_namespaced_event( namespace=pod.metadata.namespace, - field_selector="involvedObject.name={}".format(pod.metadata.name) + field_selector=f"involvedObject.name={pod.metadata.name}" ) except BaseHTTPError as e: raise AirflowException( - 'There was an error reading the kubernetes API: {}'.format(e) + f'There was an error reading the kubernetes API: {e}' ) @tenacity.retry( @@ -260,7 +260,7 @@ def read_pod(self, pod: V1Pod): return self._client.read_namespaced_pod(pod.metadata.name, pod.metadata.namespace) except BaseHTTPError as e: raise AirflowException( - 'There was an error reading the kubernetes API: {}'.format(e) + f'There was an error reading the kubernetes API: {e}' ) def _extract_xcom(self, pod: V1Pod): @@ -272,12 +272,12 @@ def _extract_xcom(self, pod: V1Pod): _preload_content=False) try: result = self._exec_pod_command( - resp, 'cat {}/return.json'.format(PodDefaults.XCOM_MOUNT_PATH)) + resp, f'cat {PodDefaults.XCOM_MOUNT_PATH}/return.json') self._exec_pod_command(resp, 'kill -s SIGINT 1') finally: resp.close() if result is None: - raise AirflowException('Failed to extract xcom from pod: {}'.format(pod.metadata.name)) + raise AirflowException(f'Failed to extract xcom from pod: {pod.metadata.name}') return result def _exec_pod_command(self, resp, command): diff --git a/airflow/kubernetes/secret.py b/airflow/kubernetes/secret.py index c31b749a29ce4..197464f562a78 100644 --- a/airflow/kubernetes/secret.py +++ b/airflow/kubernetes/secret.py @@ -89,7 +89,7 @@ def to_env_from_secret(self) -> k8s.V1EnvFromSource: def to_volume_secret(self) -> Tuple[k8s.V1Volume, k8s.V1VolumeMount]: """Converts to volume secret""" - vol_id = 'secretvol{}'.format(uuid.uuid4()) + vol_id = f'secretvol{uuid.uuid4()}' volume = k8s.V1Volume(name=vol_id, secret=k8s.V1SecretVolumeSource(secret_name=self.secret)) if self.items: volume.secret.items = self.items diff --git a/airflow/models/baseoperator.py b/airflow/models/baseoperator.py index d85beff099024..5e61ad40f1b36 100644 --- a/airflow/models/baseoperator.py +++ b/airflow/models/baseoperator.py @@ -588,7 +588,7 @@ def dag(self) -> Any: return self._dag else: raise AirflowException( - 'Operator {} has not been assigned to a DAG yet'.format(self)) + f'Operator {self} has not been assigned to a DAG yet') @dag.setter def dag(self, dag: Any): @@ -602,10 +602,10 @@ def dag(self, dag: Any): return if not isinstance(dag, DAG): raise TypeError( - 'Expected DAG; received {}'.format(dag.__class__.__name__)) + f'Expected DAG; received {dag.__class__.__name__}') elif self.has_dag() and self.dag is not dag: raise AirflowException( - "The DAG assigned to {} can not be changed.".format(self)) + f"The DAG assigned to {self} can not be changed.") elif self.task_id not in dag.task_dict: dag.add_task(self) elif self.task_id in dag.task_dict and dag.task_dict[self.task_id] is not self: diff --git 
a/airflow/models/connection.py b/airflow/models/connection.py index c2e0f159652c9..d724a3f18122c 100644 --- a/airflow/models/connection.py +++ b/airflow/models/connection.py @@ -242,9 +242,9 @@ def get_uri(self) -> str: if self.port: if host_block > '': - host_block += ':{}'.format(self.port) + host_block += f':{self.port}' else: - host_block += '@:{}'.format(self.port) + host_block += f'@:{self.port}' if self.schema: host_block += '/{}'.format(quote(self.schema, safe='')) @@ -321,7 +321,7 @@ def get_hook(self): """Return hook based on conn_type.""" hook_class_name, conn_id_param = CONN_TYPE_TO_HOOK.get(self.conn_type, (None, None)) if not hook_class_name: - raise AirflowException('Unknown hook type "{}"'.format(self.conn_type)) + raise AirflowException(f'Unknown hook type "{self.conn_type}"') hook_class = import_string(hook_class_name) return hook_class(**{conn_id_param: self.conn_id}) @@ -395,4 +395,4 @@ def get_connections_from_secrets(cls, conn_id: str) -> List['Connection']: conn_list = secrets_backend.get_connections(conn_id=conn_id) if conn_list: return list(conn_list) - raise AirflowNotFoundException("The conn_id `{0}` isn't defined".format(conn_id)) + raise AirflowNotFoundException(f"The conn_id `{conn_id}` isn't defined") diff --git a/airflow/models/crypto.py b/airflow/models/crypto.py index 1b5a9e928b8c4..8b55448055a91 100644 --- a/airflow/models/crypto.py +++ b/airflow/models/crypto.py @@ -90,6 +90,6 @@ def get_fernet(): ]) _fernet.is_encrypted = True except (ValueError, TypeError) as value_error: - raise AirflowException("Could not create Fernet object: {}".format(value_error)) + raise AirflowException(f"Could not create Fernet object: {value_error}") return _fernet diff --git a/airflow/models/dag.py b/airflow/models/dag.py index 5c190ed0d22ff..166553d9db415 100644 --- a/airflow/models/dag.py +++ b/airflow/models/dag.py @@ -351,7 +351,7 @@ def __init__( self._task_group = TaskGroup.create_root(self) def __repr__(self): - return "<DAG: {self.dag_id}>".format(self=self) + return f"<DAG: {self.dag_id}>" def __eq__(self, other): if (type(self) == type(other) and @@ -1251,9 +1251,9 @@ def clear( dag_bag = DagBag(read_dags_from_db=True) external_dag = dag_bag.get_dag(tii.dag_id) if not external_dag: - raise AirflowException("Could not find dag {}".format(tii.dag_id)) + raise AirflowException(f"Could not find dag {tii.dag_id}") downstream = external_dag.sub_dag( - task_ids_or_regex=r"^{}$".format(tii.task_id), + task_ids_or_regex=fr"^{tii.task_id}$", include_upstream=False, include_downstream=True ) @@ -1505,7 +1505,7 @@ def get_task(self, task_id: str, include_subdags: bool = False) -> BaseOperator: for dag in self.subdags: if task_id in dag.task_dict: return dag.task_dict[task_id] - raise TaskNotFound("Task {task_id} not found".format(task_id=task_id)) + raise TaskNotFound(f"Task {task_id} not found") def pickle_info(self): d = {} @@ -1582,7 +1582,7 @@ def add_task(self, task): if ((task.task_id in self.task_dict and self.task_dict[task.task_id] is not task) or task.task_id in self._task_group.used_group_ids): raise DuplicateTaskIdFound( - "Task id '{}' has already been added to the DAG".format(task.task_id)) + f"Task id '{task.task_id}' has already been added to the DAG") else: self.task_dict[task.task_id] = task task.dag = self @@ -2073,7 +2073,7 @@ def __init__(self, **kwargs): self.has_task_concurrency_limits = True def __repr__(self): - return "<DAG: {self.dag_id}>".format(self=self) + return f"<DAG: {self.dag_id}>" @property def timezone(self): diff --git a/airflow/models/dagrun.py b/airflow/models/dagrun.py index e9adf47315e6f..6ae17d143a6b7 100644 ---
a/airflow/models/dagrun.py +++ b/airflow/models/dagrun.py @@ -336,7 +336,7 @@ def get_dag(self): :return: DAG """ if not self.dag: - raise AirflowException("The DAG (.dag) for {} needs to be set".format(self)) + raise AirflowException(f"The DAG (.dag) for {self} needs to be set") return self.dag @@ -407,7 +407,7 @@ def update_state( unfinished_tasks, finished_tasks, session) or changed_tis duration = (timezone.utcnow() - start_dttm) - Stats.timing("dagrun.dependency-check.{}".format(self.dag_id), duration) + Stats.timing(f"dagrun.dependency-check.{self.dag_id}", duration) leaf_task_ids = {t.task_id for t in dag.leaves} leaf_tis = [ti for ti in tis if ti.task_id in leaf_task_ids] @@ -558,9 +558,9 @@ def _emit_duration_stats_for_finished_state(self): duration = (self.end_date - self.start_date) if self.state is State.SUCCESS: - Stats.timing('dagrun.duration.success.{}'.format(self.dag_id), duration) + Stats.timing(f'dagrun.duration.success.{self.dag_id}', duration) elif self.state == State.FAILED: - Stats.timing('dagrun.duration.failed.{}'.format(self.dag_id), duration) + Stats.timing(f'dagrun.duration.failed.{self.dag_id}', duration) @provide_session def verify_integrity(self, session: Session = None): @@ -589,14 +589,14 @@ def verify_integrity(self, session: Session = None): self.log.warning("Failed to get task '%s' for dag '%s'. " "Marking it as removed.", ti, dag) Stats.incr( - "task_removed_from_dag.{}".format(dag.dag_id), 1, 1) + f"task_removed_from_dag.{dag.dag_id}", 1, 1) ti.state = State.REMOVED should_restore_task = (task is not None) and ti.state == State.REMOVED if should_restore_task: self.log.info("Restoring task '%s' which was previously " "removed from DAG '%s'", ti, dag) - Stats.incr("task_restored_to_dag.{}".format(dag.dag_id), 1, 1) + Stats.incr(f"task_restored_to_dag.{dag.dag_id}", 1, 1) ti.state = State.NONE session.merge(ti) @@ -607,7 +607,7 @@ def verify_integrity(self, session: Session = None): if task.task_id not in task_ids: Stats.incr( - "task_instance_created-{}".format(task.task_type), + f"task_instance_created-{task.task_type}", 1, 1) ti = TI(task, self.execution_date) task_instance_mutation_hook(ti) diff --git a/airflow/models/taskinstance.py b/airflow/models/taskinstance.py index a1af753858f94..872682d780179 100644 --- a/airflow/models/taskinstance.py +++ b/airflow/models/taskinstance.py @@ -336,7 +336,7 @@ def command_as_list( # pylint: disable=too-many-arguments should_pass_filepath = not pickle_id and dag if should_pass_filepath and dag.full_filepath != dag.filepath: - path = "DAGS_FOLDER/{}".format(dag.filepath) + path = f"DAGS_FOLDER/{dag.filepath}" elif should_pass_filepath and dag.full_filepath: path = dag.full_filepath else: @@ -1070,7 +1070,7 @@ def _run_raw_task( context = {} # type: Dict actual_start_date = timezone.utcnow() - Stats.incr('ti.start.{}.{}'.format(task.dag_id, task.task_id)) + Stats.incr(f'ti.start.{task.dag_id}.{task.task_id}') try: if not mark_success: context = self.get_template_context() @@ -1117,7 +1117,7 @@ def _run_raw_task( self.handle_failure(e, test_mode, context) raise finally: - Stats.incr('ti.finish.{}.{}.{}'.format(task.dag_id, task.task_id, self.state)) + Stats.incr(f'ti.finish.{task.dag_id}.{task.task_id}.{self.state}') self._run_success_callback(context, task) @@ -1188,7 +1188,7 @@ def _run_mini_scheduler_on_child_tasks(self, session=None) -> None: except OperationalError as e: # Any kind of DB error here is _non fatal_ as this block is just an optimisation. 
self.log.info( - "Skipping mini scheduling run due to exception: {}".format(e.statement), + f"Skipping mini scheduling run due to exception: {e.statement}", exc_info=True, ) session.rollback() @@ -1221,7 +1221,7 @@ def signal_handler(signum, frame): # pylint: disable=unused-argument # Export context to make it available for operators to use. airflow_context_vars = context_to_airflow_vars(context, in_env_var_format=True) self.log.info("Exporting the following env vars:\n%s", - '\n'.join(["{}={}".format(k, v) + '\n'.join([f"{k}={v}" for k, v in airflow_context_vars.items()])) os.environ.update(airflow_context_vars) @@ -1258,7 +1258,7 @@ def signal_handler(signum, frame): # pylint: disable=unused-argument Stats.timing('dag.{dag_id}.{task_id}.duration'.format(dag_id=task_copy.dag_id, task_id=task_copy.task_id), duration) - Stats.incr('operator_successes_{}'.format(self.task.task_type), 1, 1) + Stats.incr(f'operator_successes_{self.task.task_type}', 1, 1) Stats.incr('ti_successes') @provide_session @@ -1391,7 +1391,7 @@ def handle_failure(self, error, test_mode=None, context=None, force_fail=False, task = self.task self.end_date = timezone.utcnow() self.set_duration() - Stats.incr('operator_failures_{}'.format(task.task_type), 1, 1) + Stats.incr(f'operator_failures_{task.task_type}', 1, 1) Stats.incr('ti_failures') if not test_mode: session.add(Log(State.FAILED, self)) diff --git a/airflow/models/variable.py b/airflow/models/variable.py index d180628443793..a95e1c905492b 100644 --- a/airflow/models/variable.py +++ b/airflow/models/variable.py @@ -52,7 +52,7 @@ def __init__(self, key=None, val=None): def __repr__(self): # Hiding the value - return '{} : {}'.format(self.key, self._val) + return f'{self.key} : {self._val}' def get_val(self): """Get Airflow Variable from Metadata DB and decode it using the Fernet Key""" @@ -126,7 +126,7 @@ def get( if default_var is not cls.__NO_DEFAULT_SENTINEL: return default_var else: - raise KeyError('Variable {} does not exist'.format(key)) + raise KeyError(f'Variable {key} does not exist') else: if deserialize_json: return json.loads(var_val) diff --git a/airflow/models/xcom.py b/airflow/models/xcom.py index 55a5278d6dbde..6300bb05a22b2 100644 --- a/airflow/models/xcom.py +++ b/airflow/models/xcom.py @@ -231,7 +231,7 @@ def delete(cls, xcoms, session=None): for xcom in xcoms: if not isinstance(xcom, XCom): raise TypeError( - 'Expected XCom; received {}'.format(xcom.__class__.__name__) + f'Expected XCom; received {xcom.__class__.__name__}' ) session.delete(xcom) session.commit() diff --git a/airflow/operators/bash.py b/airflow/operators/bash.py index 6b5995b0b4d15..7f99a609bb9cf 100644 --- a/airflow/operators/bash.py +++ b/airflow/operators/bash.py @@ -137,7 +137,7 @@ def execute(self, context): airflow_context_vars = context_to_airflow_vars(context, in_env_var_format=True) self.log.debug('Exporting the following env vars:\n%s', - '\n'.join(["{}={}".format(k, v) + '\n'.join([f"{k}={v}" for k, v in airflow_context_vars.items()])) env.update(airflow_context_vars) diff --git a/airflow/operators/python.py b/airflow/operators/python.py index f51f576cdf57d..344f0ecaceb4b 100644 --- a/airflow/operators/python.py +++ b/airflow/operators/python.py @@ -121,7 +121,7 @@ def determine_op_kwargs(python_callable: Callable, if name in context_keys: # Raise an exception to let the user know that the keyword is reserved raise ValueError( - "The key {} in the op_args is part of the context, and therefore reserved".format(name) + f"The key {name} in the op_args is part of the 
context, and therefore reserved" ) if any(str(param).startswith("**") for _, param in sig): diff --git a/airflow/operators/sql.py b/airflow/operators/sql.py index 24a67fc42c299..45cb07c2c131e 100644 --- a/airflow/operators/sql.py +++ b/airflow/operators/sql.py @@ -192,7 +192,7 @@ def execute(self, context=None): numeric_records = self._to_float(records) except (ValueError, TypeError): raise AirflowException( - "Converting a result to float failed.\n{}".format(error_msg) + f"Converting a result to float failed.\n{error_msg}" ) tests = self._get_numeric_matches(numeric_records, pass_value_conv) else: @@ -314,9 +314,9 @@ def execute(self, context=None): row1 = hook.get_first(self.sql1) if not row2: - raise AirflowException("The query {} returned None".format(self.sql2)) + raise AirflowException(f"The query {self.sql2} returned None") if not row1: - raise AirflowException("The query {} returned None".format(self.sql1)) + raise AirflowException(f"The query {self.sql1} returned None") current = dict(zip(self.metrics_sorted, row1)) reference = dict(zip(self.metrics_sorted, row2)) @@ -368,7 +368,7 @@ def execute(self, context=None): self.metrics_thresholds[k], ) raise AirflowException( - "The following tests have failed:\n {0}".format( + "The following tests have failed:\n {}".format( ", ".join(sorted(failed_tests)) ) ) diff --git a/airflow/operators/subdag_operator.py b/airflow/operators/subdag_operator.py index 96a18f7c5b193..7ef0392de68b8 100644 --- a/airflow/operators/subdag_operator.py +++ b/airflow/operators/subdag_operator.py @@ -173,7 +173,7 @@ def post_execute(self, context, result=None): if dag_run.state != State.SUCCESS: raise AirflowException( - "Expected state: SUCCESS. Actual state: {}".format(dag_run.state) + f"Expected state: SUCCESS. Actual state: {dag_run.state}" ) if self.propagate_skipped_state and self._check_skipped_states(context): @@ -187,7 +187,7 @@ def _check_skipped_states(self, context): if self.propagate_skipped_state == SkippedStatePropagationOptions.ALL_LEAVES: return all(ti.state == State.SKIPPED for ti in leaves_tis) raise AirflowException( - 'Unimplemented SkippedStatePropagationOptions {} used.'.format(self.propagate_skipped_state)) + f'Unimplemented SkippedStatePropagationOptions {self.propagate_skipped_state} used.') def _get_leaves_tis(self, execution_date): leaves_tis = [] diff --git a/airflow/providers/amazon/aws/hooks/base_aws.py b/airflow/providers/amazon/aws/hooks/base_aws.py index 7e24086362815..b16ea435f7f01 100644 --- a/airflow/providers/amazon/aws/hooks/base_aws.py +++ b/airflow/providers/amazon/aws/hooks/base_aws.py @@ -439,7 +439,7 @@ def _parse_s3_config( if config.read(config_file_name): # pragma: no cover sections = config.sections() else: - raise AirflowException("Couldn't read {0}".format(config_file_name)) + raise AirflowException(f"Couldn't read {config_file_name}") # Setting option names depending on file format if config_format is None: config_format = "boto" diff --git a/airflow/providers/amazon/aws/hooks/batch_client.py b/airflow/providers/amazon/aws/hooks/batch_client.py index a0479b70c88df..84ff4c4644ae9 100644 --- a/airflow/providers/amazon/aws/hooks/batch_client.py +++ b/airflow/providers/amazon/aws/hooks/batch_client.py @@ -253,12 +253,12 @@ def check_job_success(self, job_id: str) -> bool: return True if job_status == "FAILED": - raise AirflowException("AWS Batch job ({}) failed: {}".format(job_id, job)) + raise AirflowException(f"AWS Batch job ({job_id}) failed: {job}") if job_status in ["SUBMITTED", "PENDING", "RUNNABLE", 
"STARTING", "RUNNING"]: - raise AirflowException("AWS Batch job ({}) is not complete: {}".format(job_id, job)) + raise AirflowException(f"AWS Batch job ({job_id}) is not complete: {job}") - raise AirflowException("AWS Batch job ({}) has unknown status: {}".format(job_id, job)) + raise AirflowException(f"AWS Batch job ({job_id}) has unknown status: {job}") def wait_for_job(self, job_id: str, delay: Union[int, float, None] = None) -> None: """ @@ -352,7 +352,7 @@ def poll_job_status(self, job_id: str, match_status: List[str]) -> bool: return True if retries >= self.max_retries: - raise AirflowException("AWS Batch job ({}) status checks exceed max_retries".format(job_id)) + raise AirflowException(f"AWS Batch job ({job_id}) status checks exceed max_retries") retries += 1 pause = self.exponential_delay(retries) @@ -388,7 +388,7 @@ def get_job_description(self, job_id: str) -> Dict: if error.get("Code") == "TooManyRequestsException": pass # allow it to retry, if possible else: - raise AirflowException("AWS Batch job ({}) description error: {}".format(job_id, err)) + raise AirflowException(f"AWS Batch job ({job_id}) description error: {err}") retries += 1 if retries >= self.status_retries: @@ -426,9 +426,7 @@ def parse_job_description(job_id: str, response: Dict) -> Dict: jobs = response.get("jobs", []) matching_jobs = [job for job in jobs if job.get("jobId") == job_id] if len(matching_jobs) != 1: - raise AirflowException( - "AWS Batch job ({}) description error: response: {}".format(job_id, response) - ) + raise AirflowException(f"AWS Batch job ({job_id}) description error: response: {response}") return matching_jobs[0] diff --git a/airflow/providers/amazon/aws/hooks/datasync.py b/airflow/providers/amazon/aws/hooks/datasync.py index e7c0e3aad167d..e8508f9c24a0e 100644 --- a/airflow/providers/amazon/aws/hooks/datasync.py +++ b/airflow/providers/amazon/aws/hooks/datasync.py @@ -79,7 +79,7 @@ def create_location(self, location_uri: str, **create_location_kwargs) -> str: elif typ == "efs": location = self.get_conn().create_loction_efs(**create_location_kwargs) else: - raise AirflowException("Invalid location type: {0}".format(typ)) + raise AirflowException(f"Invalid location type: {typ}") self._refresh_locations() return location["LocationArn"] diff --git a/airflow/providers/amazon/aws/hooks/s3.py b/airflow/providers/amazon/aws/hooks/s3.py index af403dcf7cede..61640aef1c121 100644 --- a/airflow/providers/amazon/aws/hooks/s3.py +++ b/airflow/providers/amazon/aws/hooks/s3.py @@ -110,7 +110,7 @@ def __init__(self, *args, **kwargs) -> None: if 'extra_args' in kwargs: self.extra_args = kwargs['extra_args'] if not isinstance(self.extra_args, dict): - raise ValueError("extra_args '%r' must be of type %s" % (self.extra_args, dict)) + raise ValueError(f"extra_args '{self.extra_args!r}' must be of type {dict}") del kwargs['extra_args'] super().__init__(*args, **kwargs) @@ -128,7 +128,7 @@ def parse_s3_url(s3url: str) -> Tuple[str, str]: parsed_url = urlparse(s3url) if not parsed_url.netloc: - raise AirflowException('Please provide a bucket_name instead of "{s3url}"'.format(s3url=s3url)) + raise AirflowException(f'Please provide a bucket_name instead of "{s3url}"') bucket_name = parsed_url.netloc key = parsed_url.path.strip('/') @@ -199,7 +199,7 @@ def check_for_prefix(self, prefix: str, delimiter: str, bucket_name: Optional[st :rtype: bool """ prefix = prefix + delimiter if prefix[-1] != delimiter else prefix - prefix_split = re.split(r'(\w+[{d}])$'.format(d=delimiter), prefix, 1) + prefix_split = 
re.split(fr'(\w+[{delimiter}])$', prefix, 1) previous_level = prefix_split[0] plist = self.list_prefixes(bucket_name, previous_level, delimiter) return prefix in plist @@ -482,7 +482,7 @@ def load_file( :type acl_policy: str """ if not replace and self.check_for_key(key, bucket_name): - raise ValueError("The key {key} already exists.".format(key=key)) + raise ValueError(f"The key {key} already exists.") extra_args = self.extra_args if encrypt: @@ -621,7 +621,7 @@ def _upload_file_obj( acl_policy: Optional[str] = None, ) -> None: if not replace and self.check_for_key(key, bucket_name): - raise ValueError("The key {key} already exists.".format(key=key)) + raise ValueError(f"The key {key} already exists.") extra_args = self.extra_args if encrypt: @@ -749,7 +749,7 @@ def delete_objects(self, bucket: str, keys: Union[str, list]) -> None: self.log.info("Deleted: %s", deleted_keys) if "Errors" in response: errors_keys = [x['Key'] for x in response.get("Errors", [])] - raise AirflowException("Errors when deleting: {}".format(errors_keys)) + raise AirflowException(f"Errors when deleting: {errors_keys}") @provide_bucket_name @unify_bucket_name_and_key diff --git a/airflow/providers/amazon/aws/hooks/sagemaker.py b/airflow/providers/amazon/aws/hooks/sagemaker.py index af2733dc5a4b4..9190d27e1eb69 100644 --- a/airflow/providers/amazon/aws/hooks/sagemaker.py +++ b/airflow/providers/amazon/aws/hooks/sagemaker.py @@ -205,7 +205,7 @@ def check_s3_url(self, s3url: str) -> bool: """ bucket, key = S3Hook.parse_s3_url(s3url) if not self.s3_hook.check_for_bucket(bucket_name=bucket): - raise AirflowException("The input S3 Bucket {} does not exist ".format(bucket)) + raise AirflowException(f"The input S3 Bucket {bucket} does not exist ") if ( key and not self.s3_hook.check_for_key(key=key, bucket_name=bucket) @@ -842,7 +842,7 @@ def check_training_status_with_log( status = last_description['TrainingJobStatus'] if status in failed_states: reason = last_description.get('FailureReason', '(No reason provided)') - raise AirflowException('Error training {}: {} Reason: {}'.format(job_name, status, reason)) + raise AirflowException(f'Error training {job_name}: {status} Reason: {reason}') billable_time = ( last_description['TrainingEndTime'] - last_description['TrainingStartTime'] ) * instance_count diff --git a/airflow/providers/amazon/aws/log/s3_task_handler.py b/airflow/providers/amazon/aws/log/s3_task_handler.py index 922e9ec0a9ed1..3c8cb72e266e2 100644 --- a/airflow/providers/amazon/aws/log/s3_task_handler.py +++ b/airflow/providers/amazon/aws/log/s3_task_handler.py @@ -86,7 +86,7 @@ def close(self): remote_loc = os.path.join(self.remote_base, self.log_relative_path) if os.path.exists(local_loc): # read log and remove old logs to get just the latest additions - with open(local_loc, 'r') as logfile: + with open(local_loc) as logfile: log = logfile.read() self.s3_write(log, remote_loc) @@ -114,7 +114,7 @@ def _read(self, ti, try_number, metadata=None): # local machine even if there are errors reading remote logs, as # returned remote_log will contain error messages. 
remote_log = self.s3_read(remote_loc, return_error=True) - log = '*** Reading remote log from {}.\n{}\n'.format(remote_loc, remote_log) + log = f'*** Reading remote log from {remote_loc}.\n{remote_log}\n' return log, {'end_of_log': True} else: return super()._read(ti, try_number) @@ -148,7 +148,7 @@ def s3_read(self, remote_log_location: str, return_error: bool = False) -> str: try: return self.hook.read_key(remote_log_location) except Exception: # pylint: disable=broad-except - msg = 'Could not read logs from {}'.format(remote_log_location) + msg = f'Could not read logs from {remote_log_location}' self.log.exception(msg) # return error if needed if return_error: diff --git a/airflow/providers/amazon/aws/operators/datasync.py b/airflow/providers/amazon/aws/operators/datasync.py index 2573da1a505b7..661054afd85b7 100644 --- a/airflow/providers/amazon/aws/operators/datasync.py +++ b/airflow/providers/amazon/aws/operators/datasync.py @@ -158,7 +158,7 @@ def __init__( if not valid: raise AirflowException( "Either specify task_arn or both source_location_uri and destination_location_uri. " - "task_arn={0} source_location_uri={1} destination_location_uri={2}".format( + "task_arn={} source_location_uri={} destination_location_uri={}".format( task_arn, source_location_uri, destination_location_uri ) ) @@ -259,7 +259,7 @@ def choose_task(self, task_arn_list: list) -> Optional[str]: # from AWS and might lead to confusion. Rather explicitly # choose a random one return random.choice(task_arn_list) - raise AirflowException("Unable to choose a Task from {}".format(task_arn_list)) + raise AirflowException(f"Unable to choose a Task from {task_arn_list}") def choose_location(self, location_arn_list: List[str]) -> Optional[str]: """Select 1 DataSync LocationArn from a list""" @@ -273,7 +273,7 @@ def choose_location(self, location_arn_list: List[str]) -> Optional[str]: # from AWS and might lead to confusion. 
Rather explicitly # choose a random one return random.choice(location_arn_list) - raise AirflowException("Unable to choose a Location from {}".format(location_arn_list)) + raise AirflowException(f"Unable to choose a Location from {location_arn_list}") def _create_datasync_task(self) -> None: """Create a AWS DataSyncTask.""" diff --git a/airflow/providers/amazon/aws/operators/ecs.py b/airflow/providers/amazon/aws/operators/ecs.py index eed4fce50a502..2ec170667c284 100644 --- a/airflow/providers/amazon/aws/operators/ecs.py +++ b/airflow/providers/amazon/aws/operators/ecs.py @@ -271,7 +271,7 @@ def _check_success_task(self) -> None: if self.awslogs_group and self.awslogs_stream_prefix: self.log.info('ECS Task logs output:') task_id = self.arn.split("/")[-1] - stream_name = "{}/{}".format(self.awslogs_stream_prefix, task_id) + stream_name = f"{self.awslogs_stream_prefix}/{task_id}" for event in self.get_logs_hook().get_log_events(self.awslogs_group, stream_name): event_dt = datetime.fromtimestamp(event['timestamp'] / 1000.0) self.log.info("[%s] %s", event_dt.isoformat(), event['message']) @@ -293,9 +293,9 @@ def _check_success_task(self) -> None: containers = task['containers'] for container in containers: if container.get('lastStatus') == 'STOPPED' and container['exitCode'] != 0: - raise AirflowException('This task is not in success state {}'.format(task)) + raise AirflowException(f'This task is not in success state {task}') elif container.get('lastStatus') == 'PENDING': - raise AirflowException('This task is still pending {}'.format(task)) + raise AirflowException(f'This task is still pending {task}') elif 'error' in container.get('reason', '').lower(): raise AirflowException( 'This containers encounter an error during launching : {}'.format( diff --git a/airflow/providers/amazon/aws/operators/s3_file_transform.py b/airflow/providers/amazon/aws/operators/s3_file_transform.py index e2aa822f282a9..65849ec09e814 100644 --- a/airflow/providers/amazon/aws/operators/s3_file_transform.py +++ b/airflow/providers/amazon/aws/operators/s3_file_transform.py @@ -121,7 +121,7 @@ def execute(self, context): self.log.info("Downloading source S3 file %s", self.source_s3_key) if not source_s3.check_for_key(self.source_s3_key): - raise AirflowException("The source key {0} does not exist".format(self.source_s3_key)) + raise AirflowException(f"The source key {self.source_s3_key} does not exist") source_s3_key_object = source_s3.get_key(self.source_s3_key) with NamedTemporaryFile("wb") as f_source, NamedTemporaryFile("wb") as f_dest: @@ -149,7 +149,7 @@ def execute(self, context): process.wait() if process.returncode: - raise AirflowException("Transform script failed: {0}".format(process.returncode)) + raise AirflowException(f"Transform script failed: {process.returncode}") else: self.log.info( "Transform script successful. 
Output temporarily located at %s", f_dest.name diff --git a/airflow/providers/amazon/aws/transfers/redshift_to_s3.py b/airflow/providers/amazon/aws/transfers/redshift_to_s3.py index 73adec286ebd5..0a9c9059e25b1 100644 --- a/airflow/providers/amazon/aws/transfers/redshift_to_s3.py +++ b/airflow/providers/amazon/aws/transfers/redshift_to_s3.py @@ -108,8 +108,8 @@ def execute(self, context) -> None: credentials = s3_hook.get_credentials() unload_options = '\n\t\t\t'.join(self.unload_options) - s3_key = '{}/{}_'.format(self.s3_key, self.table) if self.table_as_file_name else self.s3_key - select_query = "SELECT * FROM {schema}.{table}".format(schema=self.schema, table=self.table) + s3_key = f'{self.s3_key}/{self.table}_' if self.table_as_file_name else self.s3_key + select_query = f"SELECT * FROM {self.schema}.{self.table}" unload_query = """ UNLOAD ('{select_query}') TO 's3://{s3_bucket}/{s3_key}' diff --git a/airflow/providers/apache/druid/hooks/druid.py b/airflow/providers/apache/druid/hooks/druid.py index 04add1c58ab87..23935909b39f0 100644 --- a/airflow/providers/apache/druid/hooks/druid.py +++ b/airflow/providers/apache/druid/hooks/druid.py @@ -104,13 +104,13 @@ def submit_indexing_job(self, json_index_spec: Dict[str, Any]) -> None: sec = 0 while running: - req_status = requests.get("{0}/{1}/status".format(url, druid_task_id), auth=self.get_auth()) + req_status = requests.get(f"{url}/{druid_task_id}/status", auth=self.get_auth()) self.log.info("Job still running for %s seconds...", sec) if self.max_ingestion_time and sec > self.max_ingestion_time: # ensure that the job gets killed if the max ingestion time is exceeded - requests.post("{0}/{1}/shutdown".format(url, druid_task_id), auth=self.get_auth()) + requests.post(f"{url}/{druid_task_id}/shutdown", auth=self.get_auth()) raise AirflowException('Druid ingestion took more than ' f'{self.max_ingestion_time} seconds') time.sleep(self.timeout) @@ -165,10 +165,10 @@ def get_uri(self) -> str: conn = self.get_connection(getattr(self, self.conn_name_attr)) host = conn.host if conn.port is not None: - host += ':{port}'.format(port=conn.port) + host += f':{conn.port}' conn_type = 'druid' if not conn.conn_type else conn.conn_type endpoint = conn.extra_dejson.get('endpoint', 'druid/v2/sql') - return '{conn_type}://{host}/{endpoint}'.format(conn_type=conn_type, host=host, endpoint=endpoint) + return f'{conn_type}://{host}/{endpoint}' def set_autocommit(self, conn: connect, autocommit: bool) -> NotImplemented: raise NotImplementedError() diff --git a/airflow/providers/apache/druid/transfers/hive_to_druid.py b/airflow/providers/apache/druid/transfers/hive_to_druid.py index 99d4f4deae311..ce775361019c4 100644 --- a/airflow/providers/apache/druid/transfers/hive_to_druid.py +++ b/airflow/providers/apache/druid/transfers/hive_to_druid.py @@ -123,7 +123,7 @@ def execute(self, context: Dict[str, Any]) -> None: self.log.info("Extracting data from Hive") hive_table = 'druid.' 
+ context['task_instance_key_str'].replace('.', '_') sql = self.sql.strip().strip(';') - tblproperties = ''.join([", '{}' = '{}'".format(k, v) for k, v in self.hive_tblproperties.items()]) + tblproperties = ''.join([f", '{k}' = '{v}'" for k, v in self.hive_tblproperties.items()]) hql = f"""\ SET mapred.output.compress=false; SET hive.exec.compress.output=false; @@ -162,7 +162,7 @@ def execute(self, context: Dict[str, Any]) -> None: self.log.info("Load seems to have succeeded!") finally: self.log.info("Cleaning up by dropping the temp Hive table %s", hive_table) - hql = "DROP TABLE IF EXISTS {}".format(hive_table) + hql = f"DROP TABLE IF EXISTS {hive_table}" hive.run_cli(hql) def construct_ingest_query(self, static_path: str, columns: List[str]) -> Dict[str, Any]: diff --git a/airflow/providers/apache/hdfs/hooks/webhdfs.py b/airflow/providers/apache/hdfs/hooks/webhdfs.py index bc24601cc1abb..11817a4fae628 100644 --- a/airflow/providers/apache/hdfs/hooks/webhdfs.py +++ b/airflow/providers/apache/hdfs/hooks/webhdfs.py @@ -92,7 +92,7 @@ def _find_valid_server(self) -> Any: return None def _get_client(self, connection: Connection) -> Any: - connection_str = 'http://{host}:{port}'.format(host=connection.host, port=connection.port) + connection_str = f'http://{connection.host}:{connection.port}' if _kerberos_security_mode: client = KerberosClient(connection_str) diff --git a/airflow/providers/apache/hive/hooks/hive.py b/airflow/providers/apache/hive/hooks/hive.py index 912a219739600..fb04e39ae43c0 100644 --- a/airflow/providers/apache/hive/hooks/hive.py +++ b/airflow/providers/apache/hive/hooks/hive.py @@ -113,11 +113,11 @@ def _get_proxy_user(self) -> str: proxy_user_value: str = conn.extra_dejson.get('proxy_user', "") if proxy_user_value == "login" and conn.login: - return "hive.server2.proxy.user={0}".format(conn.login) + return f"hive.server2.proxy.user={conn.login}" if proxy_user_value == "owner" and self.run_as: - return "hive.server2.proxy.user={0}".format(self.run_as) + return f"hive.server2.proxy.user={self.run_as}" if proxy_user_value != "": # There is a custom proxy user - return "hive.server2.proxy.user={0}".format(proxy_user_value) + return f"hive.server2.proxy.user={proxy_user_value}" return proxy_user_value # The default proxy user (undefined) def _prepare_cli_cmd(self) -> List[Any]: @@ -144,7 +144,7 @@ def _prepare_cli_cmd(self) -> List[Any]: elif self.auth: jdbc_url += ";auth=" + self.auth - jdbc_url = '"{}"'.format(jdbc_url) + jdbc_url = f'"{jdbc_url}"' cmd_extra += ['-u', jdbc_url] if conn.login: @@ -174,7 +174,7 @@ def _prepare_hiveconf(d: Dict[Any, Any]) -> List[Any]: """ if not d: return [] - return as_flattened_list(zip(["-hiveconf"] * len(d), ["{}={}".format(k, v) for k, v in d.items()])) + return as_flattened_list(zip(["-hiveconf"] * len(d), [f"{k}={v}" for k, v in d.items()])) def run_cli( self, @@ -203,7 +203,7 @@ def run_cli( conn = self.conn schema = schema or conn.schema if schema: - hql = "USE {schema};\n{hql}".format(schema=schema, hql=hql) + hql = f"USE {schema};\n{hql}" with TemporaryDirectory(prefix='airflow_hiveop_') as tmp_dir: with NamedTemporaryFile(dir=tmp_dir) as f: @@ -220,21 +220,21 @@ def run_cli( hive_conf_params.extend( [ '-hiveconf', - 'mapreduce.job.queuename={}'.format(self.mapred_queue), + f'mapreduce.job.queuename={self.mapred_queue}', '-hiveconf', - 'mapred.job.queue.name={}'.format(self.mapred_queue), + f'mapred.job.queue.name={self.mapred_queue}', '-hiveconf', - 'tez.queue.name={}'.format(self.mapred_queue), + 
f'tez.queue.name={self.mapred_queue}', ] ) if self.mapred_queue_priority: hive_conf_params.extend( - ['-hiveconf', 'mapreduce.job.priority={}'.format(self.mapred_queue_priority)] + ['-hiveconf', f'mapreduce.job.priority={self.mapred_queue_priority}'] ) if self.mapred_job_name: - hive_conf_params.extend(['-hiveconf', 'mapred.job.name={}'.format(self.mapred_job_name)]) + hive_conf_params.extend(['-hiveconf', f'mapred.job.name={self.mapred_job_name}']) hive_cmd.extend(hive_conf_params) hive_cmd.extend(['-f', f.name]) @@ -421,31 +421,31 @@ def load_file( """ hql = '' if recreate: - hql += "DROP TABLE IF EXISTS {table};\n".format(table=table) + hql += f"DROP TABLE IF EXISTS {table};\n" if create or recreate: if field_dict is None: raise ValueError("Must provide a field dict when creating a table") fields = ",\n ".join(['`{k}` {v}'.format(k=k.strip('`'), v=v) for k, v in field_dict.items()]) - hql += "CREATE TABLE IF NOT EXISTS {table} (\n{fields})\n".format(table=table, fields=fields) + hql += f"CREATE TABLE IF NOT EXISTS {table} (\n{fields})\n" if partition: pfields = ",\n ".join([p + " STRING" for p in partition]) - hql += "PARTITIONED BY ({pfields})\n".format(pfields=pfields) + hql += f"PARTITIONED BY ({pfields})\n" hql += "ROW FORMAT DELIMITED\n" - hql += "FIELDS TERMINATED BY '{delimiter}'\n".format(delimiter=delimiter) + hql += f"FIELDS TERMINATED BY '{delimiter}'\n" hql += "STORED AS textfile\n" if tblproperties is not None: - tprops = ", ".join(["'{0}'='{1}'".format(k, v) for k, v in tblproperties.items()]) - hql += "TBLPROPERTIES({tprops})\n".format(tprops=tprops) + tprops = ", ".join([f"'{k}'='{v}'" for k, v in tblproperties.items()]) + hql += f"TBLPROPERTIES({tprops})\n" hql += ";" self.log.info(hql) self.run_cli(hql) - hql = "LOAD DATA LOCAL INPATH '{filepath}' ".format(filepath=filepath) + hql = f"LOAD DATA LOCAL INPATH '{filepath}' " if overwrite: hql += "OVERWRITE " - hql += "INTO TABLE {table} ".format(table=table) + hql += f"INTO TABLE {table} " if partition: - pvals = ", ".join(["{0}='{1}'".format(k, v) for k, v in partition.items()]) - hql += "PARTITION ({pvals})".format(pvals=pvals) + pvals = ", ".join([f"{k}='{v}'" for k, v in partition.items()]) + hql += f"PARTITION ({pvals})" # As a workaround for HIVE-10541, add a newline character # at the end of hql (AIRFLOW-2412). 
@@ -873,7 +873,7 @@ def _get_results( if hive_conf: env_context.update(hive_conf) for k, v in env_context.items(): - cur.execute("set {}={}".format(k, v)) + cur.execute(f"set {k}={v}") for statement in hql: cur.execute(statement) diff --git a/airflow/providers/apache/hive/operators/hive_stats.py b/airflow/providers/apache/hive/operators/hive_stats.py index 30beccab5548e..d4de591a24e4c 100644 --- a/airflow/providers/apache/hive/operators/hive_stats.py +++ b/airflow/providers/apache/hive/operators/hive_stats.py @@ -133,7 +133,7 @@ def execute(self, context: Optional[Dict[str, Any]] = None) -> None: exprs = OrderedDict(exprs) exprs_str = ",\n ".join([v + " AS " + k[0] + '__' + k[1] for k, v in exprs.items()]) - where_clause_ = ["{} = '{}'".format(k, v) for k, v in self.partition.items()] + where_clause_ = [f"{k} = '{v}'" for k, v in self.partition.items()] where_clause = " AND\n ".join(where_clause_) sql = "SELECT {exprs_str} FROM {table} WHERE {where_clause};".format( exprs_str=exprs_str, table=self.table, where_clause=where_clause diff --git a/airflow/providers/apache/hive/transfers/mssql_to_hive.py b/airflow/providers/apache/hive/transfers/mssql_to_hive.py index 5079ff3e7dc41..5e5af8c5dbe86 100644 --- a/airflow/providers/apache/hive/transfers/mssql_to_hive.py +++ b/airflow/providers/apache/hive/transfers/mssql_to_hive.py @@ -122,7 +122,7 @@ def execute(self, context: Dict[str, str]): col_count = 0 for field in cursor.description: col_count += 1 - col_position = "Column{position}".format(position=col_count) + col_position = f"Column{col_count}" field_dict[col_position if field[0] == '' else field[0]] = self.type_map(field[1]) csv_writer.writerows(cursor) tmp_file.flush() diff --git a/airflow/providers/apache/hive/transfers/s3_to_hive.py b/airflow/providers/apache/hive/transfers/s3_to_hive.py index ae36ef718e03f..962b92fb2b08a 100644 --- a/airflow/providers/apache/hive/transfers/s3_to_hive.py +++ b/airflow/providers/apache/hive/transfers/s3_to_hive.py @@ -239,7 +239,7 @@ def execute(self, context): ) def _get_top_row_as_list(self, file_name): - with open(file_name, 'rt') as file: + with open(file_name) as file: header_line = file.readline().strip() header_list = header_line.split(self.delimiter) return header_list diff --git a/airflow/providers/apache/hive/transfers/vertica_to_hive.py b/airflow/providers/apache/hive/transfers/vertica_to_hive.py index aea700f00ec7a..725dbfc1282d5 100644 --- a/airflow/providers/apache/hive/transfers/vertica_to_hive.py +++ b/airflow/providers/apache/hive/transfers/vertica_to_hive.py @@ -125,7 +125,7 @@ def execute(self, context): col_count = 0 for field in cursor.description: col_count += 1 - col_position = "Column{position}".format(position=col_count) + col_position = f"Column{col_count}" field_dict[col_position if field[0] == '' else field[0]] = self.type_map(field[1]) csv_writer.writerows(cursor.iterate()) f.flush() diff --git a/airflow/providers/apache/kylin/hooks/kylin.py b/airflow/providers/apache/kylin/hooks/kylin.py index f77a5d9aa2c3d..412bc54250b99 100644 --- a/airflow/providers/apache/kylin/hooks/kylin.py +++ b/airflow/providers/apache/kylin/hooks/kylin.py @@ -74,7 +74,7 @@ def cube_run(self, datasource_name, op, **op_args): response = cube_source.invoke_command(op, **op_args) return response except exceptions.KylinError as err: - raise AirflowException("Cube operation {} error , Message: {}".format(op, err)) + raise AirflowException(f"Cube operation {op} error , Message: {err}") def get_job_status(self, job_id): """ diff --git 
a/airflow/providers/apache/kylin/operators/kylin_cube.py b/airflow/providers/apache/kylin/operators/kylin_cube.py index 059954303c036..92fe73c25a6e5 100644 --- a/airflow/providers/apache/kylin/operators/kylin_cube.py +++ b/airflow/providers/apache/kylin/operators/kylin_cube.py @@ -178,13 +178,13 @@ def execute(self, context): job_status = None while job_status not in self.jobs_end_status: if (timezone.utcnow() - started_at).total_seconds() > self.timeout: - raise AirflowException('kylin job {} timeout'.format(job_id)) + raise AirflowException(f'kylin job {job_id} timeout') time.sleep(self.interval) job_status = _hook.get_job_status(job_id) self.log.info('Kylin job status is %s ', job_status) if job_status in self.jobs_error_status: - raise AirflowException('Kylin job {} status {} is error '.format(job_id, job_status)) + raise AirflowException(f'Kylin job {job_id} status {job_status} is error ') if self.do_xcom_push: return rsp_data diff --git a/airflow/providers/apache/livy/hooks/livy.py b/airflow/providers/apache/livy/hooks/livy.py index d3f88cf087c85..742dfeabc102c 100644 --- a/airflow/providers/apache/livy/hooks/livy.py +++ b/airflow/providers/apache/livy/hooks/livy.py @@ -106,7 +106,7 @@ def run_method( :rtype: requests.Response """ if method not in ('GET', 'POST', 'PUT', 'DELETE', 'HEAD'): - raise ValueError("Invalid http method '{}'".format(method)) + raise ValueError(f"Invalid http method '{method}'") if extra_options is None: extra_options = {'check_response': False} @@ -163,14 +163,14 @@ def get_batch(self, session_id: Union[int, str]) -> Any: self._validate_session_id(session_id) self.log.debug("Fetching info for batch session %d", session_id) - response = self.run_method(endpoint='/batches/{}'.format(session_id)) + response = self.run_method(endpoint=f'/batches/{session_id}') try: response.raise_for_status() except requests.exceptions.HTTPError as err: self.log.warning("Got status code %d for session %d", err.response.status_code, session_id) raise AirflowException( - "Unable to fetch batch with id: {}. Message: {}".format(session_id, err.response.text) + f"Unable to fetch batch with id: {session_id}. Message: {err.response.text}" ) return response.json() @@ -187,19 +187,19 @@ def get_batch_state(self, session_id: Union[int, str]) -> BatchState: self._validate_session_id(session_id) self.log.debug("Fetching info for batch session %d", session_id) - response = self.run_method(endpoint='/batches/{}/state'.format(session_id)) + response = self.run_method(endpoint=f'/batches/{session_id}/state') try: response.raise_for_status() except requests.exceptions.HTTPError as err: self.log.warning("Got status code %d for session %d", err.response.status_code, session_id) raise AirflowException( - "Unable to fetch batch with id: {}. Message: {}".format(session_id, err.response.text) + f"Unable to fetch batch with id: {session_id}. 
Message: {err.response.text}" ) jresp = response.json() if 'state' not in jresp: - raise AirflowException("Unable to get state for batch with id: {}".format(session_id)) + raise AirflowException(f"Unable to get state for batch with id: {session_id}") return BatchState(jresp['state']) def delete_batch(self, session_id: Union[int, str]) -> Any: @@ -214,7 +214,7 @@ def delete_batch(self, session_id: Union[int, str]) -> Any: self._validate_session_id(session_id) self.log.info("Deleting batch session %d", session_id) - response = self.run_method(method='DELETE', endpoint='/batches/{}'.format(session_id)) + response = self.run_method(method='DELETE', endpoint=f'/batches/{session_id}') try: response.raise_for_status() @@ -360,7 +360,7 @@ def _validate_size_format(size: str) -> bool: :rtype: bool """ if size and not (isinstance(size, str) and re.match(r'^\d+[kmgt]b?$', size, re.IGNORECASE)): - raise ValueError("Invalid java size format for string'{}'".format(size)) + raise ValueError(f"Invalid java size format for string'{size}'") return True @staticmethod diff --git a/airflow/providers/apache/livy/operators/livy.py b/airflow/providers/apache/livy/operators/livy.py index 154f562c56c32..4f302a8d3c1ce 100644 --- a/airflow/providers/apache/livy/operators/livy.py +++ b/airflow/providers/apache/livy/operators/livy.py @@ -157,7 +157,7 @@ def poll_for_termination(self, batch_id: Union[int, str]) -> None: state = hook.get_batch_state(batch_id) self.log.info("Batch with id %s terminated with state: %s", batch_id, state.value) if state != BatchState.SUCCESS: - raise AirflowException("Batch {} did not succeed".format(batch_id)) + raise AirflowException(f"Batch {batch_id} did not succeed") def on_kill(self) -> None: self.kill() diff --git a/airflow/providers/apache/pinot/hooks/pinot.py b/airflow/providers/apache/pinot/hooks/pinot.py index 0ba8b8f224830..842a4923fb178 100644 --- a/airflow/providers/apache/pinot/hooks/pinot.py +++ b/airflow/providers/apache/pinot/hooks/pinot.py @@ -279,10 +279,10 @@ def get_uri(self) -> str: conn = self.get_connection(getattr(self, self.conn_name_attr)) host = conn.host if conn.port is not None: - host += ':{port}'.format(port=conn.port) + host += f':{conn.port}' conn_type = 'http' if not conn.conn_type else conn.conn_type endpoint = conn.extra_dejson.get('endpoint', 'pql') - return '{conn_type}://{host}/{endpoint}'.format(conn_type=conn_type, host=host, endpoint=endpoint) + return f'{conn_type}://{host}/{endpoint}' def get_records(self, sql: str, parameters: Optional[Union[Dict[str, Any], Iterable[Any]]] = None) -> Any: """ diff --git a/airflow/providers/apache/spark/hooks/spark_jdbc.py b/airflow/providers/apache/spark/hooks/spark_jdbc.py index a2b18ae4e30cd..648b1c6b9ce66 100644 --- a/airflow/providers/apache/spark/hooks/spark_jdbc.py +++ b/airflow/providers/apache/spark/hooks/spark_jdbc.py @@ -182,7 +182,7 @@ def _resolve_jdbc_connection(self) -> Dict[str, Any]: try: conn = self.get_connection(self._jdbc_conn_id) if conn.port: - conn_data['url'] = "{}:{}".format(conn.host, conn.port) + conn_data['url'] = f"{conn.host}:{conn.port}" else: conn_data['url'] = conn.host conn_data['schema'] = conn.schema @@ -202,7 +202,7 @@ def _build_jdbc_application_arguments(self, jdbc_conn: Dict[str, Any]) -> Any: if self._jdbc_connection['url']: arguments += [ '-url', - "{0}{1}/{2}".format(jdbc_conn['conn_prefix'], jdbc_conn['url'], jdbc_conn['schema']), + "{}{}/{}".format(jdbc_conn['conn_prefix'], jdbc_conn['url'], jdbc_conn['schema']), ] if self._jdbc_connection['user']: arguments += 
['-user', self._jdbc_connection['user']] diff --git a/airflow/providers/apache/spark/hooks/spark_sql.py b/airflow/providers/apache/spark/hooks/spark_sql.py index 8f1cf045cefa2..69ce5a6330104 100644 --- a/airflow/providers/apache/spark/hooks/spark_sql.py +++ b/airflow/providers/apache/spark/hooks/spark_sql.py @@ -137,7 +137,7 @@ def _prepare_command(self, cmd: Union[str, List[str]]) -> List[str]: elif isinstance(cmd, list): connection_cmd += cmd else: - raise AirflowException("Invalid additional command: {}".format(cmd)) + raise AirflowException(f"Invalid additional command: {cmd}") self.log.debug("Spark-Sql cmd: %s", connection_cmd) diff --git a/airflow/providers/apache/spark/hooks/spark_submit.py b/airflow/providers/apache/spark/hooks/spark_submit.py index 27ea7e1da946d..48497990b7b33 100644 --- a/airflow/providers/apache/spark/hooks/spark_submit.py +++ b/airflow/providers/apache/spark/hooks/spark_submit.py @@ -203,7 +203,7 @@ def _resolve_connection(self) -> Dict[str, Any]: # k8s://https://: conn = self.get_connection(self._conn_id) if conn.port: - conn_data['master'] = "{}:{}".format(conn.host, conn.port) + conn_data['master'] = f"{conn.host}:{conn.port}" else: conn_data['master'] = conn.host @@ -644,7 +644,7 @@ def on_kill(self) -> None: self._submit_sp.kill() if self._yarn_application_id: - kill_cmd = "yarn application -kill {}".format(self._yarn_application_id).split() + kill_cmd = f"yarn application -kill {self._yarn_application_id}".split() env = None if self._keytab is not None and self._principal is not None: # we are ignoring renewal failures from renew_from_kt diff --git a/airflow/providers/apache/sqoop/hooks/sqoop.py b/airflow/providers/apache/sqoop/hooks/sqoop.py index 22f743f92e238..d4c8fbb7d8605 100644 --- a/airflow/providers/apache/sqoop/hooks/sqoop.py +++ b/airflow/providers/apache/sqoop/hooks/sqoop.py @@ -112,14 +112,14 @@ def popen(self, cmd: List[str], **kwargs: Any) -> None: self.log.info("Command exited with return code %s", self.sub_process.returncode) if self.sub_process.returncode: - raise AirflowException("Sqoop command failed: {}".format(masked_cmd)) + raise AirflowException(f"Sqoop command failed: {masked_cmd}") def _prepare_command(self, export: bool = False) -> List[str]: sqoop_cmd_type = "export" if export else "import" connection_cmd = ["sqoop", sqoop_cmd_type] for key, value in self.properties.items(): - connection_cmd += ["-D", "{}={}".format(key, value)] + connection_cmd += ["-D", f"{key}={value}"] if self.namenode: connection_cmd += ["-fs", self.namenode] @@ -148,9 +148,9 @@ def _prepare_command(self, export: bool = False) -> List[str]: connect_str = self.conn.host if self.conn.port: - connect_str += ":{}".format(self.conn.port) + connect_str += f":{self.conn.port}" if self.conn.schema: - connect_str += "/{}".format(self.conn.schema) + connect_str += f"/{self.conn.schema}" connection_cmd += ["--connect", connect_str] return connection_cmd @@ -200,7 +200,7 @@ def _import_cmd( if extra_import_options: for key, value in extra_import_options.items(): - cmd += ['--{}'.format(key)] + cmd += [f'--{key}'] if value: cmd += [str(value)] @@ -339,7 +339,7 @@ def _export_cmd( if extra_export_options: for key, value in extra_export_options.items(): - cmd += ['--{}'.format(key)] + cmd += [f'--{key}'] if value: cmd += [str(value)] diff --git a/airflow/providers/celery/sensors/celery_queue.py b/airflow/providers/celery/sensors/celery_queue.py index d426562f32d24..dce8286e316b5 100644 --- a/airflow/providers/celery/sensors/celery_queue.py +++ 
b/airflow/providers/celery/sensors/celery_queue.py @@ -77,4 +77,4 @@ def poke(self, context: Dict[str, Any]) -> bool: return reserved == 0 and scheduled == 0 and active == 0 except KeyError: - raise KeyError('Could not locate Celery queue {0}'.format(self.celery_queue)) + raise KeyError(f'Could not locate Celery queue {self.celery_queue}') diff --git a/airflow/providers/cloudant/hooks/cloudant.py b/airflow/providers/cloudant/hooks/cloudant.py index 57e126af434b8..e490ffc4cb741 100644 --- a/airflow/providers/cloudant/hooks/cloudant.py +++ b/airflow/providers/cloudant/hooks/cloudant.py @@ -60,6 +60,4 @@ def get_conn(self) -> cloudant: def _validate_connection(self, conn: cloudant) -> None: for conn_param in ['login', 'password']: if not getattr(conn, conn_param): - raise AirflowException( - 'missing connection parameter {conn_param}'.format(conn_param=conn_param) - ) + raise AirflowException(f'missing connection parameter {conn_param}') diff --git a/airflow/providers/cncf/kubernetes/operators/kubernetes_pod.py b/airflow/providers/cncf/kubernetes/operators/kubernetes_pod.py index e3703188cfbaf..4bf38a720556b 100644 --- a/airflow/providers/cncf/kubernetes/operators/kubernetes_pod.py +++ b/airflow/providers/cncf/kubernetes/operators/kubernetes_pod.py @@ -297,10 +297,10 @@ def execute(self, context) -> Optional[str]: if final_state != State.SUCCESS: status = self.client.read_namespaced_pod(self.name, self.namespace) raise AirflowException( - 'Pod returned a failure: {state}'.format(state=status)) + f'Pod returned a failure: {status}') return result except AirflowException as ex: - raise AirflowException('Pod Launching failed: {error}'.format(error=ex)) + raise AirflowException(f'Pod Launching failed: {ex}') def handle_pod_overlap( self, labels: dict, try_numbers_match: bool, launcher: Any, pod: k8s.V1Pod @@ -319,9 +319,9 @@ def handle_pod_overlap( :param pod_list: list of pods found """ if try_numbers_match: - log_line = "found a running pod with labels {} and the same try_number.".format(labels) + log_line = f"found a running pod with labels {labels} and the same try_number." else: - log_line = "found a running pod with labels {} but a different try_number.".format(labels) + log_line = f"found a running pod with labels {labels} but a different try_number." # In case of failed pods, should reattach the first time, but only once # as the task will have already failed. 
@@ -331,7 +331,7 @@ def handle_pod_overlap( self.pod = pod final_state, result = self.monitor_launched_pod(launcher, pod) else: - log_line += "creating pod with labels {} and launcher {}".format(labels, launcher) + log_line += f"creating pod with labels {labels} and launcher {launcher}" self.log.info(log_line) final_state, _, result = self.create_new_pod_for_operator(labels, launcher) return final_state, result @@ -476,7 +476,7 @@ def monitor_launched_pod(self, launcher, pod) -> Tuple[State, Optional[str]]: self.log.error("Pod Event: %s - %s", event.reason, event.message) self.patch_already_checked(self.pod) raise AirflowException( - 'Pod returned a failure: {state}'.format(state=final_state) + f'Pod returned a failure: {final_state}' ) return final_state, result diff --git a/airflow/providers/databricks/hooks/databricks.py b/airflow/providers/databricks/hooks/databricks.py index f3b4bef4141fd..623502373f9c8 100644 --- a/airflow/providers/databricks/hooks/databricks.py +++ b/airflow/providers/databricks/hooks/databricks.py @@ -41,7 +41,7 @@ SUBMIT_RUN_ENDPOINT = ('POST', 'api/2.0/jobs/runs/submit') GET_RUN_ENDPOINT = ('GET', 'api/2.0/jobs/runs/get') CANCEL_RUN_ENDPOINT = ('POST', 'api/2.0/jobs/runs/cancel') -USER_AGENT_HEADER = {'user-agent': 'airflow-{v}'.format(v=__version__)} +USER_AGENT_HEADER = {'user-agent': f'airflow-{__version__}'} class RunState: @@ -199,7 +199,7 @@ def _do_api_call(self, endpoint_info, json): # In this case, the user probably made a mistake. # Don't retry. raise AirflowException( - 'Response: {0}, Status Code: {1}'.format(e.response.content, e.response.status_code) + f'Response: {e.response.content}, Status Code: {e.response.status_code}' ) self._log_request_error(attempt_num, e) diff --git a/airflow/providers/databricks/operators/databricks.py b/airflow/providers/databricks/operators/databricks.py index bbf9b3dcb5bb7..7602de526adda 100644 --- a/airflow/providers/databricks/operators/databricks.py +++ b/airflow/providers/databricks/operators/databricks.py @@ -52,12 +52,12 @@ def _deep_string_coerce(content, json_path: str = 'json') -> Union[str, list, di # Databricks can tolerate either numeric or string types in the API backend. 
return str(content) elif isinstance(content, (list, tuple)): - return [coerce(e, '{0}[{1}]'.format(json_path, i)) for i, e in enumerate(content)] + return [coerce(e, f'{json_path}[{i}]') for i, e in enumerate(content)] elif isinstance(content, dict): - return {k: coerce(v, '{0}[{1}]'.format(json_path, k)) for k, v in list(content.items())} + return {k: coerce(v, f'{json_path}[{k}]') for k, v in list(content.items())} else: param_type = type(content) - msg = 'Type {0} used for parameter {1} is not a number or a string'.format(param_type, json_path) + msg = f'Type {param_type} used for parameter {json_path} is not a number or a string' raise AirflowException(msg) @@ -84,7 +84,7 @@ def _handle_databricks_operator_execution(operator, hook, log, context) -> None: log.info('View run status, Spark UI, and logs at %s', run_page_url) return else: - error_message = '{t} failed with terminal state: {s}'.format(t=operator.task_id, s=run_state) + error_message = f'{operator.task_id} failed with terminal state: {run_state}' raise AirflowException(error_message) else: log.info('%s in run state: %s', operator.task_id, run_state) diff --git a/airflow/providers/dingding/hooks/dingding.py b/airflow/providers/dingding/hooks/dingding.py index b1b71988448fd..45163d7be8769 100644 --- a/airflow/providers/dingding/hooks/dingding.py +++ b/airflow/providers/dingding/hooks/dingding.py @@ -72,7 +72,7 @@ def _get_endpoint(self) -> str: raise AirflowException( 'Dingding token is requests but get nothing, ' 'check you conn_id configuration.' ) - return 'robot/send?access_token={}'.format(token) + return f'robot/send?access_token={token}' def _build_message(self) -> str: """ diff --git a/airflow/providers/docker/hooks/docker.py b/airflow/providers/docker/hooks/docker.py index 9842c259b439c..5ed87d63d22c9 100644 --- a/airflow/providers/docker/hooks/docker.py +++ b/airflow/providers/docker/hooks/docker.py @@ -58,7 +58,7 @@ def __init__( self.__version = version self.__tls = tls if conn.port: - self.__registry = "{}:{}".format(conn.host, conn.port) + self.__registry = f"{conn.host}:{conn.port}" else: self.__registry = conn.host self.__username = conn.login diff --git a/airflow/providers/docker/operators/docker.py b/airflow/providers/docker/operators/docker.py index be544119dc3e2..67262df1b4a12 100644 --- a/airflow/providers/docker/operators/docker.py +++ b/airflow/providers/docker/operators/docker.py @@ -224,7 +224,7 @@ def _run_image(self) -> Optional[str]: self.log.info('Starting docker container from image %s', self.image) with TemporaryDirectory(prefix='airflowtmp', dir=self.host_tmp_dir) as host_tmp_dir: - self.volumes.append('{0}:{1}'.format(host_tmp_dir, self.tmp_dir)) + self.volumes.append(f'{host_tmp_dir}:{self.tmp_dir}') if not self.cli: raise Exception("The 'cli' should be initialized before!") diff --git a/airflow/providers/docker/operators/docker_swarm.py b/airflow/providers/docker/operators/docker_swarm.py index 92936ad1bbf3f..18ecf9c74681b 100644 --- a/airflow/providers/docker/operators/docker_swarm.py +++ b/airflow/providers/docker/operators/docker_swarm.py @@ -127,7 +127,7 @@ def _run_service(self) -> None: resources=types.Resources(mem_limit=self.mem_limit), ), name='airflow-%s' % get_random_string(), - labels={'name': 'airflow__%s__%s' % (self.dag_id, self.task_id)}, + labels={'name': f'airflow__{self.dag_id}__{self.task_id}'}, ) self.log.info('Service started: %s', str(self.service)) diff --git a/airflow/providers/elasticsearch/hooks/elasticsearch.py 
b/airflow/providers/elasticsearch/hooks/elasticsearch.py index 7fd7b7b13fd41..8c6d1d2fddf26 100644 --- a/airflow/providers/elasticsearch/hooks/elasticsearch.py +++ b/airflow/providers/elasticsearch/hooks/elasticsearch.py @@ -67,7 +67,7 @@ def get_uri(self) -> str: login = '{conn.login}:{conn.password}@'.format(conn=conn) host = conn.host if conn.port is not None: - host += ':{port}'.format(port=conn.port) + host += f':{conn.port}' uri = '{conn.conn_type}+{conn.schema}://{login}{host}/'.format(conn=conn, login=login, host=host) extras_length = len(conn.extra_dejson) @@ -78,7 +78,7 @@ def get_uri(self) -> str: for arg_key, arg_value in conn.extra_dejson.items(): extras_length -= 1 - uri += "{arg_key}={arg_value}".format(arg_key=arg_key, arg_value=arg_value) + uri += f"{arg_key}={arg_value}" if extras_length: uri += '&' diff --git a/airflow/providers/exasol/hooks/exasol.py b/airflow/providers/exasol/hooks/exasol.py index bcd599a224994..1b01222c8f306 100644 --- a/airflow/providers/exasol/hooks/exasol.py +++ b/airflow/providers/exasol/hooks/exasol.py @@ -48,7 +48,7 @@ def get_conn(self) -> ExaConnection: conn_id = getattr(self, self.conn_name_attr) conn = self.get_connection(conn_id) conn_args = dict( - dsn='%s:%s' % (conn.host, conn.port), + dsn=f'{conn.host}:{conn.port}', user=conn.login, password=conn.password, schema=self.schema or conn.schema, diff --git a/airflow/providers/facebook/ads/hooks/ads.py b/airflow/providers/facebook/ads/hooks/ads.py index 0aee7bbc3599b..1894837d88243 100644 --- a/airflow/providers/facebook/ads/hooks/ads.py +++ b/airflow/providers/facebook/ads/hooks/ads.py @@ -87,7 +87,7 @@ def facebook_ads_config(self) -> Dict: config = conn.extra_dejson missing_keys = self.client_required_fields - config.keys() if missing_keys: - message = "{missing_keys} fields are missing".format(missing_keys=missing_keys) + message = f"{missing_keys} fields are missing" raise AirflowException(message) return config @@ -124,7 +124,7 @@ def bulk_facebook_report( self.log.info("Job run completed") break if async_status in [JobStatus.SKIPPED.value, JobStatus.FAILED.value]: - message = "{async_status}. Please retry.".format(async_status=async_status) + message = f"{async_status}. Please retry." 
raise AirflowException(message) time.sleep(sleep_time) report_run_id = _async.api_get()["report_run_id"] diff --git a/airflow/providers/google/cloud/example_dags/example_bigquery_operations.py b/airflow/providers/google/cloud/example_dags/example_bigquery_operations.py index 07f900ad07c65..9f69033807311 100644 --- a/airflow/providers/google/cloud/example_dags/example_bigquery_operations.py +++ b/airflow/providers/google/cloud/example_dags/example_bigquery_operations.py @@ -43,7 +43,7 @@ BQ_LOCATION = "europe-north1" DATASET_NAME = os.environ.get("GCP_BIGQUERY_DATASET_NAME", "test_dataset_operations") -LOCATION_DATASET_NAME = "{}_location".format(DATASET_NAME) +LOCATION_DATASET_NAME = f"{DATASET_NAME}_location" DATA_SAMPLE_GCS_URL = os.environ.get( "GCP_BIGQUERY_DATA_GCS_URL", "gs://cloud-samples-data/bigquery/us-states/us-states.csv", diff --git a/airflow/providers/google/cloud/example_dags/example_cloud_build.py b/airflow/providers/google/cloud/example_dags/example_cloud_build.py index 445fdb07250e5..9a74039296f51 100644 --- a/airflow/providers/google/cloud/example_dags/example_cloud_build.py +++ b/airflow/providers/google/cloud/example_dags/example_cloud_build.py @@ -54,10 +54,10 @@ "steps": [ { "name": "gcr.io/cloud-builders/docker", - "args": ["build", "-t", "gcr.io/$PROJECT_ID/{}".format(GCP_SOURCE_BUCKET_NAME), "."], + "args": ["build", "-t", f"gcr.io/$PROJECT_ID/{GCP_SOURCE_BUCKET_NAME}", "."], } ], - "images": ["gcr.io/$PROJECT_ID/{}".format(GCP_SOURCE_BUCKET_NAME)], + "images": [f"gcr.io/$PROJECT_ID/{GCP_SOURCE_BUCKET_NAME}"], } # [END howto_operator_gcp_create_build_from_storage_body] diff --git a/airflow/providers/google/cloud/example_dags/example_compute.py b/airflow/providers/google/cloud/example_dags/example_compute.py index 5f2d3326610a2..d2b336edff1f0 100644 --- a/airflow/providers/google/cloud/example_dags/example_compute.py +++ b/airflow/providers/google/cloud/example_dags/example_compute.py @@ -82,7 +82,7 @@ project_id=GCP_PROJECT_ID, zone=GCE_ZONE, resource_id=GCE_INSTANCE, - body={'machineType': 'zones/{}/machineTypes/{}'.format(GCE_ZONE, GCE_SHORT_MACHINE_TYPE_NAME)}, + body={'machineType': f'zones/{GCE_ZONE}/machineTypes/{GCE_SHORT_MACHINE_TYPE_NAME}'}, task_id='gcp_compute_set_machine_type', ) # [END howto_operator_gce_set_machine_type] @@ -91,7 +91,7 @@ gce_set_machine_type2 = ComputeEngineSetMachineTypeOperator( zone=GCE_ZONE, resource_id=GCE_INSTANCE, - body={'machineType': 'zones/{}/machineTypes/{}'.format(GCE_ZONE, GCE_SHORT_MACHINE_TYPE_NAME)}, + body={'machineType': f'zones/{GCE_ZONE}/machineTypes/{GCE_SHORT_MACHINE_TYPE_NAME}'}, task_id='gcp_compute_set_machine_type2', ) # [END howto_operator_gce_set_machine_type_no_project_id] diff --git a/airflow/providers/google/cloud/example_dags/example_dataproc.py b/airflow/providers/google/cloud/example_dags/example_dataproc.py index abf94ba86b2e2..458aab529d188 100644 --- a/airflow/providers/google/cloud/example_dags/example_dataproc.py +++ b/airflow/providers/google/cloud/example_dags/example_dataproc.py @@ -38,11 +38,11 @@ ZONE = os.environ.get("GCP_REGION", "europe-west1-b") BUCKET = os.environ.get("GCP_DATAPROC_BUCKET", "dataproc-system-tests") OUTPUT_FOLDER = "wordcount" -OUTPUT_PATH = "gs://{}/{}/".format(BUCKET, OUTPUT_FOLDER) +OUTPUT_PATH = f"gs://{BUCKET}/{OUTPUT_FOLDER}/" PYSPARK_MAIN = os.environ.get("PYSPARK_MAIN", "hello_world.py") -PYSPARK_URI = "gs://{}/{}".format(BUCKET, PYSPARK_MAIN) +PYSPARK_URI = f"gs://{BUCKET}/{PYSPARK_MAIN}" SPARKR_MAIN = os.environ.get("SPARKR_MAIN", "hello_world.R") 
-SPARKR_URI = "gs://{}/{}".format(BUCKET, SPARKR_MAIN) +SPARKR_URI = f"gs://{BUCKET}/{SPARKR_MAIN}" # Cluster definition # [START how_to_cloud_dataproc_create_cluster] diff --git a/airflow/providers/google/cloud/example_dags/example_life_sciences.py b/airflow/providers/google/cloud/example_dags/example_life_sciences.py index 9c26a7203c4ae..3892050403c57 100644 --- a/airflow/providers/google/cloud/example_dags/example_life_sciences.py +++ b/airflow/providers/google/cloud/example_dags/example_life_sciences.py @@ -35,7 +35,7 @@ {"imageUri": "bash", "commands": ["-c", "echo Hello, world"]}, ], "resources": { - "regions": ["{}".format(LOCATION)], + "regions": [f"{LOCATION}"], "virtualMachine": { "machineType": "n1-standard-1", }, @@ -50,7 +50,7 @@ "actions": [ { "imageUri": "google/cloud-sdk", - "commands": ["gsutil", "cp", "gs://{}/{}".format(BUCKET, FILENAME), "/tmp"], + "commands": ["gsutil", "cp", f"gs://{BUCKET}/{FILENAME}", "/tmp"], }, {"imageUri": "bash", "commands": ["-c", "echo Hello, world"]}, { @@ -58,13 +58,13 @@ "commands": [ "gsutil", "cp", - "gs://{}/{}".format(BUCKET, FILENAME), - "gs://{}/output.in".format(BUCKET), + f"gs://{BUCKET}/{FILENAME}", + f"gs://{BUCKET}/output.in", ], }, ], "resources": { - "regions": ["{}".format(LOCATION)], + "regions": [f"{LOCATION}"], "virtualMachine": { "machineType": "n1-standard-1", }, diff --git a/airflow/providers/google/cloud/example_dags/example_mlengine.py b/airflow/providers/google/cloud/example_dags/example_mlengine.py index 55c2da37cf008..43a0e469d85a1 100644 --- a/airflow/providers/google/cloud/example_dags/example_mlengine.py +++ b/airflow/providers/google/cloud/example_dags/example_mlengine.py @@ -113,7 +113,7 @@ version={ "name": "v1", "description": "First-version", - "deployment_uri": '{}/keras_export/'.format(JOB_DIR), + "deployment_uri": f'{JOB_DIR}/keras_export/', "runtime_version": "1.15", "machineType": "mls1-c1-m2", "framework": "TENSORFLOW", @@ -220,11 +220,11 @@ def validate_err_and_count(summary: Dict) -> Dict: Validate summary result """ if summary['val'] > 1: - raise ValueError('Too high val>1; summary={}'.format(summary)) + raise ValueError(f'Too high val>1; summary={summary}') if summary['val'] < 0: - raise ValueError('Too low val<0; summary={}'.format(summary)) + raise ValueError(f'Too low val<0; summary={summary}') if summary['count'] != 20: - raise ValueError('Invalid value val != 20; summary={}'.format(summary)) + raise ValueError(f'Invalid value val != 20; summary={summary}') return summary # [END howto_operator_gcp_mlengine_validate_error] diff --git a/airflow/providers/google/cloud/example_dags/example_speech_to_text.py b/airflow/providers/google/cloud/example_dags/example_speech_to_text.py index 58a5b814fe53a..66778c741a01b 100644 --- a/airflow/providers/google/cloud/example_dags/example_speech_to_text.py +++ b/airflow/providers/google/cloud/example_dags/example_speech_to_text.py @@ -38,7 +38,7 @@ # [START howto_operator_speech_to_text_api_arguments] CONFIG = {"encoding": "LINEAR16", "language_code": "en_US"} -AUDIO = {"uri": "gs://{bucket}/{object}".format(bucket=BUCKET_NAME, object=FILENAME)} +AUDIO = {"uri": f"gs://{BUCKET_NAME}/{FILENAME}"} # [END howto_operator_speech_to_text_api_arguments] with models.DAG( diff --git a/airflow/providers/google/cloud/example_dags/example_tasks.py b/airflow/providers/google/cloud/example_dags/example_tasks.py index 9b029546e045a..207599b0a56ee 100644 --- a/airflow/providers/google/cloud/example_dags/example_tasks.py +++ 
b/airflow/providers/google/cloud/example_dags/example_tasks.py @@ -49,7 +49,7 @@ "app_engine_http_request": { # Specify the type of request. "http_method": "POST", "relative_uri": "/example_task_handler", - "body": "Hello".encode(), + "body": b"Hello", }, "schedule_time": timestamp, } diff --git a/airflow/providers/google/cloud/example_dags/example_translate_speech.py b/airflow/providers/google/cloud/example_dags/example_translate_speech.py index 61c7579b0fd7a..f8b47842ffa43 100644 --- a/airflow/providers/google/cloud/example_dags/example_translate_speech.py +++ b/airflow/providers/google/cloud/example_dags/example_translate_speech.py @@ -38,7 +38,7 @@ # [START howto_operator_translate_speech_arguments] CONFIG = {"encoding": "LINEAR16", "language_code": "en_US"} -AUDIO = {"uri": "gs://{bucket}/{object}".format(bucket=BUCKET_NAME, object=FILENAME)} +AUDIO = {"uri": f"gs://{BUCKET_NAME}/{FILENAME}"} TARGET_LANGUAGE = 'pl' FORMAT = 'text' MODEL = 'base' diff --git a/airflow/providers/google/cloud/example_dags/example_video_intelligence.py b/airflow/providers/google/cloud/example_dags/example_video_intelligence.py index 05a3623f994bb..dbb63723d5921 100644 --- a/airflow/providers/google/cloud/example_dags/example_video_intelligence.py +++ b/airflow/providers/google/cloud/example_dags/example_video_intelligence.py @@ -43,7 +43,7 @@ # [START howto_operator_video_intelligence_other_args] -INPUT_URI = "gs://{}/video.mp4".format(GCP_BUCKET_NAME) +INPUT_URI = f"gs://{GCP_BUCKET_NAME}/video.mp4" # [END howto_operator_video_intelligence_other_args] diff --git a/airflow/providers/google/cloud/hooks/bigquery.py b/airflow/providers/google/cloud/hooks/bigquery.py index fa0d627dfe373..96e40560aa439 100644 --- a/airflow/providers/google/cloud/hooks/bigquery.py +++ b/airflow/providers/google/cloud/hooks/bigquery.py @@ -1661,8 +1661,8 @@ def run_load( # pylint: disable=too-many-locals,too-many-arguments,invalid-name ] if source_format not in allowed_formats: raise ValueError( - "{0} is not a valid source format. " - "Please use one of the following types: {1}".format(source_format, allowed_formats) + "{} is not a valid source format. " + "Please use one of the following types: {}".format(source_format, allowed_formats) ) # bigquery also allows you to define how you want a table's schema to change @@ -1672,8 +1672,8 @@ def run_load( # pylint: disable=too-many-locals,too-many-arguments,invalid-name allowed_schema_update_options = ['ALLOW_FIELD_ADDITION', "ALLOW_FIELD_RELAXATION"] if not set(allowed_schema_update_options).issuperset(set(schema_update_options)): raise ValueError( - "{0} contains invalid schema update options." - "Please only use one or more of the following options: {1}".format( + "{} contains invalid schema update options." + "Please only use one or more of the following options: {}".format( schema_update_options, allowed_schema_update_options ) ) @@ -2078,9 +2078,9 @@ def run_query( if not set(allowed_schema_update_options).issuperset(set(schema_update_options)): raise ValueError( - "{0} contains invalid schema update options. " + "{} contains invalid schema update options. 
" "Please only use one or more of the following " - "options: {1}".format(schema_update_options, allowed_schema_update_options) + "options: {}".format(schema_update_options, allowed_schema_update_options) ) if schema_update_options: @@ -2790,7 +2790,7 @@ def _bq_cast(string_field: str, bq_type: str) -> Union[None, int, float, bool, s return float(string_field) elif bq_type == 'BOOLEAN': if string_field not in ['true', 'false']: - raise ValueError("{} must have value 'true' or 'false'".format(string_field)) + raise ValueError(f"{string_field} must have value 'true' or 'false'") return string_field == 'true' else: return string_field @@ -2812,7 +2812,7 @@ def var_print(var_name): if var_name is None: return "" else: - return "Format exception for {var}: ".format(var=var_name) + return f"Format exception for {var_name}: " if table_input.count('.') + table_input.count(':') > 3: raise Exception( @@ -2927,6 +2927,6 @@ def _validate_src_fmt_configs( for k, v in src_fmt_configs.items(): if k not in valid_configs: - raise ValueError("{0} is not a valid src_fmt_configs for type {1}.".format(k, source_format)) + raise ValueError(f"{k} is not a valid src_fmt_configs for type {source_format}.") return src_fmt_configs diff --git a/airflow/providers/google/cloud/hooks/cloud_sql.py b/airflow/providers/google/cloud/hooks/cloud_sql.py index f26b76285a856..0443be78f3905 100644 --- a/airflow/providers/google/cloud/hooks/cloud_sql.py +++ b/airflow/providers/google/cloud/hooks/cloud_sql.py @@ -358,7 +358,7 @@ def import_instance(self, instance: str, body: Dict, project_id: str) -> None: operation_name = response["name"] self._wait_for_operation_to_complete(project_id=project_id, operation_name=operation_name) except HttpError as ex: - raise AirflowException('Importing instance {} failed: {}'.format(instance, ex.content)) + raise AirflowException(f'Importing instance {instance} failed: {ex.content}') def _wait_for_operation_to_complete(self, project_id: str, operation_name: str) -> None: """ @@ -544,7 +544,7 @@ def start_proxy(self) -> None: """ self._download_sql_proxy_if_needed() if self.sql_proxy_process: - raise AirflowException("The sql proxy is already running: {}".format(self.sql_proxy_process)) + raise AirflowException(f"The sql proxy is already running: {self.sql_proxy_process}") else: command_to_run = [self.sql_proxy_path] command_to_run.extend(self.command_line_parameters) @@ -564,13 +564,13 @@ def start_proxy(self) -> None: if line == '' and return_code is not None: self.sql_proxy_process = None raise AirflowException( - "The cloud_sql_proxy finished early with return code {}!".format(return_code) + f"The cloud_sql_proxy finished early with return code {return_code}!" 
) if line != '': self.log.info(line) if "googleapi: Error" in line or "invalid instance name:" in line: self.stop_proxy() - raise AirflowException("Error when starting the cloud_sql_proxy {}!".format(line)) + raise AirflowException(f"Error when starting the cloud_sql_proxy {line}!") if "Ready for new connections" in line: return @@ -758,11 +758,9 @@ def _get_bool(val: Any) -> bool: @staticmethod def _check_ssl_file(file_to_check, name) -> None: if not file_to_check: - raise AirflowException("SSL connections requires {name} to be set".format(name=name)) + raise AirflowException(f"SSL connections requires {name} to be set") if not os.path.isfile(file_to_check): - raise AirflowException( - "The {file_to_check} must be a readable file".format(file_to_check=file_to_check) - ) + raise AirflowException(f"The {file_to_check} must be a readable file") def _validate_inputs(self) -> None: if self.project_id == '': diff --git a/airflow/providers/google/cloud/hooks/compute.py b/airflow/providers/google/cloud/hooks/compute.py index 960bbdaf9e721..684ce2ab8b151 100644 --- a/airflow/providers/google/cloud/hooks/compute.py +++ b/airflow/providers/google/cloud/hooks/compute.py @@ -355,7 +355,7 @@ def _wait_for_operation_to_complete( msg = operation_response.get("httpErrorMessage") # Extracting the errors list as string and trimming square braces error_msg = str(error.get("errors"))[1:-1] - raise AirflowException("{} {}: ".format(code, msg) + error_msg) + raise AirflowException(f"{code} {msg}: " + error_msg) break time.sleep(TIME_TO_SLEEP_IN_SECONDS) diff --git a/airflow/providers/google/cloud/hooks/dataflow.py b/airflow/providers/google/cloud/hooks/dataflow.py index 186c1dd82713f..cf9f559598e89 100644 --- a/airflow/providers/google/cloud/hooks/dataflow.py +++ b/airflow/providers/google/cloud/hooks/dataflow.py @@ -418,7 +418,7 @@ def wait_for_done(self) -> Optional[str]: self.log.info("Process exited with return code: %s", self._proc.returncode) if self._proc.returncode != 0: - raise Exception("DataFlow failed with return code {}".format(self._proc.returncode)) + raise Exception(f"DataFlow failed with return code {self._proc.returncode}") return self.job_id @@ -757,7 +757,7 @@ def start_python_dataflow( # pylint: disable=too-many-arguments variables['region'] = location def label_formatter(labels_dict): - return ['--labels={}={}'.format(key, value) for key, value in labels_dict.items()] + return [f'--labels={key}={value}' for key, value in labels_dict.items()] if py_requirements is not None: if not py_requirements and not py_system_site_packages: @@ -826,7 +826,7 @@ def _build_dataflow_job_name(job_name: str, append_job_name: bool = True) -> str def _build_cmd(variables: dict, label_formatter: Callable, project_id: str) -> List[str]: command = [ "--runner=DataflowRunner", - "--project={}".format(project_id), + f"--project={project_id}", ] if variables is None: return command diff --git a/airflow/providers/google/cloud/hooks/dataproc.py b/airflow/providers/google/cloud/hooks/dataproc.py index eca2ebbb0ef4e..c8f3644825239 100644 --- a/airflow/providers/google/cloud/hooks/dataproc.py +++ b/airflow/providers/google/cloud/hooks/dataproc.py @@ -212,9 +212,7 @@ class DataprocHook(GoogleBaseHook): def get_cluster_client(self, location: Optional[str] = None) -> ClusterControllerClient: """Returns ClusterControllerClient.""" - client_options = ( - {'api_endpoint': '{}-dataproc.googleapis.com:443'.format(location)} if location else None - ) + client_options = {'api_endpoint': f'{location}-dataproc.googleapis.com:443'} 
if location else None return ClusterControllerClient( credentials=self._get_credentials(), client_info=self.client_info, client_options=client_options @@ -229,9 +227,7 @@ def get_template_client(self) -> WorkflowTemplateServiceClient: def get_job_client(self, location: Optional[str] = None) -> JobControllerClient: """Returns JobControllerClient.""" - client_options = ( - {'api_endpoint': '{}-dataproc.googleapis.com:443'.format(location)} if location else None - ) + client_options = {'api_endpoint': f'{location}-dataproc.googleapis.com:443'} if location else None return JobControllerClient( credentials=self._get_credentials(), client_info=self.client_info, client_options=client_options @@ -735,9 +731,9 @@ def wait_for_job( self.log.info("Retrying. Dataproc API returned server error when waiting for job: %s", err) if state == JobStatus.ERROR: - raise AirflowException('Job failed:\n{}'.format(job)) + raise AirflowException(f'Job failed:\n{job}') if state == JobStatus.CANCELLED: - raise AirflowException('Job was cancelled:\n{}'.format(job)) + raise AirflowException(f'Job was cancelled:\n{job}') @GoogleBaseHook.fallback_to_default_project_id def get_job( diff --git a/airflow/providers/google/cloud/hooks/dlp.py b/airflow/providers/google/cloud/hooks/dlp.py index e88b655b69987..7cf5da23586b3 100644 --- a/airflow/providers/google/cloud/hooks/dlp.py +++ b/airflow/providers/google/cloud/hooks/dlp.py @@ -260,7 +260,7 @@ def create_dlp_job( if match is not None: job_name = match.groupdict()["job"] else: - raise AirflowException("Unable to retrieve DLP job's ID from {}.".format(job.name)) + raise AirflowException(f"Unable to retrieve DLP job's ID from {job.name}.") while wait_until_finished: job = self.get_dlp_job(dlp_job_id=job_name, project_id=project_id) diff --git a/airflow/providers/google/cloud/hooks/functions.py b/airflow/providers/google/cloud/hooks/functions.py index a4efe099950dc..8cb93dfadd4b6 100644 --- a/airflow/providers/google/cloud/hooks/functions.py +++ b/airflow/providers/google/cloud/hooks/functions.py @@ -65,7 +65,7 @@ def _full_location(project_id: str, location: str) -> str: :type location: str :return: """ - return 'projects/{}/locations/{}'.format(project_id, location) + return f'projects/{project_id}/locations/{location}' def get_conn(self) -> build: """ diff --git a/airflow/providers/google/cloud/hooks/gcs.py b/airflow/providers/google/cloud/hooks/gcs.py index 8740a44b5bab2..6d2f3480fcec8 100644 --- a/airflow/providers/google/cloud/hooks/gcs.py +++ b/airflow/providers/google/cloud/hooks/gcs.py @@ -414,7 +414,7 @@ def get_blob_update_time(self, bucket_name: str, object_name: str): bucket = client.bucket(bucket_name) blob = bucket.get_blob(blob_name=object_name) if blob is None: - raise ValueError("Object ({}) not found in Bucket ({})".format(object_name, bucket_name)) + raise ValueError(f"Object ({object_name}) not found in Bucket ({bucket_name})") return blob.updated def is_updated_after(self, bucket_name: str, object_name: str, ts: datetime) -> bool: diff --git a/airflow/providers/google/cloud/hooks/mlengine.py b/airflow/providers/google/cloud/hooks/mlengine.py index a843b3bf03fb0..b17cc46fef257 100644 --- a/airflow/providers/google/cloud/hooks/mlengine.py +++ b/airflow/providers/google/cloud/hooks/mlengine.py @@ -57,7 +57,7 @@ def _poll_with_exponential_delay(request, execute_num_retries, max_n, is_done_fu try: response = request.execute(num_retries=execute_num_retries) if is_error_func(response): - raise ValueError('The response contained an error: {}'.format(response)) + 
raise ValueError(f'The response contained an error: {response}') if is_done_func(response): log.info('Operation is done: %s', response) return response @@ -70,7 +70,7 @@ def _poll_with_exponential_delay(request, execute_num_retries, max_n, is_done_fu else: time.sleep((2 ** i) + (random.randint(0, 1000) / 1000)) - raise ValueError('Connection could not be established after {} retries.'.format(max_n)) + raise ValueError(f'Connection could not be established after {max_n} retries.') class MLEngineHook(GoogleBaseHook): @@ -128,7 +128,7 @@ def create_job(self, job: dict, project_id: str, use_existing_job_fn: Optional[C self._append_label(job) self.log.info("Creating job.") # pylint: disable=no-member - request = hook.projects().jobs().create(parent='projects/{}'.format(project_id), body=job) + request = hook.projects().jobs().create(parent=f'projects/{project_id}', body=job) job_id = job['jobId'] try: @@ -203,7 +203,7 @@ def _get_job(self, project_id: str, job_id: str) -> dict: :raises: googleapiclient.errors.HttpError """ hook = self.get_conn() - job_name = 'projects/{}/jobs/{}'.format(project_id, job_id) + job_name = f'projects/{project_id}/jobs/{job_id}' request = hook.projects().jobs().get(name=job_name) # pylint: disable=no-member while True: try: @@ -266,7 +266,7 @@ def create_version( :rtype: dict """ hook = self.get_conn() - parent_name = 'projects/{}/models/{}'.format(project_id, model_name) + parent_name = f'projects/{project_id}/models/{model_name}' self._append_label(version_spec) @@ -307,7 +307,7 @@ def set_default_version( :raises: googleapiclient.errors.HttpError """ hook = self.get_conn() - full_version_name = 'projects/{}/models/{}/versions/{}'.format(project_id, model_name, version_name) + full_version_name = f'projects/{project_id}/models/{model_name}/versions/{version_name}' # pylint: disable=no-member request = hook.projects().models().versions().setDefault(name=full_version_name, body={}) @@ -340,7 +340,7 @@ def list_versions( """ hook = self.get_conn() result = [] # type: List[Dict] - full_parent_name = 'projects/{}/models/{}'.format(project_id, model_name) + full_parent_name = f'projects/{project_id}/models/{model_name}' # pylint: disable=no-member request = hook.projects().models().versions().list(parent=full_parent_name, pageSize=100) @@ -378,7 +378,7 @@ def delete_version( :rtype: Dict """ hook = self.get_conn() - full_name = 'projects/{}/models/{}/versions/{}'.format(project_id, model_name, version_name) + full_name = f'projects/{project_id}/models/{model_name}/versions/{version_name}' delete_request = ( hook.projects().models().versions().delete(name=full_name) # pylint: disable=no-member ) @@ -415,7 +415,7 @@ def create_model( hook = self.get_conn() if 'name' not in model or not model['name']: raise ValueError("Model name must be provided and " "could not be an empty string") - project = 'projects/{}'.format(project_id) + project = f'projects/{project_id}' self._append_label(model) try: @@ -467,7 +467,7 @@ def get_model( hook = self.get_conn() if not model_name: raise ValueError("Model name must be provided and " "it could not be an empty string") - full_model_name = 'projects/{}/models/{}'.format(project_id, model_name) + full_model_name = f'projects/{project_id}/models/{model_name}' request = hook.projects().models().get(name=full_model_name) # pylint: disable=no-member try: return request.execute(num_retries=self.num_retries) @@ -502,7 +502,7 @@ def delete_model( if not model_name: raise ValueError("Model name must be provided and it could not be an empty 
string") - model_path = 'projects/{}/models/{}'.format(project_id, model_name) + model_path = f'projects/{project_id}/models/{model_name}' if delete_contents: self._delete_all_versions(model_name, project_id) request = hook.projects().models().delete(name=model_path) # pylint: disable=no-member diff --git a/airflow/providers/google/cloud/hooks/pubsub.py b/airflow/providers/google/cloud/hooks/pubsub.py index 3a62ce73e6085..f2ae19089a149 100644 --- a/airflow/providers/google/cloud/hooks/pubsub.py +++ b/airflow/providers/google/cloud/hooks/pubsub.py @@ -227,9 +227,9 @@ def create_topic( except AlreadyExists: self.log.warning('Topic already exists: %s', topic) if fail_if_exists: - raise PubSubException('Topic already exists: {}'.format(topic)) + raise PubSubException(f'Topic already exists: {topic}') except GoogleAPICallError as e: - raise PubSubException('Error creating topic {}'.format(topic), e) + raise PubSubException(f'Error creating topic {topic}', e) self.log.info("Created topic (path) %s", topic_path) @@ -280,9 +280,9 @@ def delete_topic( except NotFound: self.log.warning('Topic does not exist: %s', topic_path) if fail_if_not_exists: - raise PubSubException('Topic does not exist: {}'.format(topic_path)) + raise PubSubException(f'Topic does not exist: {topic_path}') except GoogleAPICallError as e: - raise PubSubException('Error deleting topic {}'.format(topic), e) + raise PubSubException(f'Error deleting topic {topic}', e) self.log.info("Deleted topic (path) %s", topic_path) # pylint: disable=too-many-arguments @@ -392,7 +392,7 @@ def create_subscription( subscriber = self.subscriber_client if not subscription: - subscription = 'sub-{}'.format(uuid4()) + subscription = f'sub-{uuid4()}' if not subscription_project_id: subscription_project_id = project_id @@ -426,9 +426,9 @@ def create_subscription( except AlreadyExists: self.log.warning('Subscription already exists: %s', subscription_path) if fail_if_exists: - raise PubSubException('Subscription already exists: {}'.format(subscription_path)) + raise PubSubException(f'Subscription already exists: {subscription_path}') except GoogleAPICallError as e: - raise PubSubException('Error creating subscription {}'.format(subscription_path), e) + raise PubSubException(f'Error creating subscription {subscription_path}', e) self.log.info("Created subscription (path) %s for topic (path) %s", subscription_path, topic_path) return subscription @@ -478,9 +478,9 @@ def delete_subscription( except NotFound: self.log.warning('Subscription does not exist: %s', subscription_path) if fail_if_not_exists: - raise PubSubException('Subscription does not exist: {}'.format(subscription_path)) + raise PubSubException(f'Subscription does not exist: {subscription_path}') except GoogleAPICallError as e: - raise PubSubException('Error deleting subscription {}'.format(subscription_path), e) + raise PubSubException(f'Error deleting subscription {subscription_path}', e) self.log.info("Deleted subscription (path) %s", subscription_path) @@ -544,7 +544,7 @@ def pull( self.log.info("Pulled %d messages from subscription (path) %s", len(result), subscription_path) return result except (HttpError, GoogleAPICallError) as e: - raise PubSubException('Error pulling messages from subscription {}'.format(subscription_path), e) + raise PubSubException(f'Error pulling messages from subscription {subscription_path}', e) @GoogleBaseHook.fallback_to_default_project_id def acknowledge( diff --git a/airflow/providers/google/cloud/hooks/spanner.py 
b/airflow/providers/google/cloud/hooks/spanner.py index 6344f3769d30a..90cdb3255e099 100644 --- a/airflow/providers/google/cloud/hooks/spanner.py +++ b/airflow/providers/google/cloud/hooks/spanner.py @@ -247,9 +247,7 @@ def get_database( """ instance = self._get_client(project_id=project_id).instance(instance_id=instance_id) if not instance.exists(): - raise AirflowException( - "The instance {} does not exist in project {} !".format(instance_id, project_id) - ) + raise AirflowException(f"The instance {instance_id} does not exist in project {project_id} !") database = instance.database(database_id=database_id) if not database.exists(): return None @@ -281,9 +279,7 @@ def create_database( """ instance = self._get_client(project_id=project_id).instance(instance_id=instance_id) if not instance.exists(): - raise AirflowException( - "The instance {} does not exist in project {} !".format(instance_id, project_id) - ) + raise AirflowException(f"The instance {instance_id} does not exist in project {project_id} !") database = instance.database(database_id=database_id, ddl_statements=ddl_statements) try: operation = database.create() # type: Operation @@ -324,9 +320,7 @@ def update_database( """ instance = self._get_client(project_id=project_id).instance(instance_id=instance_id) if not instance.exists(): - raise AirflowException( - "The instance {} does not exist in project {} !".format(instance_id, project_id) - ) + raise AirflowException(f"The instance {instance_id} does not exist in project {project_id} !") database = instance.database(database_id=database_id) try: operation = database.update_ddl(ddl_statements=ddl_statements, operation_id=operation_id) @@ -363,9 +357,7 @@ def delete_database(self, instance_id: str, database_id, project_id: str) -> boo """ instance = self._get_client(project_id=project_id).instance(instance_id=instance_id) if not instance.exists(): - raise AirflowException( - "The instance {} does not exist in project {} !".format(instance_id, project_id) - ) + raise AirflowException(f"The instance {instance_id} does not exist in project {project_id} !") database = instance.database(database_id=database_id) if not database.exists(): self.log.info( diff --git a/airflow/providers/google/cloud/hooks/stackdriver.py b/airflow/providers/google/cloud/hooks/stackdriver.py index 06a62e228a9b4..9da1afa409a36 100644 --- a/airflow/providers/google/cloud/hooks/stackdriver.py +++ b/airflow/providers/google/cloud/hooks/stackdriver.py @@ -110,7 +110,7 @@ def list_alert_policies( """ client = self._get_policy_client() policies_ = client.list_alert_policies( - name='projects/{project_id}'.format(project_id=project_id), + name=f'projects/{project_id}', filter_=filter_, order_by=order_by, page_size=page_size, @@ -288,7 +288,7 @@ def upsert_alert( old_name = channel.name channel.ClearField('name') new_channel = channel_client.create_notification_channel( - name='projects/{project_id}'.format(project_id=project_id), + name=f'projects/{project_id}', notification_channel=channel, retry=retry, timeout=timeout, @@ -317,7 +317,7 @@ def upsert_alert( for condition in policy.conditions: condition.ClearField('name') policy_client.create_alert_policy( - name='projects/{project_id}'.format(project_id=project_id), + name=f'projects/{project_id}', alert_policy=policy, retry=retry, timeout=timeout, @@ -351,7 +351,7 @@ def delete_alert_policy( try: policy_client.delete_alert_policy(name=name, retry=retry, timeout=timeout, metadata=metadata) except HttpError as err: - raise AirflowException('Delete alerting policy 
failed. Error was {}'.format(err.content)) + raise AirflowException(f'Delete alerting policy failed. Error was {err.content}') @GoogleBaseHook.fallback_to_default_project_id def list_notification_channels( @@ -405,7 +405,7 @@ def list_notification_channels( """ client = self._get_channel_client() channels = client.list_notification_channels( - name='projects/{project_id}'.format(project_id=project_id), + name=f'projects/{project_id}', filter_=filter_, order_by=order_by, page_size=page_size, @@ -431,9 +431,7 @@ def _toggle_channel_status( metadata: Optional[str] = None, ) -> None: client = self._get_channel_client() - channels = client.list_notification_channels( - name='projects/{project_id}'.format(project_id=project_id), filter_=filter_ - ) + channels = client.list_notification_channels(name=f'projects/{project_id}', filter_=filter_) for channel in channels: if channel.enabled.value != bool(new_state): channel.enabled.value = bool(new_state) @@ -582,7 +580,7 @@ def upsert_channel( old_name = channel.name channel.ClearField('name') new_channel = channel_client.create_notification_channel( - name='projects/{project_id}'.format(project_id=project_id), + name=f'projects/{project_id}', notification_channel=channel, retry=retry, timeout=timeout, @@ -621,4 +619,4 @@ def delete_notification_channel( name=name, retry=retry, timeout=timeout, metadata=metadata ) except HttpError as err: - raise AirflowException('Delete notification channel failed. Error was {}'.format(err.content)) + raise AirflowException(f'Delete notification channel failed. Error was {err.content}') diff --git a/airflow/providers/google/cloud/hooks/vision.py b/airflow/providers/google/cloud/hooks/vision.py index b0d5a132de596..1407935e27813 100644 --- a/airflow/providers/google/cloud/hooks/vision.py +++ b/airflow/providers/google/cloud/hooks/vision.py @@ -672,7 +672,7 @@ def _get_autogenerated_id(response) -> str: try: name = response.name except AttributeError as e: - raise AirflowException('Unable to get name from response... [{}]\n{}'.format(response, e)) + raise AirflowException(f'Unable to get name from response... [{response}]\n{e}') if '/' not in name: - raise AirflowException('Unable to get id from name... [{}]'.format(name)) + raise AirflowException(f'Unable to get id from name... 
[{name}]') return name.rsplit('/', 1)[1] diff --git a/airflow/providers/google/cloud/log/gcs_task_handler.py b/airflow/providers/google/cloud/log/gcs_task_handler.py index eac4e22299fcf..9fd456d6e1260 100644 --- a/airflow/providers/google/cloud/log/gcs_task_handler.py +++ b/airflow/providers/google/cloud/log/gcs_task_handler.py @@ -127,7 +127,7 @@ def close(self): remote_loc = os.path.join(self.remote_base, self.log_relative_path) if os.path.exists(local_loc): # read log and remove old logs to get just the latest additions - with open(local_loc, 'r') as logfile: + with open(local_loc) as logfile: log = logfile.read() self.gcs_write(log, remote_loc) @@ -153,7 +153,7 @@ def _read(self, ti, try_number, metadata=None): try: blob = storage.Blob.from_string(remote_loc, self.client) remote_log = blob.download_as_string() - log = '*** Reading remote log from {}.\n{}\n'.format(remote_loc, remote_log) + log = f'*** Reading remote log from {remote_loc}.\n{remote_log}\n' return log, {'end_of_log': True} except Exception as e: # pylint: disable=broad-except log = '*** Unable to read remote log from {}\n*** {}\n\n'.format(remote_loc, str(e)) diff --git a/airflow/providers/google/cloud/log/stackdriver_task_handler.py b/airflow/providers/google/cloud/log/stackdriver_task_handler.py index 45c6269734df5..be75fcd973e7f 100644 --- a/airflow/providers/google/cloud/log/stackdriver_task_handler.py +++ b/airflow/providers/google/cloud/log/stackdriver_task_handler.py @@ -164,7 +164,7 @@ def read( :rtype: Tuple[List[str], List[Dict]] """ if try_number is not None and try_number < 1: - logs = ["Error fetching the logs. Try number {} is invalid.".format(try_number)] + logs = [f"Error fetching the logs. Try number {try_number} is invalid."] return logs, [{"end_of_log": "true"}] if not metadata: diff --git a/airflow/providers/google/cloud/operators/bigquery.py b/airflow/providers/google/cloud/operators/bigquery.py index 83aaa0c9328f7..8f6ac11d98c68 100644 --- a/airflow/providers/google/cloud/operators/bigquery.py +++ b/airflow/providers/google/cloud/operators/bigquery.py @@ -2025,7 +2025,7 @@ def __init__( def prepare_template(self) -> None: # If .json is passed then we have to read the file if isinstance(self.configuration, str) and self.configuration.endswith('.json'): - with open(self.configuration, 'r') as file: + with open(self.configuration) as file: self.configuration = json.loads(file.read()) def _submit_job( diff --git a/airflow/providers/google/cloud/operators/bigtable.py b/airflow/providers/google/cloud/operators/bigtable.py index ad375c29b7492..3fd5d99351b6c 100644 --- a/airflow/providers/google/cloud/operators/bigtable.py +++ b/airflow/providers/google/cloud/operators/bigtable.py @@ -37,7 +37,7 @@ class BigtableValidationMixin: def _validate_inputs(self): for attr_name in self.REQUIRED_ATTRIBUTES: if not getattr(self, attr_name): - raise AirflowException('Empty parameter: {}'.format(attr_name)) + raise AirflowException(f'Empty parameter: {attr_name}') class BigtableCreateInstanceOperator(BaseOperator, BigtableValidationMixin): @@ -462,7 +462,7 @@ def execute(self, context) -> None: except google.api_core.exceptions.AlreadyExists: if not self._compare_column_families(hook, instance): raise AirflowException( - "Table '{}' already exists with different Column Families.".format(self.table_id) + f"Table '{self.table_id}' already exists with different Column Families." ) self.log.info("The table '%s' already exists. 
Consider it as created", self.table_id) @@ -536,7 +536,7 @@ def execute(self, context) -> None: ) instance = hook.get_instance(project_id=self.project_id, instance_id=self.instance_id) if not instance: - raise AirflowException("Dependency: instance '{}' does not exist.".format(self.instance_id)) + raise AirflowException(f"Dependency: instance '{self.instance_id}' does not exist.") try: hook.delete_table( @@ -622,7 +622,7 @@ def execute(self, context) -> None: ) instance = hook.get_instance(project_id=self.project_id, instance_id=self.instance_id) if not instance: - raise AirflowException("Dependency: instance '{}' does not exist.".format(self.instance_id)) + raise AirflowException(f"Dependency: instance '{self.instance_id}' does not exist.") try: hook.update_cluster(instance=instance, cluster_id=self.cluster_id, nodes=self.nodes) diff --git a/airflow/providers/google/cloud/operators/cloud_build.py b/airflow/providers/google/cloud/operators/cloud_build.py index 5126e358dbfd2..101c04dbf2dc5 100644 --- a/airflow/providers/google/cloud/operators/cloud_build.py +++ b/airflow/providers/google/cloud/operators/cloud_build.py @@ -222,7 +222,7 @@ def prepare_template(self) -> None: # if no file is specified, skip if not isinstance(self.body_raw, str): return - with open(self.body_raw, 'r') as file: + with open(self.body_raw) as file: if any(self.body_raw.endswith(ext) for ext in ['.yaml', '.yml']): self.body = yaml.load(file.read(), Loader=yaml.FullLoader) if self.body_raw.endswith('.json'): diff --git a/airflow/providers/google/cloud/operators/cloud_sql.py b/airflow/providers/google/cloud/operators/cloud_sql.py index f3eeaf23062fa..f43625314fc44 100644 --- a/airflow/providers/google/cloud/operators/cloud_sql.py +++ b/airflow/providers/google/cloud/operators/cloud_sql.py @@ -518,7 +518,7 @@ def execute(self, context) -> Optional[bool]: impersonation_chain=self.impersonation_chain, ) if not self._check_if_instance_exists(self.instance, hook): - print("Cloud SQL instance with ID {} does not exist. Aborting delete.".format(self.instance)) + print(f"Cloud SQL instance with ID {self.instance} does not exist. 
Aborting delete.") return True else: return hook.delete_instance(project_id=self.project_id, instance=self.instance) diff --git a/airflow/providers/google/cloud/operators/dataproc.py b/airflow/providers/google/cloud/operators/dataproc.py index 909d32759b8b8..c8f1f1a88ec2b 100644 --- a/airflow/providers/google/cloud/operators/dataproc.py +++ b/airflow/providers/google/cloud/operators/dataproc.py @@ -1476,7 +1476,7 @@ def _upload_file_temp(self, bucket, local_file): mime_type='application/x-python', filename=local_file, ) - return "gs://{}/{}".format(bucket, temp_filename) + return f"gs://{bucket}/{temp_filename}" @apply_defaults def __init__( @@ -1517,7 +1517,7 @@ def generate_job(self): project_id=self.hook.project_id, region=self.region, cluster_name=self.cluster_name ) bucket = cluster_info['config']['config_bucket'] - self.main = "gs://{}/{}".format(bucket, self.main) + self.main = f"gs://{bucket}/{self.main}" self.job_template.set_python_main(self.main) self.job_template.add_args(self.arguments) self.job_template.add_archive_uris(self.archives) diff --git a/airflow/providers/google/cloud/operators/datastore.py b/airflow/providers/google/cloud/operators/datastore.py index 9a5a572ac5c14..234a289643377 100644 --- a/airflow/providers/google/cloud/operators/datastore.py +++ b/airflow/providers/google/cloud/operators/datastore.py @@ -137,7 +137,7 @@ def execute(self, context) -> dict: state = result['metadata']['common']['state'] if state != 'SUCCESSFUL': - raise AirflowException('Operation failed: result={}'.format(result)) + raise AirflowException(f'Operation failed: result={result}') return result @@ -242,7 +242,7 @@ def execute(self, context): state = result['metadata']['common']['state'] if state != 'SUCCESSFUL': - raise AirflowException('Operation failed: result={}'.format(result)) + raise AirflowException(f'Operation failed: result={result}') return result diff --git a/airflow/providers/google/cloud/operators/functions.py b/airflow/providers/google/cloud/operators/functions.py index 2b4d641a44421..15bd2d54df3d5 100644 --- a/airflow/providers/google/cloud/operators/functions.py +++ b/airflow/providers/google/cloud/operators/functions.py @@ -385,7 +385,7 @@ def _validate_inputs(self) -> None: else: pattern = FUNCTION_NAME_COMPILED_PATTERN if not pattern.match(self.name): - raise AttributeError('Parameter name must match pattern: {}'.format(FUNCTION_NAME_PATTERN)) + raise AttributeError(f'Parameter name must match pattern: {FUNCTION_NAME_PATTERN}') def execute(self, context): hook = CloudFunctionsHook( diff --git a/airflow/providers/google/cloud/operators/gcs.py b/airflow/providers/google/cloud/operators/gcs.py index 03593211d8f71..a4eb7f2ae9f05 100644 --- a/airflow/providers/google/cloud/operators/gcs.py +++ b/airflow/providers/google/cloud/operators/gcs.py @@ -644,7 +644,7 @@ def execute(self, context: dict) -> None: process.wait() if process.returncode: - raise AirflowException("Transform script failed: {0}".format(process.returncode)) + raise AirflowException(f"Transform script failed: {process.returncode}") self.log.info("Transformation succeeded. 
Output temporarily located at %s", destination_file.name) diff --git a/airflow/providers/google/cloud/operators/mlengine.py b/airflow/providers/google/cloud/operators/mlengine.py index 8f5fa42374b03..9bd69f9476915 100644 --- a/airflow/providers/google/cloud/operators/mlengine.py +++ b/airflow/providers/google/cloud/operators/mlengine.py @@ -45,7 +45,7 @@ def _normalize_mlengine_job_id(job_id: str) -> str: # Add a prefix when a job_id starts with a digit or a template match = re.search(r'\d|\{{2}', job_id) if match and match.start() == 0: - job = 'z_{}'.format(job_id) + job = f'z_{job_id}' else: job = job_id @@ -257,7 +257,7 @@ def execute(self, context): if self._uri: prediction_request['predictionInput']['uri'] = self._uri elif self._model_name: - origin_name = 'projects/{}/models/{}'.format(self._project_id, self._model_name) + origin_name = f'projects/{self._project_id}/models/{self._model_name}' if not self._version_name: prediction_request['predictionInput']['modelName'] = origin_name else: @@ -380,7 +380,7 @@ def execute(self, context): elif self._operation == 'get': return hook.get_model(project_id=self._project_id, model_name=self._model['name']) else: - raise ValueError('Unknown operation: {}'.format(self._operation)) + raise ValueError(f'Unknown operation: {self._operation}') class MLEngineCreateModelOperator(BaseOperator): @@ -722,7 +722,7 @@ def execute(self, context): project_id=self._project_id, model_name=self._model_name, version_name=self._version['name'] ) else: - raise ValueError('Unknown operation: {}'.format(self._operation)) + raise ValueError(f'Unknown operation: {self._operation}') class MLEngineCreateVersionOperator(BaseOperator): diff --git a/airflow/providers/google/cloud/operators/text_to_speech.py b/airflow/providers/google/cloud/operators/text_to_speech.py index 6965aca1885cb..36a8f70154c86 100644 --- a/airflow/providers/google/cloud/operators/text_to_speech.py +++ b/airflow/providers/google/cloud/operators/text_to_speech.py @@ -126,7 +126,7 @@ def _validate_inputs(self) -> None: "target_filename", ]: if getattr(self, parameter) == "": - raise AirflowException("The required parameter '{}' is empty".format(parameter)) + raise AirflowException(f"The required parameter '{parameter}' is empty") def execute(self, context) -> None: hook = CloudTextToSpeechHook( diff --git a/airflow/providers/google/cloud/operators/translate_speech.py b/airflow/providers/google/cloud/operators/translate_speech.py index 645e5903cc01d..1ec734bc10428 100644 --- a/airflow/providers/google/cloud/operators/translate_speech.py +++ b/airflow/providers/google/cloud/operators/translate_speech.py @@ -168,7 +168,7 @@ def execute(self, context) -> dict: transcript = recognize_dict['results'][0]['alternatives'][0]['transcript'] except KeyError as key: raise AirflowException( - "Wrong response '{}' returned - it should contain {} field".format(recognize_dict, key) + f"Wrong response '{recognize_dict}' returned - it should contain {key} field" ) try: diff --git a/airflow/providers/google/cloud/sensors/bigquery.py b/airflow/providers/google/cloud/sensors/bigquery.py index 28771fd87a31a..c78d7c5eb43e1 100644 --- a/airflow/providers/google/cloud/sensors/bigquery.py +++ b/airflow/providers/google/cloud/sensors/bigquery.py @@ -84,7 +84,7 @@ def __init__( self.impersonation_chain = impersonation_chain def poke(self, context: dict) -> bool: - table_uri = '{0}:{1}.{2}'.format(self.project_id, self.dataset_id, self.table_id) + table_uri = f'{self.project_id}:{self.dataset_id}.{self.table_id}' 
self.log.info('Sensor checks existence of table: %s', table_uri) hook = BigQueryHook( bigquery_conn_id=self.bigquery_conn_id, @@ -162,7 +162,7 @@ def __init__( self.impersonation_chain = impersonation_chain def poke(self, context: dict) -> bool: - table_uri = '{0}:{1}.{2}'.format(self.project_id, self.dataset_id, self.table_id) + table_uri = f'{self.project_id}:{self.dataset_id}.{self.table_id}' self.log.info('Sensor checks existence of partition: "%s" in table: %s', self.partition_id, table_uri) hook = BigQueryHook( bigquery_conn_id=self.bigquery_conn_id, diff --git a/airflow/providers/google/cloud/sensors/dataproc.py b/airflow/providers/google/cloud/sensors/dataproc.py index 873bc60454686..fb8a990674445 100644 --- a/airflow/providers/google/cloud/sensors/dataproc.py +++ b/airflow/providers/google/cloud/sensors/dataproc.py @@ -66,9 +66,9 @@ def poke(self, context: dict) -> bool: state = job.status.state if state == JobStatus.ERROR: - raise AirflowException('Job failed:\n{}'.format(job)) + raise AirflowException(f'Job failed:\n{job}') elif state in {JobStatus.CANCELLED, JobStatus.CANCEL_PENDING, JobStatus.CANCEL_STARTED}: - raise AirflowException('Job was cancelled:\n{}'.format(job)) + raise AirflowException(f'Job was cancelled:\n{job}') elif JobStatus.DONE == state: self.log.debug("Job %s completed successfully.", self.dataproc_job_id) return True diff --git a/airflow/providers/google/cloud/transfers/bigquery_to_mysql.py b/airflow/providers/google/cloud/transfers/bigquery_to_mysql.py index 32f52310596b6..a8f60e1e448b1 100644 --- a/airflow/providers/google/cloud/transfers/bigquery_to_mysql.py +++ b/airflow/providers/google/cloud/transfers/bigquery_to_mysql.py @@ -122,7 +122,7 @@ def __init__( try: self.dataset_id, self.table_id = dataset_table.split('.') except ValueError: - raise ValueError('Could not parse {} as <dataset>.<table>'.format(dataset_table)) + raise ValueError(f'Could not parse {dataset_table} as <dataset>.<table>
') def _bq_get_data(self): self.log.info('Fetching Data from:') diff --git a/airflow/providers/google/cloud/transfers/gcs_to_bigquery.py b/airflow/providers/google/cloud/transfers/gcs_to_bigquery.py index 240a4162cfd6a..ae13fa4601779 100644 --- a/airflow/providers/google/cloud/transfers/gcs_to_bigquery.py +++ b/airflow/providers/google/cloud/transfers/gcs_to_bigquery.py @@ -275,9 +275,7 @@ def execute(self, context): else: schema_fields = self.schema_fields - source_uris = [ - 'gs://{}/{}'.format(self.bucket, source_object) for source_object in self.source_objects - ] + source_uris = [f'gs://{self.bucket}/{source_object}' for source_object in self.source_objects] conn = bq_hook.get_conn() cursor = conn.cursor() @@ -329,7 +327,7 @@ def execute(self, context): escaped_table_name = f'`{self.destination_project_dataset_table}`' if self.max_id_key: - cursor.execute('SELECT MAX({}) FROM {}'.format(self.max_id_key, escaped_table_name)) + cursor.execute(f'SELECT MAX({self.max_id_key}) FROM {escaped_table_name}') row = cursor.fetchone() max_id = row[0] if row[0] else 0 self.log.info( diff --git a/airflow/providers/google/cloud/transfers/salesforce_to_gcs.py b/airflow/providers/google/cloud/transfers/salesforce_to_gcs.py index d2e680c887f9c..ec06d772a6802 100644 --- a/airflow/providers/google/cloud/transfers/salesforce_to_gcs.py +++ b/airflow/providers/google/cloud/transfers/salesforce_to_gcs.py @@ -120,6 +120,6 @@ def execute(self, context: Dict): gzip=self.gzip, ) - gcs_uri = "gs://{}/{}".format(self.bucket_name, self.object_name) + gcs_uri = f"gs://{self.bucket_name}/{self.object_name}" self.log.info("%s uploaded to GCS", gcs_uri) return gcs_uri diff --git a/airflow/providers/google/cloud/utils/credentials_provider.py b/airflow/providers/google/cloud/utils/credentials_provider.py index 97d50c7dd4310..c06c5de833958 100644 --- a/airflow/providers/google/cloud/utils/credentials_provider.py +++ b/airflow/providers/google/cloud/utils/credentials_provider.py @@ -64,12 +64,12 @@ def build_gcp_conn( query_params = {} if key_file_path: - query_params["{}__key_path".format(extras)] = key_file_path + query_params[f"{extras}__key_path"] = key_file_path if scopes: scopes_string = ",".join(scopes) - query_params["{}__scope".format(extras)] = scopes_string + query_params[f"{extras}__scope"] = scopes_string if project_id: - query_params["{}__projects".format(extras)] = project_id + query_params[f"{extras}__projects"] = project_id query = urlencode(query_params) return conn.format(query) diff --git a/airflow/providers/google/cloud/utils/field_sanitizer.py b/airflow/providers/google/cloud/utils/field_sanitizer.py index 1961697bfca60..53d2b26d5db16 100644 --- a/airflow/providers/google/cloud/utils/field_sanitizer.py +++ b/airflow/providers/google/cloud/utils/field_sanitizer.py @@ -141,7 +141,7 @@ def _sanitize(self, dictionary, remaining_field_spec, current_path): "The field %s is missing in %s at the path %s. 
", field_name, dictionary, current_path ) elif isinstance(child, dict): - self._sanitize(child, remaining_path, "{}.{}".format(current_path, field_name)) + self._sanitize(child, remaining_path, f"{current_path}.{field_name}") elif isinstance(child, list): for index, elem in enumerate(child): if not isinstance(elem, dict): @@ -152,7 +152,7 @@ def _sanitize(self, dictionary, remaining_field_spec, current_path): index, elem, ) - self._sanitize(elem, remaining_path, "{}.{}[{}]".format(current_path, field_name, index)) + self._sanitize(elem, remaining_path, f"{current_path}.{field_name}[{index}]") else: self.log.warning( "The field %s is of wrong type. It should be dict or list and it is %s. Skipping it.", diff --git a/airflow/providers/google/cloud/utils/field_validator.py b/airflow/providers/google/cloud/utils/field_validator.py index 3fc52a5d2366b..3df567de79d09 100644 --- a/airflow/providers/google/cloud/utils/field_validator.py +++ b/airflow/providers/google/cloud/utils/field_validator.py @@ -212,7 +212,7 @@ def _sanity_checks( ) -> None: if value is None and field_type != 'union': raise GcpFieldValidationException( - "The required body field '{}' is missing. Please add it.".format(full_field_path) + f"The required body field '{full_field_path}' is missing. Please add it." ) if regexp and field_type: raise GcpValidationSpecificationException( @@ -252,7 +252,7 @@ def _validate_regexp(full_field_path: str, regexp: str, value: str) -> None: def _validate_is_empty(full_field_path: str, value: str) -> None: if not value: raise GcpFieldValidationException( - "The body field '{}' can't be empty. Please provide a value.".format(full_field_path) + f"The body field '{full_field_path}' can't be empty. Please provide a value." ) def _validate_dict(self, children_validation_specs: Dict, full_field_path: str, value: Dict) -> None: @@ -437,7 +437,7 @@ def validate(self, body_to_validate: dict) -> None: self._validate_field(validation_spec=validation_spec, dictionary_to_validate=body_to_validate) except GcpFieldValidationException as e: raise GcpFieldValidationException( - "There was an error when validating: body '{}': '{}'".format(body_to_validate, e) + f"There was an error when validating: body '{body_to_validate}': '{e}'" ) all_field_names = [ spec['name'] diff --git a/airflow/providers/google/cloud/utils/mlengine_operator_utils.py b/airflow/providers/google/cloud/utils/mlengine_operator_utils.py index 3f0e94779a640..88fe3725db22a 100644 --- a/airflow/providers/google/cloud/utils/mlengine_operator_utils.py +++ b/airflow/providers/google/cloud/utils/mlengine_operator_utils.py @@ -255,7 +255,7 @@ def apply_validate_fn(*args, templates_dict, **kwargs): prediction_path = templates_dict["prediction_path"] scheme, bucket, obj, _, _ = urlsplit(prediction_path) if scheme != "gs" or not bucket or not obj: - raise ValueError("Wrong format prediction_path: {}".format(prediction_path)) + raise ValueError(f"Wrong format prediction_path: {prediction_path}") summary = os.path.join(obj.strip("/"), "prediction.summary.json") gcs_hook = GCSHook() summary = json.loads(gcs_hook.download(bucket, summary)) diff --git a/airflow/providers/google/common/hooks/base_google.py b/airflow/providers/google/common/hooks/base_google.py index 9b47c4c43e8d9..fc29b88ae26f0 100644 --- a/airflow/providers/google/common/hooks/base_google.py +++ b/airflow/providers/google/common/hooks/base_google.py @@ -230,7 +230,7 @@ def _get_field(self, f: str, default: Any = None) -> Any: to the hook page, which allow admins to specify service_account, 
key_path, etc. They get formatted as shown below. """ - long_f = 'extra__google_cloud_platform__{}'.format(f) + long_f = f'extra__google_cloud_platform__{f}' if hasattr(self, 'extras') and long_f in self.extras: return self.extras[long_f] else: diff --git a/airflow/providers/google/marketing_platform/operators/analytics.py b/airflow/providers/google/marketing_platform/operators/analytics.py index b7aaae43b91e2..803984db74f25 100644 --- a/airflow/providers/google/marketing_platform/operators/analytics.py +++ b/airflow/providers/google/marketing_platform/operators/analytics.py @@ -473,7 +473,7 @@ def _modify_column_headers( ) -> None: # Check headers self.log.info("Checking if file contains headers") - with open(tmp_file_location, "r") as check_header_file: + with open(tmp_file_location) as check_header_file: has_header = csv.Sniffer().has_header(check_header_file.read(1024)) if not has_header: raise NameError( @@ -483,7 +483,7 @@ def _modify_column_headers( # Transform self.log.info("Modifying column headers to be compatible for data upload") - with open(tmp_file_location, "r") as read_file: + with open(tmp_file_location) as read_file: reader = csv.reader(read_file) headers = next(reader) new_headers = [] diff --git a/airflow/providers/google/marketing_platform/operators/campaign_manager.py b/airflow/providers/google/marketing_platform/operators/campaign_manager.py index b257854485fa4..664a0179711ec 100644 --- a/airflow/providers/google/marketing_platform/operators/campaign_manager.py +++ b/airflow/providers/google/marketing_platform/operators/campaign_manager.py @@ -337,7 +337,7 @@ def __init__( def prepare_template(self) -> None: # If .json is passed then we have to read the file if isinstance(self.report, str) and self.report.endswith('.json'): - with open(self.report, 'r') as file: + with open(self.report) as file: self.report = json.load(file) def execute(self, context: dict): diff --git a/airflow/providers/google/marketing_platform/operators/display_video.py b/airflow/providers/google/marketing_platform/operators/display_video.py index 7f023e1ffbc1e..e95016b861548 100644 --- a/airflow/providers/google/marketing_platform/operators/display_video.py +++ b/airflow/providers/google/marketing_platform/operators/display_video.py @@ -92,7 +92,7 @@ def __init__( def prepare_template(self) -> None: # If .json is passed then we have to read the file if isinstance(self.body, str) and self.body.endswith('.json'): - with open(self.body, 'r') as file: + with open(self.body) as file: self.body = json.load(file) def execute(self, context: dict) -> dict: diff --git a/airflow/providers/google/marketing_platform/operators/search_ads.py b/airflow/providers/google/marketing_platform/operators/search_ads.py index 3cf16c0731b06..e8984dcbb93cb 100644 --- a/airflow/providers/google/marketing_platform/operators/search_ads.py +++ b/airflow/providers/google/marketing_platform/operators/search_ads.py @@ -87,7 +87,7 @@ def __init__( def prepare_template(self) -> None: # If .json is passed then we have to read the file if isinstance(self.report, str) and self.report.endswith('.json'): - with open(self.report, 'r') as file: + with open(self.report) as file: self.report = json.load(file) def execute(self, context: dict): @@ -220,7 +220,7 @@ def execute(self, context: dict): response = hook.get(report_id=self.report_id) if not response['isReportReady']: - raise AirflowException('Report {} is not ready yet'.format(self.report_id)) + raise AirflowException(f'Report {self.report_id} is not ready yet') # Resolve report 
fragments fragments_count = len(response["files"]) diff --git a/airflow/providers/google/suite/hooks/drive.py b/airflow/providers/google/suite/hooks/drive.py index bae4c7599c17a..54769c8c2bbee 100644 --- a/airflow/providers/google/suite/hooks/drive.py +++ b/airflow/providers/google/suite/hooks/drive.py @@ -84,8 +84,8 @@ def _ensure_folders_exists(self, path: str) -> str: self.log.debug("Looking for %s directory with %s parent", current_folder, current_parent) conditions = [ "mimeType = 'application/vnd.google-apps.folder'", - "name='{}'".format(current_folder), - "'{}' in parents".format(current_parent), + f"name='{current_folder}'", + f"'{current_parent}' in parents", ] result = ( service.files() # pylint: disable=no-member diff --git a/airflow/providers/grpc/hooks/grpc.py b/airflow/providers/grpc/hooks/grpc.py index 1e2446f90b8ed..818273c497a34 100644 --- a/airflow/providers/grpc/hooks/grpc.py +++ b/airflow/providers/grpc/hooks/grpc.py @@ -135,5 +135,5 @@ def _get_field(self, field_name: str) -> str: to the hook page, which allow admins to specify scopes, credential pem files, etc. They get formatted as shown below. """ - full_field_name = 'extra__grpc__{}'.format(field_name) + full_field_name = f'extra__grpc__{field_name}' return self.extras[full_field_name] diff --git a/airflow/providers/jenkins/operators/jenkins_job_trigger.py b/airflow/providers/jenkins/operators/jenkins_job_trigger.py index 96d5acd05de9a..fede97df48a04 100644 --- a/airflow/providers/jenkins/operators/jenkins_job_trigger.py +++ b/airflow/providers/jenkins/operators/jenkins_job_trigger.py @@ -60,9 +60,7 @@ def jenkins_request_with_headers(jenkins_server: Jenkins, req: Request) -> Optio except HTTPError as e: # Jenkins's funky authentication means its nigh impossible to distinguish errors. if e.code in [401, 403, 500]: - raise JenkinsException( - 'Error in request. Possibly authentication failed [%s]: %s' % (e.code, e.reason) - ) + raise JenkinsException(f'Error in request. 
Possibly authentication failed [{e.code}]: {e.reason}') elif e.code == 404: raise jenkins.NotFoundException('Requested item could not be found') else: diff --git a/airflow/providers/microsoft/azure/hooks/adx.py b/airflow/providers/microsoft/azure/hooks/adx.py index 1f9fe719590e9..302cb7a82b3c1 100644 --- a/airflow/providers/microsoft/azure/hooks/adx.py +++ b/airflow/providers/microsoft/azure/hooks/adx.py @@ -94,9 +94,7 @@ def get_required_param(name: str) -> str: """Extract required parameter from extra JSON, raise exception if not found""" value = conn.extra_dejson.get(name) if not value: - raise AirflowException( - 'Extra connection option is missing required parameter: `{}`'.format(name) - ) + raise AirflowException(f'Extra connection option is missing required parameter: `{name}`') return value auth_method = get_required_param('auth_method') @@ -120,7 +118,7 @@ def get_required_param(name: str) -> str: elif auth_method == 'AAD_DEVICE': kcsb = KustoConnectionStringBuilder.with_aad_device_authentication(cluster) else: - raise AirflowException('Unknown authentication method: {}'.format(auth_method)) + raise AirflowException(f'Unknown authentication method: {auth_method}') return KustoClient(kcsb) @@ -146,4 +144,4 @@ def run_query(self, query: str, database: str, options: Optional[Dict] = None) - try: return self.connection.execute(database, query, properties=properties) except KustoServiceError as error: - raise AirflowException('Error running Kusto query: {}'.format(error)) + raise AirflowException(f'Error running Kusto query: {error}') diff --git a/airflow/providers/microsoft/azure/hooks/azure_batch.py b/airflow/providers/microsoft/azure/hooks/azure_batch.py index 5b6ccd801a3ae..864233c8315cf 100644 --- a/airflow/providers/microsoft/azure/hooks/azure_batch.py +++ b/airflow/providers/microsoft/azure/hooks/azure_batch.py @@ -60,9 +60,7 @@ def _get_required_param(name): """Extract required parameter from extra JSON, raise exception if not found""" value = conn.extra_dejson.get(name) if not value: - raise AirflowException( - 'Extra connection option is missing required parameter: `{}`'.format(name) - ) + raise AirflowException(f'Extra connection option is missing required parameter: `{name}`') return value batch_account_url = _get_required_param('account_url') @@ -249,7 +247,7 @@ def wait_for_all_node_state(self, pool_id: str, node_state: Set) -> list: pool = self.connection.pool.get(pool_id) if pool.resize_errors is not None: resize_errors = "\n".join([repr(e) for e in pool.resize_errors]) - raise RuntimeError('resize error encountered for pool {}:\n{}'.format(pool.id, resize_errors)) + raise RuntimeError(f'resize error encountered for pool {pool.id}:\n{resize_errors}') nodes = list(self.connection.compute_node.list(pool.id)) if len(nodes) >= pool.target_dedicated_nodes and all(node.state in node_state for node in nodes): return nodes diff --git a/airflow/providers/microsoft/azure/hooks/wasb.py b/airflow/providers/microsoft/azure/hooks/wasb.py index eadf86d9100f1..aedad3e1501a5 100644 --- a/airflow/providers/microsoft/azure/hooks/wasb.py +++ b/airflow/providers/microsoft/azure/hooks/wasb.py @@ -203,7 +203,7 @@ def delete_file( blobs_to_delete = [] if not ignore_if_missing and len(blobs_to_delete) == 0: - raise AirflowException('Blob(s) not found: {}'.format(blob_name)) + raise AirflowException(f'Blob(s) not found: {blob_name}') for blob_uri in blobs_to_delete: self.log.info("Deleting blob: %s", blob_uri) diff --git a/airflow/providers/microsoft/azure/log/wasb_task_handler.py 
b/airflow/providers/microsoft/azure/log/wasb_task_handler.py index 4fef64d0a599f..a8eb6db69a1d3 100644 --- a/airflow/providers/microsoft/azure/log/wasb_task_handler.py +++ b/airflow/providers/microsoft/azure/log/wasb_task_handler.py @@ -92,7 +92,7 @@ def close(self) -> None: remote_loc = os.path.join(self.remote_base, self.log_relative_path) if os.path.exists(local_loc): # read log and remove old logs to get just the latest additions - with open(local_loc, 'r') as logfile: + with open(local_loc) as logfile: log = logfile.read() self.wasb_write(log, remote_loc, append=True) @@ -122,7 +122,7 @@ def _read(self, ti, try_number: str, metadata: Optional[str] = None) -> Tuple[st # local machine even if there are errors reading remote logs, as # returned remote_log will contain error messages. remote_log = self.wasb_read(remote_loc, return_error=True) - log = '*** Reading remote log from {}.\n{}\n'.format(remote_loc, remote_log) + log = f'*** Reading remote log from {remote_loc}.\n{remote_log}\n' return log, {'end_of_log': True} else: return super()._read(ti, try_number) @@ -154,7 +154,7 @@ def wasb_read(self, remote_log_location: str, return_error: bool = False): try: return self.hook.read_file(self.wasb_container, remote_log_location) except AzureHttpError: - msg = 'Could not read logs from {}'.format(remote_log_location) + msg = f'Could not read logs from {remote_log_location}' self.log.exception(msg) # return error if needed if return_error: diff --git a/airflow/providers/microsoft/winrm/hooks/winrm.py b/airflow/providers/microsoft/winrm/hooks/winrm.py index 4adcd286ebcb1..645c6eceaf775 100644 --- a/airflow/providers/microsoft/winrm/hooks/winrm.py +++ b/airflow/providers/microsoft/winrm/hooks/winrm.py @@ -205,7 +205,7 @@ def get_conn(self): # If endpoint is not set, then build a standard wsman endpoint from host and port. 
if not self.endpoint: - self.endpoint = 'http://{0}:{1}/wsman'.format(self.remote_host, self.remote_port) + self.endpoint = f'http://{self.remote_host}:{self.remote_port}/wsman' try: if self.password and self.password.strip(): @@ -233,7 +233,7 @@ def get_conn(self): self.client = self.winrm_protocol.open_shell() except Exception as error: - error_msg = "Error connecting to host: {0}, error: {1}".format(self.remote_host, error) + error_msg = f"Error connecting to host: {self.remote_host}, error: {error}" self.log.error(error_msg) raise AirflowException(error_msg) diff --git a/airflow/providers/microsoft/winrm/operators/winrm.py b/airflow/providers/microsoft/winrm/operators/winrm.py index 8e4b5073518fc..5500e3dd4152b 100644 --- a/airflow/providers/microsoft/winrm/operators/winrm.py +++ b/airflow/providers/microsoft/winrm/operators/winrm.py @@ -129,7 +129,7 @@ def execute(self, context: dict) -> Union[list, str]: self.winrm_hook.winrm_protocol.close_shell(winrm_client) # type: ignore[attr-defined] except Exception as e: - raise AirflowException("WinRM operator error: {0}".format(str(e))) + raise AirflowException("WinRM operator error: {}".format(str(e))) if return_code == 0: # returning output if do_xcom_push is set @@ -139,7 +139,7 @@ def execute(self, context: dict) -> Union[list, str]: else: return b64encode(b''.join(stdout_buffer)).decode('utf-8') else: - error_msg = "Error running cmd: {0}, return code: {1}, error: {2}".format( + error_msg = "Error running cmd: {}, return code: {}, error: {}".format( self.command, return_code, b''.join(stderr_buffer).decode('utf-8') ) raise AirflowException(error_msg) diff --git a/airflow/providers/mongo/hooks/mongo.py b/airflow/providers/mongo/hooks/mongo.py index 5d71438e8a98d..d1fcc0b4da44f 100644 --- a/airflow/providers/mongo/hooks/mongo.py +++ b/airflow/providers/mongo/hooks/mongo.py @@ -55,11 +55,9 @@ def __init__(self, conn_id: str = 'mongo_default', *args, **kwargs) -> None: self.uri = '{scheme}://{creds}{host}{port}/{database}'.format( scheme=scheme, - creds='{}:{}@'.format(self.connection.login, self.connection.password) - if self.connection.login - else '', + creds=f'{self.connection.login}:{self.connection.password}@' if self.connection.login else '', host=self.connection.host, - port='' if self.connection.port is None else ':{}'.format(self.connection.port), + port='' if self.connection.port is None else f':{self.connection.port}', database=self.connection.schema, ) diff --git a/airflow/providers/mysql/hooks/mysql.py b/airflow/providers/mysql/hooks/mysql.py index ee175b9980782..5eaa19d9195ea 100644 --- a/airflow/providers/mysql/hooks/mysql.py +++ b/airflow/providers/mysql/hooks/mysql.py @@ -154,7 +154,7 @@ def get_uri(self) -> str: uri = super().get_uri() if conn.extra_dejson.get('charset', False): charset = conn.extra_dejson["charset"] - return "{uri}?charset={charset}".format(uri=uri, charset=charset) + return f"{uri}?charset={charset}" return uri def bulk_load(self, table: str, tmp_file: str) -> None: diff --git a/airflow/providers/oracle/hooks/oracle.py b/airflow/providers/oracle/hooks/oracle.py index 2a1ccffc9850d..020267b914f7f 100644 --- a/airflow/providers/oracle/hooks/oracle.py +++ b/airflow/providers/oracle/hooks/oracle.py @@ -147,7 +147,7 @@ def insert_rows( """ if target_fields: target_fields = ', '.join(target_fields) - target_fields = '({})'.format(target_fields) + target_fields = f'({target_fields})' else: target_fields = '' conn = self.get_conn() @@ -175,7 +175,7 @@ def insert_rows( else: lst.append(str(cell)) values = 
tuple(lst) - sql = 'INSERT /*+ APPEND */ ' 'INTO {0} {1} VALUES ({2})'.format( + sql = 'INSERT /*+ APPEND */ ' 'INTO {} {} VALUES ({})'.format( table, target_fields, ','.join(values) ) cur.execute(sql) diff --git a/airflow/providers/plexus/operators/job.py b/airflow/providers/plexus/operators/job.py index 3768a49d4d0dd..a1aec95cf37b5 100644 --- a/airflow/providers/plexus/operators/job.py +++ b/airflow/providers/plexus/operators/job.py @@ -77,7 +77,7 @@ def execute(self, context: Any) -> Any: ) logger.info("creating job w/ following params: %s", params) jobs_endpoint = hook.host + "jobs/" - headers = {"Authorization": "Bearer {}".format(hook.token)} + headers = {"Authorization": f"Bearer {hook.token}"} create_job = requests.post(jobs_endpoint, headers=headers, data=params, timeout=5) if create_job.ok: job = create_job.json() @@ -85,16 +85,16 @@ def execute(self, context: Any) -> Any: state = job["last_state"] while state != end_state: time.sleep(3) - jid_endpoint = jobs_endpoint + "{}/".format(jid) + jid_endpoint = jobs_endpoint + f"{jid}/" get_job = requests.get(jid_endpoint, headers=headers, timeout=5) if not get_job.ok: raise AirflowException( - "Could not retrieve job status. Status Code: [{0}]. " - "Reason: {1} - {2}".format(get_job.status_code, get_job.reason, get_job.text) + "Could not retrieve job status. Status Code: [{}]. " + "Reason: {} - {}".format(get_job.status_code, get_job.reason, get_job.text) ) new_state = get_job.json()["last_state"] if new_state in ("Cancelled", "Failed"): - raise AirflowException("Job {}".format(new_state)) + raise AirflowException(f"Job {new_state}") elif new_state != state: logger.info("job is %s", new_state) state = new_state @@ -113,7 +113,7 @@ def _api_lookup(self, param: str, hook): endpoint = hook.host + lookup[0].format(hook.user_id) else: endpoint = hook.host + lookup[0] - headers = {"Authorization": "Bearer {}".format(hook.token)} + headers = {"Authorization": f"Bearer {hook.token}"} response = requests.get(endpoint, headers=headers, timeout=5) results = response.json()["results"] @@ -127,9 +127,7 @@ def _api_lookup(self, param: str, hook): if param == 'app': self.is_service = dct['is_service'] if v is None: - raise AirflowException( - "Could not locate value for param:{} at endpoint: {}".format(key, endpoint) - ) + raise AirflowException(f"Could not locate value for param:{key} at endpoint: {endpoint}") return v diff --git a/airflow/providers/postgres/hooks/postgres.py b/airflow/providers/postgres/hooks/postgres.py index 2d67d79210a64..825a0d027036e 100644 --- a/airflow/providers/postgres/hooks/postgres.py +++ b/airflow/providers/postgres/hooks/postgres.py @@ -73,7 +73,7 @@ def _get_cursor(self, raw_cursor: str) -> CursorType: return psycopg2.extras.RealDictCursor if _cursor == 'namedtuplecursor': return psycopg2.extras.NamedTupleCursor - raise ValueError('Invalid cursor passed {}'.format(_cursor)) + raise ValueError(f'Invalid cursor passed {_cursor}') def get_conn(self) -> connection: """Establishes a connection to a postgres database.""" @@ -130,11 +130,11 @@ def copy_expert(self, sql: str, filename: str) -> None: def bulk_load(self, table: str, tmp_file: str) -> None: """Loads a tab-delimited file into a database table""" - self.copy_expert("COPY {table} FROM STDIN".format(table=table), tmp_file) + self.copy_expert(f"COPY {table} FROM STDIN", tmp_file) def bulk_dump(self, table: str, tmp_file: str) -> None: """Dumps a database table into a tab-delimited file""" - self.copy_expert("COPY {table} TO STDOUT".format(table=table), tmp_file) + 
self.copy_expert(f"COPY {table} TO STDOUT", tmp_file) # pylint: disable=signature-differs @staticmethod @@ -217,11 +217,11 @@ def _generate_insert_sql( if target_fields: target_fields_fragment = ", ".join(target_fields) - target_fields_fragment = "({})".format(target_fields_fragment) + target_fields_fragment = f"({target_fields_fragment})" else: target_fields_fragment = '' - sql = "INSERT INTO {0} {1} VALUES ({2})".format(table, target_fields_fragment, ",".join(placeholders)) + sql = "INSERT INTO {} {} VALUES ({})".format(table, target_fields_fragment, ",".join(placeholders)) if replace: if target_fields is None: @@ -235,7 +235,7 @@ def _generate_insert_sql( replace_target = [ "{0} = excluded.{0}".format(col) for col in target_fields if col not in replace_index_set ] - sql += " ON CONFLICT ({0}) DO UPDATE SET {1}".format( + sql += " ON CONFLICT ({}) DO UPDATE SET {}".format( ", ".join(replace_index), ", ".join(replace_target), ) diff --git a/airflow/providers/qubole/hooks/qubole.py b/airflow/providers/qubole/hooks/qubole.py index 77be6d7f51ada..eb3a374e91a77 100644 --- a/airflow/providers/qubole/hooks/qubole.py +++ b/airflow/providers/qubole/hooks/qubole.py @@ -164,7 +164,7 @@ def execute(self, context) -> None: if self.cmd.status != 'done': # type: ignore[attr-defined] raise AirflowException( - 'Command Id: {0} failed with Status: {1}'.format( + 'Command Id: {} failed with Status: {}'.format( self.cmd.id, self.cmd.status # type: ignore[attr-defined] ) ) @@ -247,7 +247,7 @@ def create_cmd_args(self, context) -> List[str]: for key, value in self.kwargs.items(): # pylint: disable=too-many-nested-blocks if key in COMMAND_ARGS[cmd_type]: if key in HYPHEN_ARGS: - args.append("--{0}={1}".format(key.replace('_', '-'), value)) + args.append("--{}={}".format(key.replace('_', '-'), value)) elif key in positional_args_list: inplace_args = value elif key == 'tags': @@ -256,9 +256,9 @@ def create_cmd_args(self, context) -> List[str]: if value is True: args.append("--notify") else: - args.append("--{0}={1}".format(key, value)) + args.append(f"--{key}={value}") - args.append("--tags={0}".format(','.join(filter(None, tags)))) + args.append("--tags={}".format(','.join(filter(None, tags)))) if inplace_args is not None: args += inplace_args.split(' ') diff --git a/airflow/providers/qubole/sensors/qubole.py b/airflow/providers/qubole/sensors/qubole.py index de65a8a55d976..dbaeed76c25a0 100644 --- a/airflow/providers/qubole/sensors/qubole.py +++ b/airflow/providers/qubole/sensors/qubole.py @@ -40,7 +40,7 @@ def __init__(self, *, data, qubole_conn_id: str = "qubole_default", **kwargs) -> if 'poke_interval' in kwargs and kwargs['poke_interval'] < 5: raise AirflowException( "Sorry, poke_interval can't be less than 5 sec for " - "task '{0}' in dag '{1}'.".format(kwargs['task_id'], kwargs['dag'].dag_id) + "task '{}' in dag '{}'.".format(kwargs['task_id'], kwargs['dag'].dag_id) ) super().__init__(**kwargs) diff --git a/airflow/providers/salesforce/hooks/salesforce.py b/airflow/providers/salesforce/hooks/salesforce.py index 3a32f8b5640da..1fea62139a48b 100644 --- a/airflow/providers/salesforce/hooks/salesforce.py +++ b/airflow/providers/salesforce/hooks/salesforce.py @@ -240,7 +240,7 @@ def write_object_to_file( """ fmt = fmt.lower() if fmt not in ['csv', 'json', 'ndjson']: - raise ValueError("Format value is not recognized: {}".format(fmt)) + raise ValueError(f"Format value is not recognized: {fmt}") df = self.object_to_df( query_results=query_results, diff --git a/airflow/providers/segment/hooks/segment.py 
b/airflow/providers/segment/hooks/segment.py index 1944d6d91d8c8..6de1ef76542cf 100644 --- a/airflow/providers/segment/hooks/segment.py +++ b/airflow/providers/segment/hooks/segment.py @@ -81,4 +81,4 @@ def get_conn(self) -> analytics: def on_error(self, error: str, items: str) -> None: """Handles error callbacks when using Segment with segment_debug_mode set to True""" self.log.error('Encountered Segment error: %s with ' 'items: %s', error, items) - raise AirflowException('Segment error: {}'.format(error)) + raise AirflowException(f'Segment error: {error}') diff --git a/airflow/providers/sftp/operators/sftp.py b/airflow/providers/sftp/operators/sftp.py index 086cbba40f6fd..fea060387cea8 100644 --- a/airflow/providers/sftp/operators/sftp.py +++ b/airflow/providers/sftp/operators/sftp.py @@ -105,7 +105,7 @@ def __init__( self.create_intermediate_dirs = create_intermediate_dirs if not (self.operation.lower() == SFTPOperation.GET or self.operation.lower() == SFTPOperation.PUT): raise TypeError( - "unsupported operation value {0}, expected {1} or {2}".format( + "unsupported operation value {}, expected {} or {}".format( self.operation, SFTPOperation.GET, SFTPOperation.PUT ) ) @@ -139,7 +139,7 @@ def execute(self, context: Any) -> str: local_folder = os.path.dirname(self.local_filepath) if self.create_intermediate_dirs: Path(local_folder).mkdir(parents=True, exist_ok=True) - file_msg = "from {0} to {1}".format(self.remote_filepath, self.local_filepath) + file_msg = f"from {self.remote_filepath} to {self.local_filepath}" self.log.info("Starting to transfer %s", file_msg) sftp_client.get(self.remote_filepath, self.local_filepath) else: @@ -149,12 +149,12 @@ def execute(self, context: Any) -> str: sftp_client=sftp_client, remote_directory=remote_folder, ) - file_msg = "from {0} to {1}".format(self.local_filepath, self.remote_filepath) + file_msg = f"from {self.local_filepath} to {self.remote_filepath}" self.log.info("Starting to transfer file %s", file_msg) sftp_client.put(self.local_filepath, self.remote_filepath, confirm=self.confirm) except Exception as e: - raise AirflowException("Error while transferring {0}, error: {1}".format(file_msg, str(e))) + raise AirflowException("Error while transferring {}, error: {}".format(file_msg, str(e))) return self.local_filepath diff --git a/airflow/providers/ssh/operators/ssh.py b/airflow/providers/ssh/operators/ssh.py index 181fee8c1952d..c72e0c3851d46 100644 --- a/airflow/providers/ssh/operators/ssh.py +++ b/airflow/providers/ssh/operators/ssh.py @@ -165,12 +165,10 @@ def execute(self, context) -> Union[bytes, str, bool]: else: error_msg = agg_stderr.decode('utf-8') - raise AirflowException( - "error running cmd: {0}, error: {1}".format(self.command, error_msg) - ) + raise AirflowException(f"error running cmd: {self.command}, error: {error_msg}") except Exception as e: - raise AirflowException("SSH operator error: {0}".format(str(e))) + raise AirflowException("SSH operator error: {}".format(str(e))) return True diff --git a/airflow/providers/yandex/example_dags/example_yandexcloud_dataproc.py b/airflow/providers/yandex/example_dags/example_yandexcloud_dataproc.py index 527d67360aa7c..330cdf6196503 100644 --- a/airflow/providers/yandex/example_dags/example_yandexcloud_dataproc.py +++ b/airflow/providers/yandex/example_dags/example_yandexcloud_dataproc.py @@ -88,7 +88,7 @@ '-input', 's3a://data-proc-public/jobs/sources/data/cities500.txt.bz2', '-output', - 's3a://{bucket}/dataproc/job/results'.format(bucket=S3_BUCKET_NAME_FOR_JOB_LOGS), + 
f's3a://{S3_BUCKET_NAME_FOR_JOB_LOGS}/dataproc/job/results', ], properties={ 'yarn.app.mapreduce.am.resource.mb': '2048', @@ -115,7 +115,7 @@ ], args=[ 's3a://data-proc-public/jobs/sources/data/cities500.txt.bz2', - 's3a://{bucket}/dataproc/job/results/${{JOB_ID}}'.format(bucket=S3_BUCKET_NAME_FOR_JOB_LOGS), + f's3a://{S3_BUCKET_NAME_FOR_JOB_LOGS}/dataproc/job/results/${{JOB_ID}}', ], properties={ 'spark.submit.deployMode': 'cluster', @@ -136,7 +136,7 @@ ], args=[ 's3a://data-proc-public/jobs/sources/data/cities500.txt.bz2', - 's3a://{bucket}/jobs/results/${{JOB_ID}}'.format(bucket=S3_BUCKET_NAME_FOR_JOB_LOGS), + f's3a://{S3_BUCKET_NAME_FOR_JOB_LOGS}/jobs/results/${{JOB_ID}}', ], jar_file_uris=[ 's3a://data-proc-public/jobs/sources/java/dataproc-examples-1.0.jar', diff --git a/airflow/security/kerberos.py b/airflow/security/kerberos.py index 72aff08c2f886..37c1d9ff7f745 100644 --- a/airflow/security/kerberos.py +++ b/airflow/security/kerberos.py @@ -124,8 +124,7 @@ def perform_krb181_workaround(principal: str): ret = subprocess.call(cmdv, close_fds=True) if ret != 0: - principal = "%s/%s" % (principal or conf.get('kerberos', 'principal'), - socket.getfqdn()) + principal = "{}/{}".format(principal or conf.get('kerberos', 'principal'), socket.getfqdn()) princ = principal ccache = conf.get('kerberos', 'principal') log.error( diff --git a/airflow/security/utils.py b/airflow/security/utils.py index d94c89ef929bc..e5ceadb9e1ab7 100644 --- a/airflow/security/utils.py +++ b/airflow/security/utils.py @@ -55,7 +55,7 @@ def replace_hostname_pattern(components, host=None): fqdn = host if not fqdn or fqdn == '0.0.0.0': fqdn = get_hostname() - return '%s/%s@%s' % (components[0], fqdn.lower(), components[2]) + return '{}/{}@{}'.format(components[0], fqdn.lower(), components[2]) def get_fqdn(hostname_or_ip=None): @@ -76,6 +76,6 @@ def get_fqdn(hostname_or_ip=None): def principal_from_username(username, realm): """Retrieves principal from the user name and realm.""" if ('@' not in username) and realm: - username = "{}@{}".format(username, realm) + username = f"{username}@{realm}" return username diff --git a/airflow/sensors/sql_sensor.py b/airflow/sensors/sql_sensor.py index d0c72900f84a1..b8dc1432538b5 100644 --- a/airflow/sensors/sql_sensor.py +++ b/airflow/sensors/sql_sensor.py @@ -92,12 +92,12 @@ def poke(self, context): if callable(self.failure): if self.failure(first_cell): raise AirflowException( - "Failure criteria met. self.failure({}) returned True".format(first_cell)) + f"Failure criteria met. 
self.failure({first_cell}) returned True") else: - raise AirflowException("self.failure is present, but not callable -> {}".format(self.success)) + raise AirflowException(f"self.failure is present, but not callable -> {self.success}") if self.success is not None: if callable(self.success): return self.success(first_cell) else: - raise AirflowException("self.success is present, but not callable -> {}".format(self.success)) + raise AirflowException(f"self.success is present, but not callable -> {self.success}") return bool(first_cell) diff --git a/airflow/serialization/json_schema.py b/airflow/serialization/json_schema.py index f697b5e5b870b..bd941ef8ed3a7 100644 --- a/airflow/serialization/json_schema.py +++ b/airflow/serialization/json_schema.py @@ -55,7 +55,7 @@ def load_dag_schema_dict() -> dict: schema_file = pkgutil.get_data(__name__, schema_file_name) if schema_file is None: - raise AirflowException("Schema file {} does not exists".format(schema_file_name)) + raise AirflowException(f"Schema file {schema_file_name} does not exists") schema = json.loads(schema_file.decode()) return schema diff --git a/airflow/serialization/serialized_objects.py b/airflow/serialization/serialized_objects.py index 2ade55f757048..5b0def99ce751 100644 --- a/airflow/serialization/serialized_objects.py +++ b/airflow/serialization/serialized_objects.py @@ -110,7 +110,7 @@ def from_dict(cls, serialized_obj: Dict[Encoding, Any]) -> \ def validate_schema(cls, serialized_obj: Union[str, dict]) -> None: """Validate serialized_obj satisfies JSON schema.""" if cls._json_schema is None: - raise AirflowException('JSON schema of {:s} is not set.'.format(cls.__name__)) + raise AirflowException(f'JSON schema of {cls.__name__:s} is not set.') if isinstance(serialized_obj, dict): cls._json_schema.validate(serialized_obj) @@ -274,7 +274,7 @@ def _deserialize(cls, encoded_var: Any) -> Any: # pylint: disable=too-many-retu elif type_ == DAT.TUPLE: return tuple([cls._deserialize(v) for v in var]) else: - raise TypeError('Invalid type {!s} in deserialization.'.format(type_)) + raise TypeError(f'Invalid type {type_!s} in deserialization.') _deserialize_datetime = pendulum.from_timestamp _deserialize_timezone = pendulum.tz.timezone @@ -662,7 +662,7 @@ def from_dict(cls, serialized_obj: dict) -> 'SerializedDAG': """Deserializes a python dict in to the DAG and operators it contains.""" ver = serialized_obj.get('__version', '') if ver != cls.SERIALIZER_VERSION: - raise ValueError("Unsure how to deserialize version {!r}".format(ver)) + raise ValueError(f"Unsure how to deserialize version {ver!r}") return cls.deserialize_dag(serialized_obj['dag']) diff --git a/airflow/smart_sensor_dags/smart_sensor_group.py b/airflow/smart_sensor_dags/smart_sensor_group.py index c54409300b906..fcd166aeb0ec2 100644 --- a/airflow/smart_sensor_dags/smart_sensor_group.py +++ b/airflow/smart_sensor_dags/smart_sensor_group.py @@ -35,7 +35,7 @@ shard_min = (i * shard_code_upper_limit) / num_smart_sensor_shard shard_max = ((i + 1) * shard_code_upper_limit) / num_smart_sensor_shard - dag_id = 'smart_sensor_group_shard_{}'.format(i) + dag_id = f'smart_sensor_group_shard_{i}' dag = DAG( dag_id=dag_id, default_args=args, diff --git a/airflow/task/task_runner/base_task_runner.py b/airflow/task/task_runner/base_task_runner.py index a05f702e47be7..851d138620eda 100644 --- a/airflow/task/task_runner/base_task_runner.py +++ b/airflow/task/task_runner/base_task_runner.py @@ -75,7 +75,7 @@ def __init__(self, local_task_job): popen_prepend = ['sudo', '-E', '-H', '-u', 
self.run_as_user] if pythonpath_value: - popen_prepend.append('{}={}'.format(PYTHONPATH_VAR, pythonpath_value)) + popen_prepend.append(f'{PYTHONPATH_VAR}={pythonpath_value}') else: # Always provide a copy of the configuration file settings. Since diff --git a/airflow/task/task_runner/cgroup_task_runner.py b/airflow/task/task_runner/cgroup_task_runner.py index 79b3f18541dcd..ab1e88c16611a 100644 --- a/airflow/task/task_runner/cgroup_task_runner.py +++ b/airflow/task/task_runner/cgroup_task_runner.py @@ -137,8 +137,8 @@ def start(self): strftime("%Y-%m-%d"), str(uuid.uuid4())) - self.mem_cgroup_name = "memory/{}".format(cgroup_name) - self.cpu_cgroup_name = "cpu/{}".format(cgroup_name) + self.mem_cgroup_name = f"memory/{cgroup_name}" + self.cpu_cgroup_name = f"cpu/{cgroup_name}" # Get the resource requirements from the task task = self._task_instance.task @@ -173,7 +173,7 @@ def start(self): cgroup_name ) self.process = self.run_command( - ['cgexec', '-g', 'cpu,memory:{}'.format(cgroup_name)] + ['cgexec', '-g', f'cpu,memory:{cgroup_name}'] ) def return_code(self): diff --git a/airflow/ti_deps/deps/base_ti_dep.py b/airflow/ti_deps/deps/base_ti_dep.py index b3405a8657fcf..335c55bc3e5f2 100644 --- a/airflow/ti_deps/deps/base_ti_dep.py +++ b/airflow/ti_deps/deps/base_ti_dep.py @@ -47,7 +47,7 @@ def __hash__(self): return hash(type(self)) def __repr__(self): - return "".format(self=self) + return f"" @property def name(self): diff --git a/airflow/ti_deps/deps/dag_ti_slots_available_dep.py b/airflow/ti_deps/deps/dag_ti_slots_available_dep.py index 57bc92af42964..e48de027ffee9 100644 --- a/airflow/ti_deps/deps/dag_ti_slots_available_dep.py +++ b/airflow/ti_deps/deps/dag_ti_slots_available_dep.py @@ -30,6 +30,6 @@ class DagTISlotsAvailableDep(BaseTIDep): def _get_dep_statuses(self, ti, session, dep_context): if ti.task.dag.get_concurrency_reached(session): yield self._failing_status( - reason="The maximum number of running tasks ({0}) for this task's DAG " - "'{1}' has been reached.".format(ti.task.dag.concurrency, - ti.dag_id)) + reason="The maximum number of running tasks ({}) for this task's DAG " + "'{}' has been reached.".format(ti.task.dag.concurrency, + ti.dag_id)) diff --git a/airflow/ti_deps/deps/dag_unpaused_dep.py b/airflow/ti_deps/deps/dag_unpaused_dep.py index bc8cd300c1680..b65acc3f6cc26 100644 --- a/airflow/ti_deps/deps/dag_unpaused_dep.py +++ b/airflow/ti_deps/deps/dag_unpaused_dep.py @@ -30,4 +30,4 @@ class DagUnpausedDep(BaseTIDep): def _get_dep_statuses(self, ti, session, dep_context): if ti.task.dag.get_is_paused(session): yield self._failing_status( - reason="Task's DAG '{0}' is paused.".format(ti.dag_id)) + reason=f"Task's DAG '{ti.dag_id}' is paused.") diff --git a/airflow/ti_deps/deps/dagrun_exists_dep.py b/airflow/ti_deps/deps/dagrun_exists_dep.py index a26c629dae970..bb1528170e99b 100644 --- a/airflow/ti_deps/deps/dagrun_exists_dep.py +++ b/airflow/ti_deps/deps/dagrun_exists_dep.py @@ -42,14 +42,14 @@ def _get_dep_statuses(self, ti, session, dep_context): ) if len(running_dagruns) >= dag.max_active_runs: - reason = ("The maximum number of active dag runs ({0}) for this task " - "instance's DAG '{1}' has been reached.".format( + reason = ("The maximum number of active dag runs ({}) for this task " + "instance's DAG '{}' has been reached.".format( dag.max_active_runs, ti.dag_id)) else: reason = "Unknown reason" yield self._failing_status( - reason="Task instance's dagrun did not exist: {0}.".format(reason)) + reason=f"Task instance's dagrun did not exist: {reason}.") else: if 
dagrun.state != State.RUNNING: yield self._failing_status( diff --git a/airflow/ti_deps/deps/exec_date_after_start_date_dep.py b/airflow/ti_deps/deps/exec_date_after_start_date_dep.py index 0b6a0030ce11e..c1ca967eaac02 100644 --- a/airflow/ti_deps/deps/exec_date_after_start_date_dep.py +++ b/airflow/ti_deps/deps/exec_date_after_start_date_dep.py @@ -30,15 +30,15 @@ class ExecDateAfterStartDateDep(BaseTIDep): def _get_dep_statuses(self, ti, session, dep_context): if ti.task.start_date and ti.execution_date < ti.task.start_date: yield self._failing_status( - reason="The execution date is {0} but this is before the task's start " - "date {1}.".format( + reason="The execution date is {} but this is before the task's start " + "date {}.".format( ti.execution_date.isoformat(), ti.task.start_date.isoformat())) if (ti.task.dag and ti.task.dag.start_date and ti.execution_date < ti.task.dag.start_date): yield self._failing_status( - reason="The execution date is {0} but this is before the task's " - "DAG's start date {1}.".format( + reason="The execution date is {} but this is before the task's " + "DAG's start date {}.".format( ti.execution_date.isoformat(), ti.task.dag.start_date.isoformat())) diff --git a/airflow/ti_deps/deps/not_in_retry_period_dep.py b/airflow/ti_deps/deps/not_in_retry_period_dep.py index 5cfab757c8ed6..d19e0c8117d7e 100644 --- a/airflow/ti_deps/deps/not_in_retry_period_dep.py +++ b/airflow/ti_deps/deps/not_in_retry_period_dep.py @@ -48,6 +48,6 @@ def _get_dep_statuses(self, ti, session, dep_context): if ti.is_premature: yield self._failing_status( reason="Task is not ready for retry yet but will be retried " - "automatically. Current date is {0} and task will be retried " - "at {1}.".format(cur_date.isoformat(), - next_task_retry_date.isoformat())) + "automatically. Current date is {} and task will be retried " + "at {}.".format(cur_date.isoformat(), + next_task_retry_date.isoformat())) diff --git a/airflow/ti_deps/deps/prev_dagrun_dep.py b/airflow/ti_deps/deps/prev_dagrun_dep.py index 2e7c53348f330..bdd2bf622be6c 100644 --- a/airflow/ti_deps/deps/prev_dagrun_dep.py +++ b/airflow/ti_deps/deps/prev_dagrun_dep.py @@ -75,12 +75,12 @@ def _get_dep_statuses(self, ti, session, dep_context): if previous_ti.state not in {State.SKIPPED, State.SUCCESS}: yield self._failing_status( reason="depends_on_past is true for this task, but the previous task " - "instance {0} is in the state '{1}' which is not a successful " + "instance {} is in the state '{}' which is not a successful " "state.".format(previous_ti, previous_ti.state)) previous_ti.task = ti.task if (ti.task.wait_for_downstream and not previous_ti.are_dependents_done(session=session)): yield self._failing_status( - reason="The tasks downstream of the previous task instance {0} haven't " + reason="The tasks downstream of the previous task instance {} haven't " "completed (and wait_for_downstream is True).".format(previous_ti)) diff --git a/airflow/ti_deps/deps/ready_to_reschedule.py b/airflow/ti_deps/deps/ready_to_reschedule.py index 4b8144844f6dc..bdce41fb46e37 100644 --- a/airflow/ti_deps/deps/ready_to_reschedule.py +++ b/airflow/ti_deps/deps/ready_to_reschedule.py @@ -70,5 +70,5 @@ def _get_dep_statuses(self, ti, session, dep_context): yield self._failing_status( reason="Task is not ready for reschedule yet but will be rescheduled " - "automatically. Current date is {0} and task will be rescheduled " - "at {1}.".format(now.isoformat(), next_reschedule_date.isoformat())) + "automatically. 
Current date is {} and task will be rescheduled " + "at {}.".format(now.isoformat(), next_reschedule_date.isoformat())) diff --git a/airflow/ti_deps/deps/runnable_exec_date_dep.py b/airflow/ti_deps/deps/runnable_exec_date_dep.py index add9846b33771..20cb8b6995e1f 100644 --- a/airflow/ti_deps/deps/runnable_exec_date_dep.py +++ b/airflow/ti_deps/deps/runnable_exec_date_dep.py @@ -35,14 +35,14 @@ def _get_dep_statuses(self, ti, session, dep_context): # specified by config and schedule_interval is None if ti.execution_date > cur_date and not ti.task.dag.allow_future_exec_dates: yield self._failing_status( - reason="Execution date {0} is in the future (the current " - "date is {1}).".format(ti.execution_date.isoformat(), - cur_date.isoformat())) + reason="Execution date {} is in the future (the current " + "date is {}).".format(ti.execution_date.isoformat(), + cur_date.isoformat())) if ti.task.end_date and ti.execution_date > ti.task.end_date: yield self._failing_status( - reason="The execution date is {0} but this is after the task's end date " - "{1}.".format( + reason="The execution date is {} but this is after the task's end date " + "{}.".format( ti.execution_date.isoformat(), ti.task.end_date.isoformat())) @@ -50,7 +50,7 @@ def _get_dep_statuses(self, ti, session, dep_context): ti.task.dag.end_date and ti.execution_date > ti.task.dag.end_date): yield self._failing_status( - reason="The execution date is {0} but this is after the task's DAG's " - "end date {1}.".format( + reason="The execution date is {} but this is after the task's DAG's " + "end date {}.".format( ti.execution_date.isoformat(), ti.task.dag.end_date.isoformat())) diff --git a/airflow/ti_deps/deps/trigger_rule_dep.py b/airflow/ti_deps/deps/trigger_rule_dep.py index 237a26fe746a0..3544781e21601 100644 --- a/airflow/ti_deps/deps/trigger_rule_dep.py +++ b/airflow/ti_deps/deps/trigger_rule_dep.py @@ -152,70 +152,70 @@ def _evaluate_trigger_rule( # pylint: disable=too-many-branches if trigger_rule == TR.ONE_SUCCESS: if successes <= 0: yield self._failing_status( - reason="Task's trigger rule '{0}' requires one upstream " + reason="Task's trigger rule '{}' requires one upstream " "task success, but none were found. " - "upstream_tasks_state={1}, upstream_task_ids={2}" + "upstream_tasks_state={}, upstream_task_ids={}" .format(trigger_rule, upstream_tasks_state, task.upstream_task_ids)) elif trigger_rule == TR.ONE_FAILED: if not failed and not upstream_failed: yield self._failing_status( - reason="Task's trigger rule '{0}' requires one upstream " + reason="Task's trigger rule '{}' requires one upstream " "task failure, but none were found. " - "upstream_tasks_state={1}, upstream_task_ids={2}" + "upstream_tasks_state={}, upstream_task_ids={}" .format(trigger_rule, upstream_tasks_state, task.upstream_task_ids)) elif trigger_rule == TR.ALL_SUCCESS: num_failures = upstream - successes if num_failures > 0: yield self._failing_status( - reason="Task's trigger rule '{0}' requires all upstream " - "tasks to have succeeded, but found {1} non-success(es). " - "upstream_tasks_state={2}, upstream_task_ids={3}" + reason="Task's trigger rule '{}' requires all upstream " + "tasks to have succeeded, but found {} non-success(es). 
" + "upstream_tasks_state={}, upstream_task_ids={}" .format(trigger_rule, num_failures, upstream_tasks_state, task.upstream_task_ids)) elif trigger_rule == TR.ALL_FAILED: num_successes = upstream - failed - upstream_failed if num_successes > 0: yield self._failing_status( - reason="Task's trigger rule '{0}' requires all upstream " - "tasks to have failed, but found {1} non-failure(s). " - "upstream_tasks_state={2}, upstream_task_ids={3}" + reason="Task's trigger rule '{}' requires all upstream " + "tasks to have failed, but found {} non-failure(s). " + "upstream_tasks_state={}, upstream_task_ids={}" .format(trigger_rule, num_successes, upstream_tasks_state, task.upstream_task_ids)) elif trigger_rule == TR.ALL_DONE: if not upstream_done: yield self._failing_status( - reason="Task's trigger rule '{0}' requires all upstream " - "tasks to have completed, but found {1} task(s) that " - "were not done. upstream_tasks_state={2}, " - "upstream_task_ids={3}" + reason="Task's trigger rule '{}' requires all upstream " + "tasks to have completed, but found {} task(s) that " + "were not done. upstream_tasks_state={}, " + "upstream_task_ids={}" .format(trigger_rule, upstream_done, upstream_tasks_state, task.upstream_task_ids)) elif trigger_rule == TR.NONE_FAILED: num_failures = upstream - successes - skipped if num_failures > 0: yield self._failing_status( - reason="Task's trigger rule '{0}' requires all upstream " - "tasks to have succeeded or been skipped, but found {1} non-success(es). " - "upstream_tasks_state={2}, upstream_task_ids={3}" + reason="Task's trigger rule '{}' requires all upstream " + "tasks to have succeeded or been skipped, but found {} non-success(es). " + "upstream_tasks_state={}, upstream_task_ids={}" .format(trigger_rule, num_failures, upstream_tasks_state, task.upstream_task_ids)) elif trigger_rule == TR.NONE_FAILED_OR_SKIPPED: num_failures = upstream - successes - skipped if num_failures > 0: yield self._failing_status( - reason="Task's trigger rule '{0}' requires all upstream " - "tasks to have succeeded or been skipped, but found {1} non-success(es). " - "upstream_tasks_state={2}, upstream_task_ids={3}" + reason="Task's trigger rule '{}' requires all upstream " + "tasks to have succeeded or been skipped, but found {} non-success(es). " + "upstream_tasks_state={}, upstream_task_ids={}" .format(trigger_rule, num_failures, upstream_tasks_state, task.upstream_task_ids)) elif trigger_rule == TR.NONE_SKIPPED: if not upstream_done or (skipped > 0): yield self._failing_status( - reason="Task's trigger rule '{0}' requires all upstream " - "tasks to not have been skipped, but found {1} task(s) skipped. " - "upstream_tasks_state={2}, upstream_task_ids={3}" + reason="Task's trigger rule '{}' requires all upstream " + "tasks to not have been skipped, but found {} task(s) skipped. 
" + "upstream_tasks_state={}, upstream_task_ids={}" .format(trigger_rule, skipped, upstream_tasks_state, task.upstream_task_ids)) else: yield self._failing_status( - reason="No strategy to evaluate trigger rule '{0}'.".format(trigger_rule)) + reason=f"No strategy to evaluate trigger rule '{trigger_rule}'.") diff --git a/airflow/ti_deps/deps/valid_state_dep.py b/airflow/ti_deps/deps/valid_state_dep.py index 2f0b32a11562b..1609afb819e0c 100644 --- a/airflow/ti_deps/deps/valid_state_dep.py +++ b/airflow/ti_deps/deps/valid_state_dep.py @@ -56,10 +56,10 @@ def _get_dep_statuses(self, ti, session, dep_context): return if ti.state in self._valid_states: - yield self._passing_status(reason="Task state {} was valid.".format(ti.state)) + yield self._passing_status(reason=f"Task state {ti.state} was valid.") return yield self._failing_status( - reason="Task is in the '{0}' state which is not a valid state for " + reason="Task is in the '{}' state which is not a valid state for " "execution. The task must be cleared in order to be run.".format( ti.state)) diff --git a/airflow/utils/cli.py b/airflow/utils/cli.py index b37f2c96521d1..d1fdf63d880d4 100644 --- a/airflow/utils/cli.py +++ b/airflow/utils/cli.py @@ -116,7 +116,7 @@ def _build_metrics(func_name, namespace): full_command[idx] = f'{sensitive_field}={"*" * 8}' metrics = {'sub_command': func_name, 'start_datetime': datetime.utcnow(), - 'full_command': '{}'.format(full_command), 'user': getpass.getuser()} + 'full_command': f'{full_command}', 'user': getpass.getuser()} if not isinstance(namespace, Namespace): raise ValueError("namespace argument should be argparse.Namespace instance," @@ -129,7 +129,7 @@ def _build_metrics(func_name, namespace): extra = json.dumps({k: metrics[k] for k in ('host_name', 'full_command')}) log = Log( - event='cli_{}'.format(func_name), + event=f'cli_{func_name}', task_instance=None, owner=metrics['user'], extra=extra, @@ -198,13 +198,13 @@ def get_dag_by_pickle(pickle_id, session=None): def setup_locations(process, pid=None, stdout=None, stderr=None, log=None): """Creates logging paths""" if not stderr: - stderr = os.path.join(settings.AIRFLOW_HOME, 'airflow-{}.err'.format(process)) + stderr = os.path.join(settings.AIRFLOW_HOME, f'airflow-{process}.err') if not stdout: - stdout = os.path.join(settings.AIRFLOW_HOME, 'airflow-{}.out'.format(process)) + stdout = os.path.join(settings.AIRFLOW_HOME, f'airflow-{process}.out') if not log: - log = os.path.join(settings.AIRFLOW_HOME, 'airflow-{}.log'.format(process)) + log = os.path.join(settings.AIRFLOW_HOME, f'airflow-{process}.log') if not pid: - pid = os.path.join(settings.AIRFLOW_HOME, 'airflow-{}.pid'.format(process)) + pid = os.path.join(settings.AIRFLOW_HOME, f'airflow-{process}.pid') return pid, stdout, stderr, log @@ -234,7 +234,7 @@ def sigquit_handler(sig, frame): # pylint: disable=unused-argument Helps debug deadlocks by printing stacktraces when this gets a SIGQUIT e.g. 
kill -s QUIT or CTRL+\ """ - print("Dumping stack traces for all threads in PID {}".format(os.getpid())) + print(f"Dumping stack traces for all threads in PID {os.getpid()}") id_to_name = {th.ident: th.name for th in threading.enumerate()} code = [] for thread_id, stack in sys._current_frames().items(): # pylint: disable=protected-access @@ -244,7 +244,7 @@ def sigquit_handler(sig, frame): # pylint: disable=unused-argument code.append('File: "{}", line {}, in {}' .format(filename, line_number, name)) if line: - code.append(" {}".format(line.strip())) + code.append(f" {line.strip()}") print("\n".join(code)) diff --git a/airflow/utils/dag_processing.py b/airflow/utils/dag_processing.py index bdcc7f6ce971b..e1b0358f379ee 100644 --- a/airflow/utils/dag_processing.py +++ b/airflow/utils/dag_processing.py @@ -806,11 +806,11 @@ def _log_file_processing_stats(self, known_file_paths): last_run = self.get_last_finish_time(file_path) if last_run: seconds_ago = (now - last_run).total_seconds() - Stats.gauge('dag_processing.last_run.seconds_ago.{}'.format(file_name), seconds_ago) + Stats.gauge(f'dag_processing.last_run.seconds_ago.{file_name}', seconds_ago) if runtime: - Stats.timing('dag_processing.last_duration.{}'.format(file_name), runtime) + Stats.timing(f'dag_processing.last_duration.{file_name}', runtime) # TODO: Remove before Airflow 2.0 - Stats.timing('dag_processing.last_runtime.{}'.format(file_name), runtime) + Stats.timing(f'dag_processing.last_runtime.{file_name}', runtime) rows.append((file_path, processor_pid, @@ -827,10 +827,10 @@ def _log_file_processing_stats(self, known_file_paths): for file_path, pid, runtime, num_dags, num_errors, last_runtime, last_run in rows: formatted_rows.append((file_path, pid, - "{:.2f}s".format(runtime.total_seconds()) if runtime else None, + f"{runtime.total_seconds():.2f}s" if runtime else None, num_dags, num_errors, - "{:.2f}s".format(last_runtime) if last_runtime else None, + f"{last_runtime:.2f}s" if last_runtime else None, last_run.strftime("%Y-%m-%dT%H:%M:%S") if last_run else None )) log_str = ("\n" + diff --git a/airflow/utils/decorators.py b/airflow/utils/decorators.py index a101cee743239..e8238c971e95e 100644 --- a/airflow/utils/decorators.py +++ b/airflow/utils/decorators.py @@ -84,7 +84,7 @@ def wrapper(*args: Any, **kwargs: Any) -> Any: missing_args = list(non_optional_args - set(kwargs)) if missing_args: - msg = "Argument {0} is required".format(missing_args) + msg = f"Argument {missing_args} is required" raise AirflowException(msg) kwargs['params'] = dag_params diff --git a/airflow/utils/docs.py b/airflow/utils/docs.py index 6d3b4d3d8ced5..f43652b218643 100644 --- a/airflow/utils/docs.py +++ b/airflow/utils/docs.py @@ -25,7 +25,7 @@ def get_docs_url(page: Optional[str] = None) -> str: if "dev" in version.version: result = "https://airflow.readthedocs.io/en/latest/" else: - result = 'https://airflow.apache.org/docs/{}/'.format(version.version) + result = f'https://airflow.apache.org/docs/{version.version}/' if page: result = result + page return result diff --git a/airflow/utils/email.py b/airflow/utils/email.py index b40d5d2c95ebe..f32757d2633c6 100644 --- a/airflow/utils/email.py +++ b/airflow/utils/email.py @@ -184,7 +184,7 @@ def get_email_address_list(addresses: Union[str, Iterable[str]]) -> List[str]: return list(addresses) received_type = type(addresses).__name__ - raise TypeError("Unexpected argument type: Received '{}'.".format(received_type)) + raise TypeError(f"Unexpected argument type: Received '{received_type}'.") def 
_get_email_list_from_str(addresses: str) -> List[str]: diff --git a/airflow/utils/file.py b/airflow/utils/file.py index ebd592abf7d6e..405a6b029548e 100644 --- a/airflow/utils/file.py +++ b/airflow/utils/file.py @@ -102,7 +102,7 @@ def find_path_from_directory( ignore_file_path = os.path.join(root, ignore_file_name) if os.path.isfile(ignore_file_path): - with open(ignore_file_path, 'r') as file: + with open(ignore_file_path) as file: lines_no_comments = [re.sub(r"\s*#.*", "", line) for line in file.read().split("\n")] patterns += [re.compile(line) for line in lines_no_comments if line] patterns = list(set(patterns)) diff --git a/airflow/utils/helpers.py b/airflow/utils/helpers.py index e91e1da2f2cde..01f88fcbac790 100644 --- a/airflow/utils/helpers.py +++ b/airflow/utils/helpers.py @@ -37,7 +37,7 @@ def validate_key(k, max_length=250): raise TypeError("The key has to be a string") elif len(k) > max_length: raise AirflowException( - "The key has to be less than {0} characters".format(max_length)) + f"The key has to be less than {max_length} characters") elif not KEY_REGEX.match(k): raise AirflowException( "The key ({k}) has to be made of alphanumeric characters, dashes, " diff --git a/airflow/utils/log/file_task_handler.py b/airflow/utils/log/file_task_handler.py index 5f86dd482afa2..b1170174c35aa 100644 --- a/airflow/utils/log/file_task_handler.py +++ b/airflow/utils/log/file_task_handler.py @@ -113,10 +113,10 @@ def _read(self, ti, try_number, metadata=None): # pylint: disable=unused-argume if os.path.exists(location): try: with open(location) as file: - log += "*** Reading local file: {}\n".format(location) + log += f"*** Reading local file: {location}\n" log += "".join(file.readlines()) except Exception as e: # pylint: disable=broad-except - log = "*** Failed to load local log file: {}\n".format(location) + log = f"*** Failed to load local log file: {location}\n" log += "*** {}\n".format(str(e)) elif conf.get('core', 'executor') == 'KubernetesExecutor': # pylint: disable=too-many-nested-blocks try: @@ -161,8 +161,8 @@ def _read(self, ti, try_number, metadata=None): # pylint: disable=unused-argume ti=ti, worker_log_server_port=conf.get('celery', 'WORKER_LOG_SERVER_PORT') ) - log += "*** Log file does not exist: {}\n".format(location) - log += "*** Fetching from: {}\n".format(url) + log += f"*** Log file does not exist: {location}\n" + log += f"*** Fetching from: {url}\n" try: timeout = None # No timeout try: @@ -203,7 +203,7 @@ def read(self, task_instance, try_number=None, metadata=None): try_numbers = list(range(1, next_try)) elif try_number < 1: logs = [ - [('default_host', 'Error fetching the logs. Try number {} is invalid.'.format(try_number))], + [('default_host', f'Error fetching the logs. 
Try number {try_number} is invalid.')], ] return logs else: diff --git a/airflow/utils/module_loading.py b/airflow/utils/module_loading.py index 184f6dd03d869..dbd5622d203f3 100644 --- a/airflow/utils/module_loading.py +++ b/airflow/utils/module_loading.py @@ -27,7 +27,7 @@ def import_string(dotted_path): try: module_path, class_name = dotted_path.rsplit('.', 1) except ValueError: - raise ImportError("{} doesn't look like a module path".format(dotted_path)) + raise ImportError(f"{dotted_path} doesn't look like a module path") module = import_module(module_path) diff --git a/airflow/utils/python_virtualenv.py b/airflow/utils/python_virtualenv.py index 3bce32452a44e..486371904647b 100644 --- a/airflow/utils/python_virtualenv.py +++ b/airflow/utils/python_virtualenv.py @@ -30,7 +30,7 @@ def _generate_virtualenv_cmd(tmp_dir: str, python_bin: str, system_site_packages if system_site_packages: cmd.append('--system-site-packages') if python_bin is not None: - cmd.append('--python={}'.format(python_bin)) + cmd.append(f'--python={python_bin}') return cmd @@ -38,7 +38,7 @@ def _generate_pip_install_cmd(tmp_dir: str, requirements: List[str]) -> Optional if not requirements: return None # direct path alleviates need to activate - cmd = ['{}/bin/pip'.format(tmp_dir), 'install'] + cmd = [f'{tmp_dir}/bin/pip', 'install'] return cmd + requirements @@ -69,7 +69,7 @@ def prepare_virtualenv( if pip_cmd: execute_in_subprocess(pip_cmd) - return '{}/bin/python'.format(venv_directory) + return f'{venv_directory}/bin/python' def write_python_script(jinja_context: dict, filename: str): diff --git a/airflow/utils/weekday.py b/airflow/utils/weekday.py index a57281267b2c5..d83b134956099 100644 --- a/airflow/utils/weekday.py +++ b/airflow/utils/weekday.py @@ -43,7 +43,7 @@ def get_weekday_number(cls, week_day_str): if sanitized_week_day_str not in cls.__members__: raise AttributeError( - 'Invalid Week Day passed: "{}"'.format(week_day_str) + f'Invalid Week Day passed: "{week_day_str}"' ) return cls[sanitized_week_day_str] diff --git a/airflow/www/api/experimental/endpoints.py b/airflow/www/api/experimental/endpoints.py index 6d8000f3f42fa..60b5dc6102c7e 100644 --- a/airflow/www/api/experimental/endpoints.py +++ b/airflow/www/api/experimental/endpoints.py @@ -114,7 +114,7 @@ def trigger_dag(dag_id): dr = trigger.trigger_dag(dag_id, run_id, conf, execution_date, replace_microseconds) except AirflowException as err: log.error(err) - response = jsonify(error="{}".format(err)) + response = jsonify(error=f"{err}") response.status_code = err.status_code return response @@ -122,7 +122,7 @@ def trigger_dag(dag_id): log.info("User %s created %s", g.user, dr) response = jsonify( - message="Created {}".format(dr), + message=f"Created {dr}", execution_date=dr.execution_date.isoformat(), run_id=dr.run_id ) @@ -137,10 +137,10 @@ def delete_dag(dag_id): count = delete.delete_dag(dag_id) except AirflowException as err: log.error(err) - response = jsonify(error="{}".format(err)) + response = jsonify(error=f"{err}") response.status_code = err.status_code return response - return jsonify(message="Removed {} record(s)".format(count), count=count) + return jsonify(message=f"Removed {count} record(s)", count=count) @api_experimental.route('/dags//dag_runs', methods=['GET']) @@ -159,7 +159,7 @@ def dag_runs(dag_id): dagruns = get_dag_runs(dag_id, state) except AirflowException as err: log.info(err) - response = jsonify(error="{}".format(err)) + response = jsonify(error=f"{err}") response.status_code = 400 return response @@ -188,7 +188,7 @@ 
def get_dag_code(dag_id): return get_code(dag_id) except AirflowException as err: log.info(err) - response = jsonify(error="{}".format(err)) + response = jsonify(error=f"{err}") response.status_code = err.status_code return response @@ -201,7 +201,7 @@ def task_info(dag_id, task_id): t_info = get_task(dag_id, task_id) except AirflowException as err: log.info(err) - response = jsonify(error="{}".format(err)) + response = jsonify(error=f"{err}") response.status_code = err.status_code return response @@ -264,7 +264,7 @@ def task_instance_info(dag_id, execution_date, task_id): ti_info = get_task_instance(dag_id, task_id, execution_date) except AirflowException as err: log.info(err) - response = jsonify(error="{}".format(err)) + response = jsonify(error=f"{err}") response.status_code = err.status_code return response @@ -304,7 +304,7 @@ def dag_run_status(dag_id, execution_date): dr_info = get_dag_run_state(dag_id, execution_date) except AirflowException as err: log.info(err) - response = jsonify(error="{}".format(err)) + response = jsonify(error=f"{err}") response.status_code = err.status_code return response @@ -339,7 +339,7 @@ def get_pool(name): pool = pool_api.get_pool(name=name) except AirflowException as err: log.error(err) - response = jsonify(error="{}".format(err)) + response = jsonify(error=f"{err}") response.status_code = err.status_code return response else: @@ -354,7 +354,7 @@ def get_pools(): pools = pool_api.get_pools() except AirflowException as err: log.error(err) - response = jsonify(error="{}".format(err)) + response = jsonify(error=f"{err}") response.status_code = err.status_code return response else: @@ -370,7 +370,7 @@ def create_pool(): pool = pool_api.create_pool(**params) except AirflowException as err: log.error(err) - response = jsonify(error="{}".format(err)) + response = jsonify(error=f"{err}") response.status_code = err.status_code return response else: @@ -385,7 +385,7 @@ def delete_pool(name): pool = pool_api.delete_pool(name=name) except AirflowException as err: log.error(err) - response = jsonify(error="{}".format(err)) + response = jsonify(error=f"{err}") response.status_code = err.status_code return response else: diff --git a/airflow/www/extensions/init_manifest_files.py b/airflow/www/extensions/init_manifest_files.py index 5abcee347a20d..0294fc394905d 100644 --- a/airflow/www/extensions/init_manifest_files.py +++ b/airflow/www/extensions/init_manifest_files.py @@ -34,7 +34,7 @@ def configure_manifest_files(app): def parse_manifest_json(): try: manifest_file = os.path.join(os.path.dirname(__file__), os.pardir, 'static/dist/manifest.json') - with open(manifest_file, 'r') as file: + with open(manifest_file) as file: manifest.update(json.load(file)) for source, target in manifest.copy().items(): diff --git a/airflow/www/security.py b/airflow/www/security.py index b09d9a35563d5..be35ddbd65ef6 100644 --- a/airflow/www/security.py +++ b/airflow/www/security.py @@ -186,7 +186,7 @@ def delete_role(self, role_name): session.delete(role) session.commit() else: - raise AirflowException("Role named '{}' does not exist".format(role_name)) + raise AirflowException(f"Role named '{role_name}' does not exist") @staticmethod def get_user_roles(user=None): diff --git a/airflow/www/utils.py b/airflow/www/utils.py index 93dfbd8b44f12..697f8806b1903 100644 --- a/airflow/www/utils.py +++ b/airflow/www/utils.py @@ -329,7 +329,7 @@ def wrapped_markdown(s, css_class=None): return None return Markup( - '
<div class="rich_doc {css_class}" >'.format(css_class=css_class) + markdown.markdown(s) + "</div>
" + f'<div class="rich_doc {css_class}" >
' + markdown.markdown(s) + "</div>
" ) diff --git a/airflow/www/validators.py b/airflow/www/validators.py index 2b0fed7cd773c..282789c15dbb0 100644 --- a/airflow/www/validators.py +++ b/airflow/www/validators.py @@ -76,7 +76,7 @@ def __call__(self, form, field): try: json.loads(field.data) except JSONDecodeError as ex: - message = self.message or 'JSON Validation Error: {}'.format(ex) + message = self.message or f'JSON Validation Error: {ex}' raise ValidationError( message=field.gettext(message.format(field.data)) ) diff --git a/airflow/www/views.py b/airflow/www/views.py index 47ebac0ee25b5..3d16e77924bd0 100644 --- a/airflow/www/views.py +++ b/airflow/www/views.py @@ -781,7 +781,7 @@ def code(self, session=None): except Exception as e: # pylint: disable=broad-except all_errors += ( "Exception encountered during " + - "dag_id retrieval/dag retrieval fallback/code highlighting:\n\n{}\n".format(e) + f"dag_id retrieval/dag retrieval fallback/code highlighting:\n\n{e}\n" ) html_code = Markup('

<p>Failed to load file.</p><p>Details: {}</p>
').format( # noqa escape(all_errors)) @@ -1243,7 +1243,7 @@ def run(self): failed_deps = list(ti.get_failed_dep_statuses(dep_context=dep_context)) if failed_deps: failed_deps_str = ", ".join( - ["{}: {}".format(dep.dep_name, dep.reason) for dep in failed_deps]) + [f"{dep.dep_name}: {dep.reason}" for dep in failed_deps]) flash("Could not queue task instance for execution, dependencies not met: " "{}".format(failed_deps_str), "error") @@ -1279,7 +1279,7 @@ def delete(self): try: delete_dag.delete_dag(dag_id) except DagNotFound: - flash("DAG with id {} not found. Cannot delete".format(dag_id), 'error') + flash(f"DAG with id {dag_id} not found. Cannot delete", 'error') return redirect(request.referrer) except DagFileExists: flash("Dag id {} is still in DagBag. " @@ -1328,7 +1328,7 @@ def trigger(self, session=None): dag_orm = session.query(models.DagModel).filter(models.DagModel.dag_id == dag_id).first() if not dag_orm: - flash("Cannot find dag {}".format(dag_id)) + flash(f"Cannot find dag {dag_id}") return redirect(origin) execution_date = timezone.utcnow() @@ -1377,7 +1377,7 @@ def _clear_dag_tis(self, dag, start_date, end_date, origin, only_failed=only_failed, ) - flash("{0} task instances have been cleared".format(count)) + flash(f"{count} task instances have been cleared") return redirect(origin) try: @@ -1433,7 +1433,7 @@ def clear(self): only_failed = request.form.get('only_failed') == "true" dag = dag.sub_dag( - task_ids_or_regex=r"^{0}$".format(task_id), + task_ids_or_regex=fr"^{task_id}$", include_downstream=downstream, include_upstream=upstream) @@ -1522,7 +1522,7 @@ def _mark_dagrun_state_as_failed(self, dag_id, execution_date, confirmed, origin dag = current_app.dag_bag.get_dag(dag_id) if not dag: - flash('Cannot find DAG: {}'.format(dag_id), 'error') + flash(f'Cannot find DAG: {dag_id}', 'error') return redirect(origin) new_dag_state = set_dag_run_state_to_failed(dag, execution_date, commit=confirmed) @@ -1550,7 +1550,7 @@ def _mark_dagrun_state_as_success(self, dag_id, execution_date, confirmed, origi dag = current_app.dag_bag.get_dag(dag_id) if not dag: - flash('Cannot find DAG: {}'.format(dag_id), 'error') + flash(f'Cannot find DAG: {dag_id}', 'error') return redirect(origin) new_dag_state = set_dag_run_state_to_success(dag, execution_date, @@ -1635,7 +1635,7 @@ def _mark_task_instance_state(self, # pylint: disable=too-many-arguments response = self.render_template( "airflow/confirm.html", - message=("Here's the list of task instances you are about to mark as {}:".format(state)), + message=(f"Here's the list of task instances you are about to mark as {state}:"), details=details) return response @@ -1700,7 +1700,7 @@ def tree(self): blur = conf.getboolean('webserver', 'demo_mode') dag = current_app.dag_bag.get_dag(dag_id) if not dag: - flash('DAG "{0}" seems to be missing from DagBag.'.format(dag_id), "error") + flash(f'DAG "{dag_id}" seems to be missing from DagBag.', "error") return redirect(url_for('Airflow.index')) root = request.args.get('root') @@ -1870,7 +1870,7 @@ def graph(self, session=None): blur = conf.getboolean('webserver', 'demo_mode') dag = current_app.dag_bag.get_dag(dag_id) if not dag: - flash('DAG "{0}" seems to be missing.'.format(dag_id), "error") + flash(f'DAG "{dag_id}" seems to be missing.', "error") return redirect(url_for('Airflow.index')) root = request.args.get('root') @@ -1966,7 +1966,7 @@ def duration(self, session=None): num_runs = int(num_runs) if num_runs else default_dag_run if dag is None: - flash('DAG "{0}" seems to be 
missing.'.format(dag_id), "error") + flash(f'DAG "{dag_id}" seems to be missing.', "error") return redirect(url_for('Airflow.index')) if base_date: @@ -2026,10 +2026,10 @@ def duration(self, session=None): cum_y_unit = infer_time_unit([d for t in cumulative_y.values() for d in t]) # update the y Axis on both charts to have the correct time units chart.create_y_axis('yAxis', format='.02f', custom_format=False, - label='Duration ({})'.format(y_unit)) + label=f'Duration ({y_unit})') chart.axislist['yAxis']['axisLabelDistance'] = '-15' cum_chart.create_y_axis('yAxis', format='.02f', custom_format=False, - label='Duration ({})'.format(cum_y_unit)) + label=f'Duration ({cum_y_unit})') cum_chart.axislist['yAxis']['axisLabelDistance'] = '-15' for task in dag.tasks: @@ -2189,7 +2189,7 @@ def landing_times(self, session=None): y_unit = infer_time_unit([d for t in y_points.values() for d in t]) # update the y Axis to have the correct time units chart.create_y_axis('yAxis', format='.02f', custom_format=False, - label='Landing Time ({})'.format(y_unit)) + label=f'Landing Time ({y_unit})') chart.axislist['yAxis']['axisLabelDistance'] = '-15' for task in dag.tasks: if x_points[task.task_id]: @@ -2250,7 +2250,7 @@ def refresh(self, session=None): # sync dag permission current_app.appbuilder.sm.sync_perm_for_dag(dag_id, dag.access_control) - flash("DAG [{}] is now fresh as a daisy".format(dag_id)) + flash(f"DAG [{dag_id}] is now fresh as a daisy") return redirect(request.referrer) @expose('/refresh_all', methods=['POST']) @@ -2419,7 +2419,7 @@ def extra_links(self): return response else: response = jsonify( - {'url': None, 'error': 'No URL found for {dest}'.format(dest=link_name)}) + {'url': None, 'error': f'No URL found for {link_name}'}) response.status_code = 404 return response @@ -2466,7 +2466,7 @@ def conf(self): subtitle = AIRFLOW_CONFIG # Don't show config when expose_config variable is False in airflow config if conf.getboolean("webserver", "expose_config"): - with open(AIRFLOW_CONFIG, 'r') as file: + with open(AIRFLOW_CONFIG) as file: config = file.read() table = [(section, key, value, source) for section, parameters in conf.as_dict(True, True).items() @@ -2958,9 +2958,9 @@ def varimport(self): fail_count += 1 else: suc_count += 1 - flash("{} variable(s) successfully updated.".format(suc_count)) + flash(f"{suc_count} variable(s) successfully updated.") if fail_count: - flash("{} variable(s) failed to be updated.".format(fail_count), 'error') + flash(f"{fail_count} variable(s) failed to be updated.", 'error') self.update_redirect() return redirect(self.get_redirect()) @@ -3067,7 +3067,7 @@ def action_set_running(self, drs, session=None): dr.start_date = timezone.utcnow() dr.state = State.RUNNING session.commit() - flash("{count} dag runs were set to running".format(count=count)) + flash(f"{count} dag runs were set to running") except Exception as ex: # pylint: disable=broad-except flash(str(ex), 'error') flash('Failed to set state', 'error') @@ -3312,7 +3312,7 @@ def action_clear(self, task_instances, session=None): models.clear_task_instances(task_instances_list, session, dag=dag) session.commit() - flash("{0} task instances have been cleared".format(len(task_instances))) + flash("{} task instances have been cleared".format(len(task_instances))) self.update_redirect() return redirect(self.get_redirect()) except Exception: # noqa pylint: disable=broad-except diff --git a/breeze-complete b/breeze-complete index 33f48477e8f90..1d65d1792be56 100644 --- a/breeze-complete +++ b/breeze-complete @@ -109,6 
+109,7 @@ pydocstyle pylint pylint-tests python-no-log-warn +pyupgrade restrict-start_date rst-backticks setup-order diff --git a/chart/tests/test_chart_quality.py b/chart/tests/test_chart_quality.py index 389894151d26e..32237cb7aa43d 100644 --- a/chart/tests/test_chart_quality.py +++ b/chart/tests/test_chart_quality.py @@ -28,9 +28,9 @@ class ChartQualityTest(unittest.TestCase): def test_values_validate_schema(self): - with open(os.path.join(CHART_FOLDER, "values.yaml"), "r") as f: + with open(os.path.join(CHART_FOLDER, "values.yaml")) as f: values = yaml.safe_load(f) - with open(os.path.join(CHART_FOLDER, "values.schema.json"), "r") as f: + with open(os.path.join(CHART_FOLDER, "values.schema.json")) as f: schema = json.load(f) # Add extra restrictions just for the tests to make sure diff --git a/dev/airflow-github b/dev/airflow-github index 1aaf3c350b905..17490f7c55ff8 100755 --- a/dev/airflow-github +++ b/dev/airflow-github @@ -185,7 +185,7 @@ def compare(target_version, github_token, previous_version=None, show_uncherrypi merged=cherrypicked, commit=commit_in_master if commit_in_master else "")) - print("Commits on branch: {0:d}, {1:d} ({2}) yet to be cherry-picked".format( + print("Commits on branch: {:d}, {:d} ({}) yet to be cherry-picked".format( num_cherrypicked, sum(num_uncherrypicked.values()), dict(num_uncherrypicked))) @@ -197,7 +197,7 @@ def changelog(previous_version, target_version, github_token): repo = git.Repo(".", search_parent_directories=True) # Get a list of issues/PRs that have been committed on the current branch. log_args = [ - '--format={}'.format(GIT_LOG_FORMAT), previous_version + ".." + target_version] + f'--format={GIT_LOG_FORMAT}', previous_version + ".." + target_version] log = repo.git.log(*log_args) log = log.strip('\n\x1e').split("\x1e") diff --git a/dev/airflow-license b/dev/airflow-license index 367d8ffb58103..3487309ea38d3 100755 --- a/dev/airflow-license +++ b/dev/airflow-license @@ -31,16 +31,16 @@ _licenses = {'MIT': ['Permission is hereby granted free of charge', 'The above c def get_notices(): - license_file = open("../LICENSE", "r") + license_file = open("../LICENSE") regex = r"\((.+?)\) (.+?) \((http.+?)\)" return list(filter(None, [re.findall(regex, line) for line in license_file])) def parse_license_file(project_name): - name = re.match("^[a-z0-9\-]+", project_name.lower()) + name = re.match(r"^[a-z0-9\-]+", project_name.lower()) name = slugify.slugify(name.group(0)) - path = "../licenses/LICENSE-{}.txt".format(name) + path = f"../licenses/LICENSE-{name}.txt" if os.path.exists(path): data = " ".join(line.strip() for line in open(path)).lower() data = data.translate(None, string.punctuation) diff --git a/docs/exts/docroles.py b/docs/exts/docroles.py index 58346ab0f6adc..5dd71cd1bfc6d 100644 --- a/docs/exts/docroles.py +++ b/docs/exts/docroles.py @@ -43,17 +43,17 @@ def get_template_field(env, fullname): with mock(env.config.autodoc_mock_imports): mod = import_module(modname) except ImportError: - raise RoleException("Error loading %s module." % (modname, )) + raise RoleException(f"Error loading {modname} module.") clazz = getattr(mod, classname) if not clazz: - raise RoleException("Error finding %s class in %s module." % (classname, modname)) + raise RoleException(f"Error finding {classname} class in {modname} module.") template_fields = getattr(clazz, "template_fields") if not template_fields: raise RoleException( - "Could not find the template fields for %s class in %s module." 
% (classname, modname) + f"Could not find the template fields for {classname} class in {modname} module." ) return list(template_fields) @@ -90,7 +90,7 @@ def template_field_role(app, try: template_fields = get_template_field(app.env, text) except RoleException as e: - msg = inliner.reporter.error("invalid class name %s \n%s" % (text, e, ), line=lineno) + msg = inliner.reporter.error(f"invalid class name {text} \n{e}", line=lineno) prb = inliner.problematic(rawtext, rawtext, msg) return [prb], [msg] diff --git a/docs/exts/sphinx_script_update.py b/docs/exts/sphinx_script_update.py index 4b3930a5b3eac..e5d6062001af1 100644 --- a/docs/exts/sphinx_script_update.py +++ b/docs/exts/sphinx_script_update.py @@ -60,7 +60,7 @@ def fetch_and_cache(script_url: str, output_filename: str): cache_metadata: Dict[str, str] = {} if os.path.exists(cache_metadata_filepath): try: - with open(cache_metadata_filepath, "r") as cache_file: + with open(cache_metadata_filepath) as cache_file: cache_metadata = json.load(cache_file) except json.JSONDecodeError: os.remove(cache_metadata_filepath) diff --git a/kubernetes_tests/test_kubernetes_executor.py b/kubernetes_tests/test_kubernetes_executor.py index 948ec96413767..de2726aca7f74 100644 --- a/kubernetes_tests/test_kubernetes_executor.py +++ b/kubernetes_tests/test_kubernetes_executor.py @@ -40,8 +40,8 @@ class TestKubernetesExecutor(unittest.TestCase): @staticmethod def _describe_resources(namespace: str): print("=" * 80) - print("Describe resources for namespace {}".format(namespace)) - print("Datetime: {}".format(datetime.utcnow())) + print(f"Describe resources for namespace {namespace}") + print(f"Datetime: {datetime.utcnow()}") print("=" * 80) print("Describing pods") print("-" * 80) @@ -80,7 +80,7 @@ def _get_session_with_retries(self): def _ensure_airflow_webserver_is_healthy(self): response = self.session.get( - "http://{host}/health".format(host=KUBERNETES_HOST_PORT), + f"http://{KUBERNETES_HOST_PORT}/health", timeout=1, ) @@ -116,7 +116,7 @@ def monitor_task(self, host, execution_date, dag_id, task_id, expected_final_sta result_json = result.json() print(f"Received [monitor_task]#2: {result_json}") state = result_json['state'] - print("Attempt {}: Current state of operator is {}".format(tries, state)) + print(f"Attempt {tries}: Current state of operator is {state}") if state == expected_final_state: break @@ -124,9 +124,9 @@ def monitor_task(self, host, execution_date, dag_id, task_id, expected_final_sta self._describe_resources(namespace="default") tries += 1 except requests.exceptions.ConnectionError as e: - check_call(["echo", "api call failed. trying again. error {}".format(e)]) + check_call(["echo", f"api call failed. trying again. 
error {e}"]) if state != expected_final_state: - print("The expected state is wrong {} != {} (expected)!".format(state, expected_final_state)) + print(f"The expected state is wrong {state} != {expected_final_state} (expected)!") self.assertEqual(state, expected_final_state) def ensure_dag_expected_state(self, host, execution_date, dag_id, @@ -149,8 +149,8 @@ def ensure_dag_expected_state(self, host, execution_date, dag_id, print(f"Received: {result}") state = result_json['state'] check_call( - ["echo", "Attempt {}: Current state of dag is {}".format(tries, state)]) - print("Attempt {}: Current state of dag is {}".format(tries, state)) + ["echo", f"Attempt {tries}: Current state of dag is {state}"]) + print(f"Attempt {tries}: Current state of dag is {state}") if state == expected_final_state: break diff --git a/kubernetes_tests/test_kubernetes_pod_operator.py b/kubernetes_tests/test_kubernetes_pod_operator.py index dec6dcf3594f5..c78a821b3b30c 100644 --- a/kubernetes_tests/test_kubernetes_pod_operator.py +++ b/kubernetes_tests/test_kubernetes_pod_operator.py @@ -572,7 +572,7 @@ def test_pod_failure(self): def test_xcom_push(self): return_value = '{"foo": "bar"\n, "buzz": 2}' - args = ['echo \'{}\' > /airflow/xcom/return.json'.format(return_value)] + args = [f'echo \'{return_value}\' > /airflow/xcom/return.json'] k = KubernetesPodOperator( namespace='default', image="ubuntu:16.04", diff --git a/metastore_browser/hive_metastore.py b/metastore_browser/hive_metastore.py index 98413695155c5..6335891dba9d0 100644 --- a/metastore_browser/hive_metastore.py +++ b/metastore_browser/hive_metastore.py @@ -129,10 +129,10 @@ def objects(self): where_clause = '' if DB_ALLOW_LIST: dbs = ",".join(["'" + db + "'" for db in DB_ALLOW_LIST]) - where_clause = "AND b.name IN ({})".format(dbs) + where_clause = f"AND b.name IN ({dbs})" if DB_DENY_LIST: dbs = ",".join(["'" + db + "'" for db in DB_DENY_LIST]) - where_clause = "AND b.name NOT IN ({})".format(dbs) + where_clause = f"AND b.name NOT IN ({dbs})" sql = """ SELECT CONCAT(b.NAME, '.', a.TBL_NAME), TBL_TYPE FROM TBLS a @@ -156,7 +156,7 @@ def objects(self): def data(self): """Retrieve data from table""" table = request.args.get("table") - sql = "SELECT * FROM {table} LIMIT 1000;".format(table=table) + sql = f"SELECT * FROM {table} LIMIT 1000;" hook = PrestoHook(PRESTO_CONN_ID) df = hook.get_pandas_df(sql) return df.to_html( @@ -168,7 +168,7 @@ def data(self): def ddl(self): """Retrieve table ddl""" table = request.args.get("table") - sql = "SHOW CREATE TABLE {table};".format(table=table) + sql = f"SHOW CREATE TABLE {table};" hook = HiveCliHook(HIVE_CLI_CONN_ID) return hook.run_cli(sql) diff --git a/provider_packages/prepare_provider_packages.py b/provider_packages/prepare_provider_packages.py index 1a1790802a960..ef12b2aedfa7e 100644 --- a/provider_packages/prepare_provider_packages.py +++ b/provider_packages/prepare_provider_packages.py @@ -276,7 +276,7 @@ def get_package_extras(provider_package_id: str, backport_packages: bool) -> Dic """ if provider_package_id == 'providers': return {} - with open(DEPENDENCIES_JSON_FILE, "rt") as dependencies_file: + with open(DEPENDENCIES_JSON_FILE) as dependencies_file: cross_provider_dependencies: Dict[str, List[str]] = json.load(dependencies_file) extras_dict = {module: [get_pip_package_name(module, backport_packages=backport_packages)] for module in cross_provider_dependencies[provider_package_id]} \ @@ -798,7 +798,7 @@ def get_all_releases(provider_package_path: str, backport_packages: bool) -> Lis for file_name in 
sorted(changes_file_names, reverse=True): if file_name.startswith(changes_file_prefix) and file_name.endswith(".md"): changes_file_path = os.path.join(provider_package_path, file_name) - with open(changes_file_path, "rt") as changes_file: + with open(changes_file_path) as changes_file: content = changes_file.read() found = re.search(r'/([a-z0-9]*)\)', content, flags=re.MULTILINE) if not found: @@ -892,7 +892,7 @@ def get_cross_provider_dependent_packages(provider_package_id: str) -> List[str] :param provider_package_id: package id :return: list of cross-provider dependencies """ - with open(os.path.join(PROVIDERS_PATH, "dependencies.json"), "rt") as dependencies_file: + with open(os.path.join(PROVIDERS_PATH, "dependencies.json")) as dependencies_file: dependent_packages = json.load(dependencies_file).get(provider_package_id) or [] return dependent_packages @@ -973,7 +973,7 @@ def get_additional_package_info(provider_package_path: str) -> str: """ additional_info_file_path = os.path.join(provider_package_path, "ADDITIONAL_INFO.md") if os.path.isfile(additional_info_file_path): - with open(additional_info_file_path, "rt") as additional_info_file: + with open(additional_info_file_path) as additional_info_file: additional_info = additional_info_file.read() additional_info_lines = additional_info.splitlines(keepends=True) @@ -1151,7 +1151,7 @@ def prepare_readme_and_changes_files(backport_packages, context, current_release "BACKPORT_PROVIDER_README.md" if backport_packages else "README.md") old_text = "" if os.path.isfile(readme_file_path): - with open(readme_file_path, "rt") as readme_file_read: + with open(readme_file_path) as readme_file_read: old_text = readme_file_read.read() if old_text != readme: _, temp_file_path = tempfile.mkstemp(".md") @@ -1333,7 +1333,7 @@ def copy_readme_and_changelog(provider_package_id: str, readme_target = os.path.join(MY_DIR_PATH, "README.md") copyfile(readme_source, readme_target) changelog_target = os.path.join(MY_DIR_PATH, "CHANGELOG.txt") - with open(readme_source, "rt") as infile, open(changelog_target, 'wt') as outfile: + with open(readme_source) as infile, open(changelog_target, 'wt') as outfile: copy = False for line in infile: if line.strip() == "## Releases": diff --git a/provider_packages/refactor_provider_packages.py b/provider_packages/refactor_provider_packages.py index 4cecf3409279f..3866975eed3cf 100755 --- a/provider_packages/refactor_provider_packages.py +++ b/provider_packages/refactor_provider_packages.py @@ -76,7 +76,7 @@ def copy_helper_py_file(target_file_path: str) -> None: source_helper_file_path = os.path.join(get_source_airflow_folder(), "airflow", "utils", "helpers.py") - with open(source_helper_file_path, "rt") as in_file: + with open(source_helper_file_path) as in_file: with open(target_file_path, "wt") as out_file: for line in in_file: out_file.write(line.replace('airflow.models.baseoperator', 'airflow.utils.helpers')) diff --git a/scripts/ci/pre_commit/pre_commit_check_order_setup.py b/scripts/ci/pre_commit/pre_commit_check_order_setup.py index 57f56624edfbc..f255c2d0e34de 100755 --- a/scripts/ci/pre_commit/pre_commit_check_order_setup.py +++ b/scripts/ci/pre_commit/pre_commit_check_order_setup.py @@ -77,7 +77,7 @@ def check_sub_dependent_group(setup_context: str) -> None: for group_name in dependent_group_names: pattern_sub_dependent = re.compile( - '{group_name} = \\[(.*?)\\]'.format(group_name=group_name), re.DOTALL) + f'{group_name} = \\[(.*?)\\]', re.DOTALL) sub_dependent = pattern_sub_dependent.findall(setup_context)[0] 
pattern_dependent = re.compile('\'(.*?)\'') dependent = pattern_dependent.findall(sub_dependent) diff --git a/scripts/ci/pre_commit/pre_commit_insert_extras.py b/scripts/ci/pre_commit/pre_commit_insert_extras.py index b95fe992eae93..ce6706975ef14 100755 --- a/scripts/ci/pre_commit/pre_commit_insert_extras.py +++ b/scripts/ci/pre_commit/pre_commit_insert_extras.py @@ -38,7 +38,7 @@ def insert_documentation(file_path: str, content: List[str], header: str, footer: str): - with open(file_path, "r") as documentation_file: + with open(file_path) as documentation_file: replacing = False result: List[str] = [] text = documentation_file.readlines() diff --git a/scripts/in_container/update_quarantined_test_status.py b/scripts/in_container/update_quarantined_test_status.py index 8a84879aa3f9a..ba5bf3f3209c4 100755 --- a/scripts/in_container/update_quarantined_test_status.py +++ b/scripts/in_container/update_quarantined_test_status.py @@ -178,7 +178,7 @@ def get_table(history_map: Dict[str, TestHistory]) -> str: print("Provide XML JUNIT FILE as first argument") sys.exit(1) - with open(sys.argv[1], "r") as f: + with open(sys.argv[1]) as f: text = f.read() y = BeautifulSoup(text, "html.parser") res = y.testsuites.testsuite.findAll("testcase") @@ -233,7 +233,7 @@ def get_table(history_map: Dict[str, TestHistory]) -> str: print() print(table) print() - with open(join(dirname(realpath(__file__)), "quarantine_issue_header.md"), "r") as f: + with open(join(dirname(realpath(__file__)), "quarantine_issue_header.md")) as f: header = jinja2.Template(f.read(), autoescape=True, undefined=StrictUndefined).\ render(DATE_UTC_NOW=datetime.utcnow()) quarantined_issue.edit(title=None, diff --git a/scripts/tools/list-integrations.py b/scripts/tools/list-integrations.py index 6190c8e4811e3..73c514c7c1ee3 100755 --- a/scripts/tools/list-integrations.py +++ b/scripts/tools/list-integrations.py @@ -61,7 +61,7 @@ def _find_clazzes(directory, base_class): ] for found_clazz in integration_clazzes: - found_classes.add("{}.{}".format(found_clazz.__module__, found_clazz.__name__)) + found_classes.add(f"{found_clazz.__module__}.{found_clazz.__name__}") return found_classes diff --git a/setup.py b/setup.py index 0f10995c91eff..f0872b0c8d8bc 100644 --- a/setup.py +++ b/setup.py @@ -145,9 +145,9 @@ def git_version(version_: str) -> str: if repo: sha = repo.head.commit.hexsha if repo.is_dirty(): - return '.dev0+{sha}.dirty'.format(sha=sha) + return f'.dev0+{sha}.dirty' # commit is clean - return '.release:{version}+{sha}'.format(version=version_, sha=sha) + return f'.release:{version_}+{sha}' else: return 'no_git_version' diff --git a/tests/api_connexion/endpoints/test_log_endpoint.py b/tests/api_connexion/endpoints/test_log_endpoint.py index 26ebb14e32018..1f74f76df3e3b 100644 --- a/tests/api_connexion/endpoints/test_log_endpoint.py +++ b/tests/api_connexion/endpoints/test_log_endpoint.py @@ -102,7 +102,7 @@ def _configure_loggers(self): # Write the custom logging configuration to a file self.settings_folder = tempfile.mkdtemp() settings_file = os.path.join(self.settings_folder, "airflow_local_settings.py") - new_logging_file = "LOGGING_CONFIG = {}".format(logging_config) + new_logging_file = f"LOGGING_CONFIG = {logging_config}" with open(settings_file, 'w') as handle: handle.writelines(new_logging_file) sys.path.append(self.settings_folder) diff --git a/tests/build_provider_packages_dependencies.py b/tests/build_provider_packages_dependencies.py index 87cc09afa84c9..9542759a73c92 100644 --- 
a/tests/build_provider_packages_dependencies.py +++ b/tests/build_provider_packages_dependencies.py @@ -129,7 +129,7 @@ def get_import_name_from_import_from(self, node: ImportFrom) -> List[str]: # no import_names: List[str] = [] for alias in node.names: name = alias.name - fullname = '%s.%s' % (node.module, name) if node.module else name + fullname = f'{node.module}.{name}' if node.module else name import_names.append(fullname) return import_names @@ -151,7 +151,7 @@ def get_imports_from_file(file_name: str) -> List[str]: :return: list of import names """ try: - with open(file_name, "rt", encoding="utf-8") as f: + with open(file_name, encoding="utf-8") as f: root = parse(f.read(), file_name) except Exception: print(f"Error when opening file {file_name}", file=sys.stderr) @@ -244,7 +244,7 @@ def insert_documentation(deps_dict: Dict[str, List[str]], res: List[str]): print(f"Written provider dependencies to the file {provider_dependencies_file_name}") print() if documentation_file_name: - with open(documentation_file_name, "r", encoding="utf-8") as documentation_file: + with open(documentation_file_name, encoding="utf-8") as documentation_file: text = documentation_file.readlines() replacing = False result: List[str] = [] diff --git a/tests/cli/commands/test_dag_command.py b/tests/cli/commands/test_dag_command.py index 8fe2d2ad32b4d..87b58320389a7 100644 --- a/tests/cli/commands/test_dag_command.py +++ b/tests/cli/commands/test_dag_command.py @@ -96,7 +96,7 @@ def test_backfill(self, mock_run): '--start-date', DEFAULT_DATE.isoformat()]), dag=dag) output = stdout.getvalue() - self.assertIn("Dry run of DAG example_bash_operator on {}\n".format(DEFAULT_DATE.isoformat()), output) + self.assertIn(f"Dry run of DAG example_bash_operator on {DEFAULT_DATE.isoformat()}\n", output) self.assertIn("Task runme_0\n", output) mock_run.assert_not_called() # Dry run shouldn't run the backfill diff --git a/tests/cli/commands/test_info_command.py b/tests/cli/commands/test_info_command.py index 3e373dbdc6c90..56eb584f9241a 100644 --- a/tests/cli/commands/test_info_command.py +++ b/tests/cli/commands/test_info_command.py @@ -65,7 +65,7 @@ class TestAirflowInfo(unittest.TestCase): def test_should_be_string(self): text = str(info_command.AirflowInfo(info_command.NullAnonymizer())) - self.assertIn("Apache Airflow [{}]".format(airflow_version), text) + self.assertIn(f"Apache Airflow [{airflow_version}]", text) class TestSystemInfo(unittest.TestCase): @@ -135,7 +135,7 @@ def test_show_info(self): info_command.show_info(self.parser.parse_args(["info"])) output = stdout.getvalue() - self.assertIn("Apache Airflow [{}]".format(airflow_version), output) + self.assertIn(f"Apache Airflow [{airflow_version}]", output) self.assertIn("postgresql+psycopg2://postgres:airflow@postgres/airflow", output) @conf_vars( @@ -148,7 +148,7 @@ def test_show_info_anonymize(self): info_command.show_info(self.parser.parse_args(["info", "--anonymize"])) output = stdout.getvalue() - self.assertIn("Apache Airflow [{}]".format(airflow_version), output) + self.assertIn(f"Apache Airflow [{airflow_version}]", output) self.assertIn("postgresql+psycopg2://p...s:PASSWORD@postgres/airflow", output) @conf_vars( diff --git a/tests/cli/commands/test_user_command.py b/tests/cli/commands/test_user_command.py index fb61e0fdbb1a9..70595430ab5c4 100644 --- a/tests/cli/commands/test_user_command.py +++ b/tests/cli/commands/test_user_command.py @@ -94,9 +94,9 @@ def test_cli_delete_user(self): def test_cli_list_users(self): for i in range(0, 3): args = 
self.parser.parse_args([ - 'users', 'create', '--username', 'user{}'.format(i), '--lastname', + 'users', 'create', '--username', f'user{i}', '--lastname', 'doe', '--firstname', 'jon', - '--email', 'jdoe+{}@gmail.com'.format(i), '--role', 'Viewer', + '--email', f'jdoe+{i}@gmail.com', '--role', 'Viewer', '--use-random-password' ]) user_command.users_create(args) @@ -104,7 +104,7 @@ def test_cli_list_users(self): user_command.users_list(self.parser.parse_args(['users', 'list'])) stdout = stdout.getvalue() for i in range(0, 3): - self.assertIn('user{}'.format(i), stdout) + self.assertIn(f'user{i}', stdout) def test_cli_list_users_with_args(self): user_command.users_list(self.parser.parse_args(['users', 'list', '--output', 'tsv'])) @@ -176,7 +176,7 @@ def test_cli_export_users(self): def find_by_username(username): matches = [u for u in retrieved_users if u['username'] == username] if not matches: - self.fail("Couldn't find user with username {}".format(username)) + self.fail(f"Couldn't find user with username {username}") matches[0].pop('id') # this key not required for import return matches[0] diff --git a/tests/cli/commands/test_variable_command.py b/tests/cli/commands/test_variable_command.py index 1aefdcfe96f10..71a4d9842caf1 100644 --- a/tests/cli/commands/test_variable_command.py +++ b/tests/cli/commands/test_variable_command.py @@ -149,7 +149,7 @@ def test_variables_isolation(self): variable_command.variables_export(self.parser.parse_args([ 'variables', 'export', tmp1.name])) - first_exp = open(tmp1.name, 'r') + first_exp = open(tmp1.name) variable_command.variables_set(self.parser.parse_args([ 'variables', 'set', 'bar', 'updated'])) @@ -167,7 +167,7 @@ def test_variables_isolation(self): variable_command.variables_export(self.parser.parse_args([ 'variables', 'export', tmp2.name])) - second_exp = open(tmp2.name, 'r') + second_exp = open(tmp2.name) self.assertEqual(first_exp.read(), second_exp.read()) # Clean up files diff --git a/tests/cli/commands/test_webserver_command.py b/tests/cli/commands/test_webserver_command.py index ad4fd12d94a41..2468466fb6513 100644 --- a/tests/cli/commands/test_webserver_command.py +++ b/tests/cli/commands/test_webserver_command.py @@ -288,7 +288,7 @@ def test_cli_webserver_foreground(self): def test_cli_webserver_foreground_with_pid(self): with tempfile.TemporaryDirectory(prefix='tmp-pid') as tmpdir: - pidfile = "{}/pidfile".format(tmpdir) + pidfile = f"{tmpdir}/pidfile" with mock.patch.dict( "os.environ", AIRFLOW__CORE__DAGS_FOLDER="/dev/null", @@ -313,11 +313,11 @@ def test_cli_webserver_background(self): AIRFLOW__CORE__DAGS_FOLDER="/dev/null", AIRFLOW__CORE__LOAD_EXAMPLES="False", AIRFLOW__WEBSERVER__WORKERS="1"): - pidfile_webserver = "{}/pidflow-webserver.pid".format(tmpdir) - pidfile_monitor = "{}/pidflow-webserver-monitor.pid".format(tmpdir) - stdout = "{}/airflow-webserver.out".format(tmpdir) - stderr = "{}/airflow-webserver.err".format(tmpdir) - logfile = "{}/airflow-webserver.log".format(tmpdir) + pidfile_webserver = f"{tmpdir}/pidflow-webserver.pid" + pidfile_monitor = f"{tmpdir}/pidflow-webserver-monitor.pid" + stdout = f"{tmpdir}/airflow-webserver.out" + stderr = f"{tmpdir}/airflow-webserver.err" + logfile = f"{tmpdir}/airflow-webserver.log" try: # Run webserver as daemon in background. Note that the wait method is not called. 
proc = subprocess.Popen([ @@ -350,7 +350,7 @@ def test_cli_webserver_background(self): # List all logs subprocess.Popen(["ls", "-lah", tmpdir]).wait() # Dump all logs - subprocess.Popen(["bash", "-c", "ls {}/* | xargs -n 1 -t cat".format(tmpdir)]).wait() + subprocess.Popen(["bash", "-c", f"ls {tmpdir}/* | xargs -n 1 -t cat"]).wait() raise # Patch for causing webserver timeout @@ -372,6 +372,6 @@ def test_cli_webserver_debug(self): self.assertEqual( None, return_code, - "webserver terminated with return code {} in debug mode".format(return_code)) + f"webserver terminated with return code {return_code} in debug mode") proc.terminate() self.assertEqual(-15, proc.wait(60)) diff --git a/tests/core/test_configuration.py b/tests/core/test_configuration.py index 094bc61264ffb..7a07c3be02105 100644 --- a/tests/core/test_configuration.py +++ b/tests/core/test_configuration.py @@ -384,8 +384,8 @@ def test_getsection(self): def test_get_section_should_respect_cmd_env_variable(self): with tempfile.NamedTemporaryFile(delete=False) as cmd_file: - cmd_file.write("#!/usr/bin/env bash\n".encode()) - cmd_file.write("echo -n difficult_unpredictable_cat_password\n".encode()) + cmd_file.write(b"#!/usr/bin/env bash\n") + cmd_file.write(b"echo -n difficult_unpredictable_cat_password\n") cmd_file.flush() os.chmod(cmd_file.name, 0o0555) cmd_file.close() @@ -539,7 +539,7 @@ def make_config(): def test_deprecated_funcs(self): for func in ['load_test_config', 'get', 'getboolean', 'getfloat', 'getint', 'has_option', 'remove_option', 'as_dict', 'set']: - with mock.patch('airflow.configuration.conf.{}'.format(func)) as mock_method: + with mock.patch(f'airflow.configuration.conf.{func}') as mock_method: with self.assertWarns(DeprecationWarning): getattr(configuration, func)() mock_method.assert_called_once() @@ -630,9 +630,9 @@ def test_write_should_respect_env_variable(self): def test_run_command(self): write = r'sys.stdout.buffer.write("\u1000foo".encode("utf8"))' - cmd = 'import sys; {0}; sys.stdout.flush()'.format(write) + cmd = f'import sys; {write}; sys.stdout.flush()' - self.assertEqual(run_command("python -c '{0}'".format(cmd)), '\u1000foo') + self.assertEqual(run_command(f"python -c '{cmd}'"), '\u1000foo') self.assertEqual(run_command('echo "foo bar"'), 'foo bar\n') self.assertRaises(AirflowConfigException, run_command, 'bash -c "exit 1"') diff --git a/tests/core/test_core_to_contrib.py b/tests/core/test_core_to_contrib.py index c228fba7ef61d..e44287ccc7b26 100644 --- a/tests/core/test_core_to_contrib.py +++ b/tests/core/test_core_to_contrib.py @@ -31,12 +31,12 @@ class TestMovingCoreToContrib(TestCase): @staticmethod def assert_warning(msg: str, warning: Any): - error = "Text '{}' not in warnings".format(msg) + error = f"Text '{msg}' not in warnings" assert any(msg in str(w) for w in warning.warnings), error def assert_is_subclass(self, clazz, other): self.assertTrue( - issubclass(clazz, other), "{} is not subclass of {}".format(clazz, other) + issubclass(clazz, other), f"{clazz} is not subclass of {other}" ) def assert_proper_import(self, old_resource, new_resource): @@ -80,7 +80,7 @@ def test_is_class_deprecated(self, new_module, old_module): deprecation_warning_msg = "This class is deprecated." 
old_module_class = self.get_class_from_path(old_module) with self.assertWarnsRegex(DeprecationWarning, deprecation_warning_msg) as wrn: - with mock.patch("{}.__init__".format(new_module)) as init_mock: + with mock.patch(f"{new_module}.__init__") as init_mock: init_mock.return_value = None klass = old_module_class() if isinstance(klass, BaseOperator): @@ -93,7 +93,7 @@ def test_is_class_deprecated(self, new_module, old_module): @parameterized.expand(ALL) def test_is_subclass(self, parent_class_path, sub_class_path): self.skip_test_with_mssql_in_py38(parent_class_path, sub_class_path) - with mock.patch("{}.__init__".format(parent_class_path)): + with mock.patch(f"{parent_class_path}.__init__"): parent_class_path = self.get_class_from_path(parent_class_path, parent=True) sub_class_path = self.get_class_from_path(sub_class_path) self.assert_is_subclass(sub_class_path, parent_class_path) diff --git a/tests/core/test_local_settings.py b/tests/core/test_local_settings.py index 09e03f8e981f2..9f32d3583a09f 100644 --- a/tests/core/test_local_settings.py +++ b/tests/core/test_local_settings.py @@ -61,7 +61,7 @@ class SettingsContext: def __init__(self, content: str, module_name: str): self.content = content self.settings_root = tempfile.mkdtemp() - filename = "{}.py".format(module_name) + filename = f"{module_name}.py" self.settings_file = os.path.join(self.settings_root, filename) def __enter__(self): diff --git a/tests/core/test_stats.py b/tests/core/test_stats.py index 0b935653b8569..fd2acd7135ad7 100644 --- a/tests/core/test_stats.py +++ b/tests/core/test_stats.py @@ -255,7 +255,7 @@ def test_not_increment_counter_if_not_allowed(self): def always_invalid(stat_name): - raise InvalidStatsNameException("Invalid name: {}".format(stat_name)) + raise InvalidStatsNameException(f"Invalid name: {stat_name}") def always_valid(stat_name): diff --git a/tests/dags/test_impersonation_subdag.py b/tests/dags/test_impersonation_subdag.py index 9bc7310e3098e..a55d5227e749a 100644 --- a/tests/dags/test_impersonation_subdag.py +++ b/tests/dags/test_impersonation_subdag.py @@ -35,7 +35,7 @@ def print_today(): - print('Today is {}'.format(datetime.utcnow())) + print(f'Today is {datetime.utcnow()}') subdag = DAG('impersonation_subdag.test_subdag_operation', diff --git a/tests/dags/test_latest_runs.py b/tests/dags/test_latest_runs.py index b239cea09ab3d..a4ea8eb8afaea 100644 --- a/tests/dags/test_latest_runs.py +++ b/tests/dags/test_latest_runs.py @@ -23,7 +23,7 @@ from airflow.operators.dummy_operator import DummyOperator for i in range(1, 2): - dag = DAG(dag_id='test_latest_runs_{}'.format(i)) + dag = DAG(dag_id=f'test_latest_runs_{i}') task = DummyOperator( task_id='dummy_task', dag=dag, diff --git a/tests/dags/test_subdag.py b/tests/dags/test_subdag.py index 7b1ce9eb838d1..cc06095d10d26 100644 --- a/tests/dags/test_subdag.py +++ b/tests/dags/test_subdag.py @@ -41,14 +41,14 @@ def subdag(parent_dag_name, child_dag_name, args): Create a subdag. 
""" dag_subdag = DAG( - dag_id='%s.%s' % (parent_dag_name, child_dag_name), + dag_id=f'{parent_dag_name}.{child_dag_name}', default_args=args, schedule_interval="@daily", ) for i in range(2): DummyOperator( - task_id='%s-task-%s' % (child_dag_name, i + 1), + task_id='{}-task-{}'.format(child_dag_name, i + 1), default_args=args, dag=dag_subdag, ) diff --git a/tests/hooks/test_dbapi_hook.py b/tests/hooks/test_dbapi_hook.py index dc9235cdc3656..5e16aaf4cde80 100644 --- a/tests/hooks/test_dbapi_hook.py +++ b/tests/hooks/test_dbapi_hook.py @@ -94,7 +94,7 @@ def test_insert_rows(self): commit_count = 2 # The first and last commit self.assertEqual(commit_count, self.conn.commit.call_count) - sql = "INSERT INTO {} VALUES (%s)".format(table) + sql = f"INSERT INTO {table} VALUES (%s)" for row in rows: self.cur.execute.assert_any_call(sql, row) @@ -111,7 +111,7 @@ def test_insert_rows_replace(self): commit_count = 2 # The first and last commit self.assertEqual(commit_count, self.conn.commit.call_count) - sql = "REPLACE INTO {} VALUES (%s)".format(table) + sql = f"REPLACE INTO {table} VALUES (%s)" for row in rows: self.cur.execute.assert_any_call(sql, row) @@ -147,7 +147,7 @@ def test_insert_rows_commit_every(self): commit_count = 2 + divmod(len(rows), commit_every)[0] self.assertEqual(commit_count, self.conn.commit.call_count) - sql = "INSERT INTO {} VALUES (%s)".format(table) + sql = f"INSERT INTO {table} VALUES (%s)" for row in rows: self.cur.execute.assert_any_call(sql, row) diff --git a/tests/jobs/test_backfill_job.py b/tests/jobs/test_backfill_job.py index 383200f976b32..b9a3b6a754a96 100644 --- a/tests/jobs/test_backfill_job.py +++ b/tests/jobs/test_backfill_job.py @@ -1454,7 +1454,7 @@ def test_reset_orphaned_tasks_with_orphans(self): schedule_interval="@daily") tasks = [] for i in range(len(states)): - task_id = "{}_task_{}".format(prefix, i) + task_id = f"{prefix}_task_{i}" task = DummyOperator(task_id=task_id, dag=dag) tasks.append(task) diff --git a/tests/jobs/test_scheduler_job.py b/tests/jobs/test_scheduler_job.py index 71a4402a3d131..a7589ef57dca6 100644 --- a/tests/jobs/test_scheduler_job.py +++ b/tests/jobs/test_scheduler_job.py @@ -2966,7 +2966,7 @@ def test_add_unparseable_file_before_sched_start_creates_import_error(self): self.assertEqual(import_error.filename, unparseable_filename) self.assertEqual(import_error.stacktrace, - "invalid syntax ({}, line 1)".format(TEMP_DAG_FILENAME)) + f"invalid syntax ({TEMP_DAG_FILENAME}, line 1)") @conf_vars({("core", "dagbag_import_error_tracebacks"): "False"}) def test_add_unparseable_file_after_sched_start_creates_import_error(self): @@ -2996,7 +2996,7 @@ def test_add_unparseable_file_after_sched_start_creates_import_error(self): self.assertEqual(import_error.filename, unparseable_filename) self.assertEqual(import_error.stacktrace, - "invalid syntax ({}, line 1)".format(TEMP_DAG_FILENAME)) + f"invalid syntax ({TEMP_DAG_FILENAME}, line 1)") def test_no_import_errors_with_parseable_dag(self): try: @@ -3043,7 +3043,7 @@ def test_new_import_error_replaces_old(self): self.assertEqual(import_error.filename, unparseable_filename) self.assertEqual(import_error.stacktrace, - "invalid syntax ({}, line 2)".format(TEMP_DAG_FILENAME)) + f"invalid syntax ({TEMP_DAG_FILENAME}, line 2)") def test_remove_error_clears_import_error(self): try: @@ -3221,7 +3221,7 @@ def test_list_py_file_paths(self): if file_name.endswith('.py') or file_name.endswith('.zip'): if file_name not in ignored_files: expected_files.add( - '{}/{}'.format(root, file_name)) + 
f'{root}/{file_name}') for file_path in list_py_file_paths(TEST_DAG_FOLDER, include_examples=False): detected_files.add(file_path) self.assertEqual(detected_files, expected_files) diff --git a/tests/kubernetes/test_pod_launcher.py b/tests/kubernetes/test_pod_launcher.py index 812ce5b6bd340..b85ac2bc5bb59 100644 --- a/tests/kubernetes/test_pod_launcher.py +++ b/tests/kubernetes/test_pod_launcher.py @@ -130,11 +130,11 @@ def test_read_pod_events_retries_successfully(self): self.mock_kube_client.list_namespaced_event.assert_has_calls([ mock.call( namespace=mock.sentinel.metadata.namespace, - field_selector="involvedObject.name={}".format(mock.sentinel.metadata.name) + field_selector=f"involvedObject.name={mock.sentinel.metadata.name}" ), mock.call( namespace=mock.sentinel.metadata.namespace, - field_selector="involvedObject.name={}".format(mock.sentinel.metadata.name) + field_selector=f"involvedObject.name={mock.sentinel.metadata.name}" ) ]) diff --git a/tests/models/test_baseoperator.py b/tests/models/test_baseoperator.py index cac0a5172ad23..8f835afd39e0a 100644 --- a/tests/models/test_baseoperator.py +++ b/tests/models/test_baseoperator.py @@ -282,8 +282,8 @@ class TestBaseOperatorMethods(unittest.TestCase): def test_cross_downstream(self): """Test if all dependencies between tasks are all set correctly.""" dag = DAG(dag_id="test_dag", start_date=datetime.now()) - start_tasks = [DummyOperator(task_id="t{i}".format(i=i), dag=dag) for i in range(1, 4)] - end_tasks = [DummyOperator(task_id="t{i}".format(i=i), dag=dag) for i in range(4, 7)] + start_tasks = [DummyOperator(task_id=f"t{i}", dag=dag) for i in range(1, 4)] + end_tasks = [DummyOperator(task_id=f"t{i}", dag=dag) for i in range(4, 7)] cross_downstream(from_tasks=start_tasks, to_tasks=end_tasks) for start_task in start_tasks: @@ -292,7 +292,7 @@ def test_cross_downstream(self): def test_chain(self): dag = DAG(dag_id='test_chain', start_date=datetime.now()) [op1, op2, op3, op4, op5, op6] = [ - DummyOperator(task_id='t{i}'.format(i=i), dag=dag) + DummyOperator(task_id=f't{i}', dag=dag) for i in range(1, 7) ] chain(op1, [op2, op3], [op4, op5], op6) @@ -304,13 +304,13 @@ def test_chain(self): def test_chain_not_support_type(self): dag = DAG(dag_id='test_chain', start_date=datetime.now()) - [op1, op2] = [DummyOperator(task_id='t{i}'.format(i=i), dag=dag) for i in range(1, 3)] + [op1, op2] = [DummyOperator(task_id=f't{i}', dag=dag) for i in range(1, 3)] with self.assertRaises(TypeError): chain([op1, op2], 1) # noqa def test_chain_different_length_iterable(self): dag = DAG(dag_id='test_chain', start_date=datetime.now()) - [op1, op2, op3, op4, op5] = [DummyOperator(task_id='t{i}'.format(i=i), dag=dag) for i in range(1, 6)] + [op1, op2, op3, op4, op5] = [DummyOperator(task_id=f't{i}', dag=dag) for i in range(1, 6)] with self.assertRaises(AirflowException): chain([op1, op2], [op3, op4, op5]) diff --git a/tests/models/test_dag.py b/tests/models/test_dag.py index c3acc2d0200e6..7fd0f2dbab695 100644 --- a/tests/models/test_dag.py +++ b/tests/models/test_dag.py @@ -387,7 +387,7 @@ def test_dag_task_priority_weight_total(self): default_args={'owner': 'owner1'}) as dag: pipeline = [ [DummyOperator( - task_id='stage{}.{}'.format(i, j), priority_weight=weight) + task_id=f'stage{i}.{j}', priority_weight=weight) for j in range(0, width)] for i in range(0, depth) ] for i, stage in enumerate(pipeline): @@ -416,7 +416,7 @@ def test_dag_task_priority_weight_total_using_upstream(self): default_args={'owner': 'owner1'}) as dag: pipeline = [ [DummyOperator( - 
task_id='stage{}.{}'.format(i, j), priority_weight=weight, + task_id=f'stage{i}.{j}', priority_weight=weight, weight_rule=WeightRule.UPSTREAM) for j in range(0, width)] for i in range(0, depth) ] @@ -445,7 +445,7 @@ def test_dag_task_priority_weight_total_using_absolute(self): default_args={'owner': 'owner1'}) as dag: pipeline = [ [DummyOperator( - task_id='stage{}.{}'.format(i, j), priority_weight=weight, + task_id=f'stage{i}.{j}', priority_weight=weight, weight_rule=WeightRule.ABSOLUTE) for j in range(0, width)] for i in range(0, depth) ] @@ -1113,7 +1113,7 @@ def test_schedule_dag_no_previous_runs(self): self.assertEqual( TEST_DATE, dag_run.execution_date, - msg='dag_run.execution_date did not match expectation: {0}' + msg='dag_run.execution_date did not match expectation: {}' .format(dag_run.execution_date) ) self.assertEqual(State.RUNNING, dag_run.state) @@ -1639,12 +1639,12 @@ def subdag(parent_dag_name, child_dag_name, args): """ Create a subdag. """ - dag_subdag = DAG(dag_id='%s.%s' % (parent_dag_name, child_dag_name), + dag_subdag = DAG(dag_id=f'{parent_dag_name}.{child_dag_name}', schedule_interval="@daily", default_args=args) for i in range(2): - DummyOperator(task_id='%s-task-%s' % (child_dag_name, i + 1), dag=dag_subdag) + DummyOperator(task_id='{}-task-{}'.format(child_dag_name, i + 1), dag=dag_subdag) return dag_subdag diff --git a/tests/models/test_taskinstance.py b/tests/models/test_taskinstance.py index 5a7cd9ab4f1be..bcf4ca0346530 100644 --- a/tests/models/test_taskinstance.py +++ b/tests/models/test_taskinstance.py @@ -309,8 +309,8 @@ def test_not_requeue_non_requeueable_task_instance(self): patch_dict = {} for dep in all_non_requeueable_deps: class_name = dep.__class__.__name__ - dep_patch = patch('%s.%s.%s' % (dep.__module__, class_name, - dep._get_dep_statuses.__name__)) + dep_patch = patch('{}.{}.{}'.format(dep.__module__, class_name, + dep._get_dep_statuses.__name__)) method_patch = dep_patch.start() method_patch.return_value = iter([TIDepStatus('mock_' + class_name, True, 'mock')]) @@ -897,7 +897,7 @@ def test_check_task_dependencies(self, trigger_rule, successes, skipped, dag=dag, owner='airflow', trigger_rule=trigger_rule) for i in range(5): - task = DummyOperator(task_id='runme_{}'.format(i), + task = DummyOperator(task_id=f'runme_{i}', dag=dag, owner='airflow') task.set_downstream(downstream) run_date = task.start_date + datetime.timedelta(days=5) @@ -1711,8 +1711,8 @@ def test_task_stats(self, stats_mock): session.commit() ti._run_raw_task() ti.refresh_from_db() - stats_mock.assert_called_with('ti.finish.{}.{}.{}'.format(dag.dag_id, op.task_id, ti.state)) - self.assertIn(call('ti.start.{}.{}'.format(dag.dag_id, op.task_id)), stats_mock.mock_calls) + stats_mock.assert_called_with(f'ti.finish.{dag.dag_id}.{op.task_id}.{ti.state}') + self.assertIn(call(f'ti.start.{dag.dag_id}.{op.task_id}'), stats_mock.mock_calls) self.assertEqual(stats_mock.call_count, 5) def test_generate_command_default_param(self): diff --git a/tests/operators/test_bash.py b/tests/operators/test_bash.py index 389efee829362..22dd60051affa 100644 --- a/tests/operators/test_bash.py +++ b/tests/operators/test_bash.py @@ -81,7 +81,7 @@ def test_echo_env_variables(self): task.run(DEFAULT_DATE, DEFAULT_DATE, ignore_first_depends_on_past=True, ignore_ti_state=True) - with open(tmp_file.name, 'r') as file: + with open(tmp_file.name) as file: output = ''.join(file.readlines()) self.assertIn('MY_PATH_TO_AIRFLOW_HOME', output) # exported in run-tests as part of PYTHONPATH diff --git 
a/tests/operators/test_generic_transfer.py b/tests/operators/test_generic_transfer.py index 79355045e75dc..6ad670615b55b 100644 --- a/tests/operators/test_generic_transfer.py +++ b/tests/operators/test_generic_transfer.py @@ -48,7 +48,7 @@ def tearDown(self): drop_tables = {'test_mysql_to_mysql', 'test_airflow'} with MySqlHook().get_conn() as conn: for table in drop_tables: - conn.execute("DROP TABLE IF EXISTS {}".format(table)) + conn.execute(f"DROP TABLE IF EXISTS {table}") @parameterized.expand([("mysqlclient",), ("mysql-connector-python",), ]) def test_mysql_to_mysql(self, client): diff --git a/tests/operators/test_python.py b/tests/operators/test_python.py index ac3b82d311235..f804f06e855ad 100644 --- a/tests/operators/test_python.py +++ b/tests/operators/test_python.py @@ -196,7 +196,7 @@ def test_python_callable_arguments_are_templatized(self): recorded_calls[0], Call(4, date(2019, 1, 1), - "dag {} ran on {}.".format(self.dag.dag_id, ds_templated), + f"dag {self.dag.dag_id} ran on {ds_templated}.", Named(ds_templated, 'unchanged')) ) @@ -262,7 +262,7 @@ def test_conflicting_kwargs(self): def func(dag): # An ValueError should be triggered since we're using dag as a # reserved keyword - raise RuntimeError("Should not be triggered, dag: {}".format(dag)) + raise RuntimeError(f"Should not be triggered, dag: {dag}") python_operator = PythonOperator( task_id='python_operator', @@ -444,7 +444,7 @@ def test_python_callable_arguments_are_templatized(self): recorded_calls[0], Call(4, date(2019, 1, 1), - "dag {} ran on {}.".format(self.dag.dag_id, ds_templated), + f"dag {self.dag.dag_id} ran on {ds_templated}.", Named(ds_templated, 'unchanged')) ) diff --git a/tests/operators/test_subdag_operator.py b/tests/operators/test_subdag_operator.py index f1f7396bea360..f1946d36af96e 100644 --- a/tests/operators/test_subdag_operator.py +++ b/tests/operators/test_subdag_operator.py @@ -303,7 +303,7 @@ def test_subdag_with_propagate_skipped_state( propagate_skipped_state=propagate_option ) dummy_subdag_tasks = [ - DummyOperator(task_id='dummy_subdag_{}'.format(i), dag=subdag) + DummyOperator(task_id=f'dummy_subdag_{i}', dag=subdag) for i in range(len(states)) ] dummy_dag_task = DummyOperator(task_id='dummy_dag', dag=dag) diff --git a/tests/providers/amazon/aws/hooks/test_batch_client.py b/tests/providers/amazon/aws/hooks/test_batch_client.py index ab5c0d5bbb3cf..07e97d9147377 100644 --- a/tests/providers/amazon/aws/hooks/test_batch_client.py +++ b/tests/providers/amazon/aws/hooks/test_batch_client.py @@ -126,7 +126,7 @@ def test_poll_job_complete_raises_for_max_retries(self): self.client_mock.describe_jobs.return_value = {"jobs": [{"jobId": JOB_ID, "status": "RUNNING"}]} with self.assertRaises(AirflowException) as e: self.batch_client.poll_for_job_complete(JOB_ID) - msg = "AWS Batch job ({}) status checks exceed max_retries".format(JOB_ID) + msg = f"AWS Batch job ({JOB_ID}) status checks exceed max_retries" self.assertIn(msg, str(e.exception)) self.client_mock.describe_jobs.assert_called_with(jobs=[JOB_ID]) self.assertEqual(self.client_mock.describe_jobs.call_count, self.MAX_RETRIES + 1) @@ -138,7 +138,7 @@ def test_poll_job_status_hit_api_throttle(self): ) with self.assertRaises(AirflowException) as e: self.batch_client.poll_for_job_complete(JOB_ID) - msg = "AWS Batch job ({}) description error".format(JOB_ID) + msg = f"AWS Batch job ({JOB_ID}) description error" self.assertIn(msg, str(e.exception)) # It should retry when this client error occurs 
self.client_mock.describe_jobs.assert_called_with(jobs=[JOB_ID]) @@ -151,7 +151,7 @@ def test_poll_job_status_with_client_error(self): ) with self.assertRaises(AirflowException) as e: self.batch_client.poll_for_job_complete(JOB_ID) - msg = "AWS Batch job ({}) description error".format(JOB_ID) + msg = f"AWS Batch job ({JOB_ID}) description error" self.assertIn(msg, str(e.exception)) # It will not retry when this client error occurs self.client_mock.describe_jobs.assert_called_once_with(jobs=[JOB_ID]) @@ -176,7 +176,7 @@ def test_check_job_success_raises_failed(self): with self.assertRaises(AirflowException) as e: self.batch_client.check_job_success(JOB_ID) self.client_mock.describe_jobs.assert_called_once_with(jobs=[JOB_ID]) - msg = "AWS Batch job ({}) failed".format(JOB_ID) + msg = f"AWS Batch job ({JOB_ID}) failed" self.assertIn(msg, str(e.exception)) def test_check_job_success_raises_failed_for_multiple_attempts(self): @@ -193,7 +193,7 @@ def test_check_job_success_raises_failed_for_multiple_attempts(self): with self.assertRaises(AirflowException) as e: self.batch_client.check_job_success(JOB_ID) self.client_mock.describe_jobs.assert_called_once_with(jobs=[JOB_ID]) - msg = "AWS Batch job ({}) failed".format(JOB_ID) + msg = f"AWS Batch job ({JOB_ID}) failed" self.assertIn(msg, str(e.exception)) def test_check_job_success_raises_incomplete(self): @@ -201,7 +201,7 @@ def test_check_job_success_raises_incomplete(self): with self.assertRaises(AirflowException) as e: self.batch_client.check_job_success(JOB_ID) self.client_mock.describe_jobs.assert_called_once_with(jobs=[JOB_ID]) - msg = "AWS Batch job ({}) is not complete".format(JOB_ID) + msg = f"AWS Batch job ({JOB_ID}) is not complete" self.assertIn(msg, str(e.exception)) def test_check_job_success_raises_unknown_status(self): @@ -210,7 +210,7 @@ def test_check_job_success_raises_unknown_status(self): with self.assertRaises(AirflowException) as e: self.batch_client.check_job_success(JOB_ID) self.client_mock.describe_jobs.assert_called_once_with(jobs=[JOB_ID]) - msg = "AWS Batch job ({}) has unknown status".format(JOB_ID) + msg = f"AWS Batch job ({JOB_ID}) has unknown status" self.assertIn(msg, str(e.exception)) self.assertIn(status, str(e.exception)) @@ -219,7 +219,7 @@ def test_check_job_success_raises_without_jobs(self): with self.assertRaises(AirflowException) as e: self.batch_client.check_job_success(JOB_ID) self.client_mock.describe_jobs.assert_called_once_with(jobs=[JOB_ID]) - msg = "AWS Batch job ({}) description error".format(JOB_ID) + msg = f"AWS Batch job ({JOB_ID}) description error" self.assertIn(msg, str(e.exception)) def test_terminate_job(self): diff --git a/tests/providers/amazon/aws/hooks/test_batch_waiters.py b/tests/providers/amazon/aws/hooks/test_batch_waiters.py index 09adc93e8597a..ced6296679c3e 100644 --- a/tests/providers/amazon/aws/hooks/test_batch_waiters.py +++ b/tests/providers/amazon/aws/hooks/test_batch_waiters.py @@ -198,7 +198,7 @@ def batch_infrastructure( assert resp["jobDefinitionArn"] job_definition_arn = resp["jobDefinitionArn"] assert resp["revision"] - assert resp["jobDefinitionArn"].endswith("{0}:{1}".format(resp["jobDefinitionName"], resp["revision"])) + assert resp["jobDefinitionArn"].endswith("{}:{}".format(resp["jobDefinitionName"], resp["revision"])) infrastructure.vpc_id = vpc_id infrastructure.subnet_id = subnet_id diff --git a/tests/providers/amazon/aws/hooks/test_datasync.py b/tests/providers/amazon/aws/hooks/test_datasync.py index 450dd52f07b4f..cbcb1a47946e6 100644 --- 
a/tests/providers/amazon/aws/hooks/test_datasync.py +++ b/tests/providers/amazon/aws/hooks/test_datasync.py @@ -93,7 +93,7 @@ def setUp(self): AgentArns=["stuff"], )["LocationArn"] self.destination_location_arn = self.client.create_location_s3( - S3BucketArn="arn:aws:s3:::{0}".format(self.destination_bucket_name), + S3BucketArn=f"arn:aws:s3:::{self.destination_bucket_name}", Subdirectory=self.destination_bucket_dir, S3Config={"BucketAccessRoleArn": "role"}, )["LocationArn"] @@ -138,7 +138,7 @@ def test_create_location_smb(self, mock_get_conn): domain = "COMPANY.DOMAIN" mount_options = {"Version": "SMB2"} - location_uri = "smb://{0}/{1}".format(server_hostname, subdirectory) + location_uri = f"smb://{server_hostname}/{subdirectory}" create_location_kwargs = { "ServerHostname": server_hostname, @@ -175,7 +175,7 @@ def test_create_location_s3(self, mock_get_conn): subdirectory = "my_subdir" s3_config = {"BucketAccessRoleArn": "myrole"} - location_uri = "s3://{0}/{1}".format(s3_bucket_arn, subdirectory) + location_uri = f"s3://{s3_bucket_arn}/{subdirectory}" create_location_kwargs = { "S3BucketArn": s3_bucket_arn, @@ -268,7 +268,7 @@ def test_get_location_arns(self, mock_get_conn): # ### Begin tests: # Get true location_arn from boto/moto self.client - location_uri = "smb://{0}/{1}".format(self.source_server_hostname, self.source_subdirectory) + location_uri = f"smb://{self.source_server_hostname}/{self.source_subdirectory}" locations = self.client.list_locations() for location in locations["Locations"]: if location["LocationUri"] == location_uri: @@ -286,7 +286,7 @@ def test_get_location_arns_case_sensitive(self, mock_get_conn): # ### Begin tests: # Get true location_arn from boto/moto self.client - location_uri = "smb://{0}/{1}".format(self.source_server_hostname.upper(), self.source_subdirectory) + location_uri = f"smb://{self.source_server_hostname.upper()}/{self.source_subdirectory}" locations = self.client.list_locations() for location in locations["Locations"]: if location["LocationUri"] == location_uri.lower(): @@ -305,7 +305,7 @@ def test_get_location_arns_trailing_slash(self, mock_get_conn): # ### Begin tests: # Get true location_arn from boto/moto self.client - location_uri = "smb://{0}/{1}/".format(self.source_server_hostname, self.source_subdirectory) + location_uri = f"smb://{self.source_server_hostname}/{self.source_subdirectory}/" locations = self.client.list_locations() for location in locations["Locations"]: if location["LocationUri"] == location_uri[:-1]: diff --git a/tests/providers/amazon/aws/hooks/test_glue_catalog.py b/tests/providers/amazon/aws/hooks/test_glue_catalog.py index aee649e5fdeb1..3c2adc59464ba 100644 --- a/tests/providers/amazon/aws/hooks/test_glue_catalog.py +++ b/tests/providers/amazon/aws/hooks/test_glue_catalog.py @@ -34,7 +34,7 @@ "Name": TABLE_NAME, "StorageDescriptor": { "Columns": [{"Name": "string", "Type": "string", "Comment": "string"}], - "Location": "s3://mybucket/{}/{}".format(DB_NAME, TABLE_NAME), + "Location": f"s3://mybucket/{DB_NAME}/{TABLE_NAME}", }, } diff --git a/tests/providers/amazon/aws/hooks/test_s3.py b/tests/providers/amazon/aws/hooks/test_s3.py index 337352e257dc7..97d9517330844 100644 --- a/tests/providers/amazon/aws/hooks/test_s3.py +++ b/tests/providers/amazon/aws/hooks/test_s3.py @@ -152,9 +152,9 @@ def test_check_for_key(self, s3_bucket): bucket.put_object(Key='a', Body=b'a') assert hook.check_for_key('a', s3_bucket) is True - assert hook.check_for_key('s3://{}//a'.format(s3_bucket)) is True + assert 
hook.check_for_key(f's3://{s3_bucket}//a') is True assert hook.check_for_key('b', s3_bucket) is False - assert hook.check_for_key('s3://{}//b'.format(s3_bucket)) is False + assert hook.check_for_key(f's3://{s3_bucket}//b') is False def test_check_for_key_raises_error_with_invalid_conn_id(self, monkeypatch, s3_bucket): monkeypatch.delenv('AWS_PROFILE', raising=False) @@ -170,7 +170,7 @@ def test_get_key(self, s3_bucket): bucket.put_object(Key='a', Body=b'a') assert hook.get_key('a', s3_bucket).key == 'a' - assert hook.get_key('s3://{}/a'.format(s3_bucket)).key == 'a' + assert hook.get_key(f's3://{s3_bucket}/a').key == 'a' def test_read_key(self, s3_bucket): hook = S3Hook() @@ -196,13 +196,13 @@ def test_check_for_wildcard_key(self, s3_bucket): assert hook.check_for_wildcard_key('a*', s3_bucket) is True assert hook.check_for_wildcard_key('abc', s3_bucket) is True - assert hook.check_for_wildcard_key('s3://{}//a*'.format(s3_bucket)) is True - assert hook.check_for_wildcard_key('s3://{}//abc'.format(s3_bucket)) is True + assert hook.check_for_wildcard_key(f's3://{s3_bucket}//a*') is True + assert hook.check_for_wildcard_key(f's3://{s3_bucket}//abc') is True assert hook.check_for_wildcard_key('a', s3_bucket) is False assert hook.check_for_wildcard_key('b', s3_bucket) is False - assert hook.check_for_wildcard_key('s3://{}//a'.format(s3_bucket)) is False - assert hook.check_for_wildcard_key('s3://{}//b'.format(s3_bucket)) is False + assert hook.check_for_wildcard_key(f's3://{s3_bucket}//a') is False + assert hook.check_for_wildcard_key(f's3://{s3_bucket}//b') is False def test_get_wildcard_key(self, s3_bucket): hook = S3Hook() @@ -216,14 +216,14 @@ def test_get_wildcard_key(self, s3_bucket): assert hook.get_wildcard_key('a*', s3_bucket).key == 'a/b' assert hook.get_wildcard_key('a*', s3_bucket, delimiter='/').key == 'abc' assert hook.get_wildcard_key('abc', s3_bucket, delimiter='/').key == 'abc' - assert hook.get_wildcard_key('s3://{}/a*'.format(s3_bucket)).key == 'a/b' - assert hook.get_wildcard_key('s3://{}/a*'.format(s3_bucket), delimiter='/').key == 'abc' - assert hook.get_wildcard_key('s3://{}/abc'.format(s3_bucket), delimiter='/').key == 'abc' + assert hook.get_wildcard_key(f's3://{s3_bucket}/a*').key == 'a/b' + assert hook.get_wildcard_key(f's3://{s3_bucket}/a*', delimiter='/').key == 'abc' + assert hook.get_wildcard_key(f's3://{s3_bucket}/abc', delimiter='/').key == 'abc' assert hook.get_wildcard_key('a', s3_bucket) is None assert hook.get_wildcard_key('b', s3_bucket) is None - assert hook.get_wildcard_key('s3://{}/a'.format(s3_bucket)) is None - assert hook.get_wildcard_key('s3://{}/b'.format(s3_bucket)) is None + assert hook.get_wildcard_key(f's3://{s3_bucket}/a') is None + assert hook.get_wildcard_key(f's3://{s3_bucket}/b') is None def test_load_string(self, s3_bucket): hook = S3Hook() @@ -363,7 +363,7 @@ def test_delete_objects_many_keys(self, mocked_s3_res, s3_bucket): num_keys_to_remove = 1001 keys = [] for index in range(num_keys_to_remove): - key = 'key-{}'.format(index) + key = f'key-{index}' mocked_s3_res.Object(s3_bucket, key).put(Body=b'Data') keys.append(key) diff --git a/tests/providers/amazon/aws/hooks/test_sagemaker.py b/tests/providers/amazon/aws/hooks/test_sagemaker.py index 05c95371dd3f4..a94281a6452fa 100644 --- a/tests/providers/amazon/aws/hooks/test_sagemaker.py +++ b/tests/providers/amazon/aws/hooks/test_sagemaker.py @@ -39,7 +39,7 @@ path = 'local/data' bucket = 'test-bucket' key = 'test/data' -data_url = 's3://{}/{}'.format(bucket, key) +data_url = 
f's3://{bucket}/{key}' job_name = 'test-job' model_name = 'test-model' @@ -48,7 +48,7 @@ image = 'test-image' test_arn_return = {'Arn': 'testarn'} -output_url = 's3://{}/test/output'.format(bucket) +output_url = f's3://{bucket}/test/output' create_training_params = { 'AlgorithmSpecification': {'TrainingImage': image, 'TrainingInputMode': 'File'}, diff --git a/tests/providers/amazon/aws/log/test_cloudwatch_task_handler.py b/tests/providers/amazon/aws/log/test_cloudwatch_task_handler.py index 9eedd6b0b6c2f..6aad465034d6a 100644 --- a/tests/providers/amazon/aws/log/test_cloudwatch_task_handler.py +++ b/tests/providers/amazon/aws/log/test_cloudwatch_task_handler.py @@ -49,7 +49,7 @@ def setUp(self): self.filename_template = '{dag_id}/{task_id}/{execution_date}/{try_number}.log' self.cloudwatch_task_handler = CloudwatchTaskHandler( self.local_log_location, - "arn:aws:logs:{}:11111111:log-group:{}".format(self.region_name, self.remote_log_group), + f"arn:aws:logs:{self.region_name}:11111111:log-group:{self.remote_log_group}", self.filename_template, ) self.cloudwatch_task_handler.hook @@ -80,7 +80,7 @@ def test_hook(self): def test_hook_raises(self): handler = CloudwatchTaskHandler( self.local_log_location, - "arn:aws:logs:{}:11111111:log-group:{}".format(self.region_name, self.remote_log_group), + f"arn:aws:logs:{self.region_name}:11111111:log-group:{self.remote_log_group}", self.filename_template, ) diff --git a/tests/providers/amazon/aws/operators/test_datasync.py b/tests/providers/amazon/aws/operators/test_datasync.py index 1fb9f1e84d0fe..f220d1667425e 100644 --- a/tests/providers/amazon/aws/operators/test_datasync.py +++ b/tests/providers/amazon/aws/operators/test_datasync.py @@ -50,9 +50,9 @@ def no_datasync(x): SOURCE_SUBDIR = "airflow_subdir" DESTINATION_BUCKET_NAME = "airflow_bucket" -SOURCE_LOCATION_URI = "smb://{0}/{1}".format(SOURCE_HOST_NAME, SOURCE_SUBDIR) -DESTINATION_LOCATION_URI = "s3://{0}".format(DESTINATION_BUCKET_NAME) -DESTINATION_LOCATION_ARN = "arn:aws:s3:::{0}".format(DESTINATION_BUCKET_NAME) +SOURCE_LOCATION_URI = f"smb://{SOURCE_HOST_NAME}/{SOURCE_SUBDIR}" +DESTINATION_LOCATION_URI = f"s3://{DESTINATION_BUCKET_NAME}" +DESTINATION_LOCATION_ARN = f"arn:aws:s3:::{DESTINATION_BUCKET_NAME}" CREATE_TASK_KWARGS = {"Options": {"VerifyMode": "NONE", "Atime": "NONE"}} UPDATE_TASK_KWARGS = {"Options": {"VerifyMode": "BEST_EFFORT", "Atime": "NONE"}} diff --git a/tests/providers/amazon/aws/operators/test_s3_copy_object.py b/tests/providers/amazon/aws/operators/test_s3_copy_object.py index e049d0b41856f..c9f6af31e335a 100644 --- a/tests/providers/amazon/aws/operators/test_s3_copy_object.py +++ b/tests/providers/amazon/aws/operators/test_s3_copy_object.py @@ -67,8 +67,8 @@ def test_s3_copy_object_arg_combination_2(self): # there should be nothing found before S3CopyObjectOperator is executed self.assertFalse('Contents' in conn.list_objects(Bucket=self.dest_bucket, Prefix=self.dest_key)) - source_key_s3_url = "s3://{}/{}".format(self.source_bucket, self.source_key) - dest_key_s3_url = "s3://{}/{}".format(self.dest_bucket, self.dest_key) + source_key_s3_url = f"s3://{self.source_bucket}/{self.source_key}" + dest_key_s3_url = f"s3://{self.dest_bucket}/{self.dest_key}" op = S3CopyObjectOperator( task_id="test_task_s3_copy_object", source_bucket_key=source_key_s3_url, diff --git a/tests/providers/amazon/aws/operators/test_sagemaker_endpoint.py b/tests/providers/amazon/aws/operators/test_sagemaker_endpoint.py index f9abf32e87d1c..44563204571f1 100644 --- 
a/tests/providers/amazon/aws/operators/test_sagemaker_endpoint.py +++ b/tests/providers/amazon/aws/operators/test_sagemaker_endpoint.py @@ -28,7 +28,7 @@ role = 'arn:aws:iam:role/test-role' bucket = 'test-bucket' image = 'test-image' -output_url = 's3://{}/test/output'.format(bucket) +output_url = f's3://{bucket}/test/output' model_name = 'test-model-name' config_name = 'test-endpoint-config-name' endpoint_name = 'test-endpoint-name' diff --git a/tests/providers/amazon/aws/operators/test_sagemaker_model.py b/tests/providers/amazon/aws/operators/test_sagemaker_model.py index 64f1f5e59fb9e..cf8aaa925a879 100644 --- a/tests/providers/amazon/aws/operators/test_sagemaker_model.py +++ b/tests/providers/amazon/aws/operators/test_sagemaker_model.py @@ -32,7 +32,7 @@ image = 'test-image' -output_url = 's3://{}/test/output'.format(bucket) +output_url = f's3://{bucket}/test/output' create_model_params = { 'ModelName': model_name, 'PrimaryContainer': { diff --git a/tests/providers/amazon/aws/operators/test_sagemaker_training.py b/tests/providers/amazon/aws/operators/test_sagemaker_training.py index ea96b04d1d460..e7134860a2207 100644 --- a/tests/providers/amazon/aws/operators/test_sagemaker_training.py +++ b/tests/providers/amazon/aws/operators/test_sagemaker_training.py @@ -28,13 +28,13 @@ bucket = 'test-bucket' key = 'test/data' -data_url = 's3://{}/{}'.format(bucket, key) +data_url = f's3://{bucket}/{key}' job_name = 'test-job-name' image = 'test-image' -output_url = 's3://{}/test/output'.format(bucket) +output_url = f's3://{bucket}/test/output' create_training_params = { 'AlgorithmSpecification': {'TrainingImage': image, 'TrainingInputMode': 'File'}, 'RoleArn': role, diff --git a/tests/providers/amazon/aws/operators/test_sagemaker_transform.py b/tests/providers/amazon/aws/operators/test_sagemaker_transform.py index 5be137427f421..ccbaa09b51918 100644 --- a/tests/providers/amazon/aws/operators/test_sagemaker_transform.py +++ b/tests/providers/amazon/aws/operators/test_sagemaker_transform.py @@ -29,7 +29,7 @@ bucket = 'test-bucket' key = 'test/data' -data_url = 's3://{}/{}'.format(bucket, key) +data_url = f's3://{bucket}/{key}' job_name = 'test-job-name' @@ -37,7 +37,7 @@ image = 'test-image' -output_url = 's3://{}/test/output'.format(bucket) +output_url = f's3://{bucket}/test/output' create_transform_params = { 'TransformJobName': job_name, diff --git a/tests/providers/amazon/aws/operators/test_sagemaker_tuning.py b/tests/providers/amazon/aws/operators/test_sagemaker_tuning.py index ed396bd61689a..a197ddf3d925b 100644 --- a/tests/providers/amazon/aws/operators/test_sagemaker_tuning.py +++ b/tests/providers/amazon/aws/operators/test_sagemaker_tuning.py @@ -29,13 +29,13 @@ bucket = 'test-bucket' key = 'test/data' -data_url = 's3://{}/{}'.format(bucket, key) +data_url = f's3://{bucket}/{key}' job_name = 'test-job-name' image = 'test-image' -output_url = 's3://{}/test/output'.format(bucket) +output_url = f's3://{bucket}/test/output' create_tuning_params = { 'HyperParameterTuningJobName': job_name, diff --git a/tests/providers/amazon/aws/transfers/test_redshift_to_s3.py b/tests/providers/amazon/aws/transfers/test_redshift_to_s3.py index 64f8f49f1361d..4545418055228 100644 --- a/tests/providers/amazon/aws/transfers/test_redshift_to_s3.py +++ b/tests/providers/amazon/aws/transfers/test_redshift_to_s3.py @@ -69,7 +69,7 @@ def test_execute( ).execute(None) unload_options = '\n\t\t\t'.join(unload_options) - select_query = "SELECT * FROM {schema}.{table}".format(schema=schema, table=table) + select_query = 
f"SELECT * FROM {schema}.{table}" unload_query = """ UNLOAD ('{select_query}') TO 's3://{s3_bucket}/{s3_key}' diff --git a/tests/providers/amazon/aws/transfers/test_s3_to_sftp.py b/tests/providers/amazon/aws/transfers/test_s3_to_sftp.py index 2ccc7988a7817..4108f88923806 100644 --- a/tests/providers/amazon/aws/transfers/test_s3_to_sftp.py +++ b/tests/providers/amazon/aws/transfers/test_s3_to_sftp.py @@ -113,7 +113,7 @@ def test_s3_to_sftp_operation(self): check_file_task = SSHOperator( task_id="test_check_file", ssh_hook=self.hook, - command="cat {0}".format(self.sftp_path), + command=f"cat {self.sftp_path}", do_xcom_push=True, dag=self.dag, ) @@ -135,7 +135,7 @@ def delete_remote_resource(self): remove_file_task = SSHOperator( task_id="test_rm_file", ssh_hook=self.hook, - command="rm {0}".format(self.sftp_path), + command=f"rm {self.sftp_path}", do_xcom_push=True, dag=self.dag, ) diff --git a/tests/providers/amazon/aws/transfers/test_sftp_to_s3.py b/tests/providers/amazon/aws/transfers/test_sftp_to_s3.py index 381a463e0a056..c9767c3b8b23b 100644 --- a/tests/providers/amazon/aws/transfers/test_sftp_to_s3.py +++ b/tests/providers/amazon/aws/transfers/test_sftp_to_s3.py @@ -81,7 +81,7 @@ def test_sftp_to_s3_operation(self): create_file_task = SSHOperator( task_id="test_create_file", ssh_hook=self.hook, - command="echo '{0}' > {1}".format(test_remote_file_content, self.sftp_path), + command=f"echo '{test_remote_file_content}' > {self.sftp_path}", do_xcom_push=True, dag=self.dag, ) diff --git a/tests/providers/apache/hdfs/hooks/test_webhdfs.py b/tests/providers/apache/hdfs/hooks/test_webhdfs.py index 05ca418a7c6ef..012d3548cd505 100644 --- a/tests/providers/apache/hdfs/hooks/test_webhdfs.py +++ b/tests/providers/apache/hdfs/hooks/test_webhdfs.py @@ -43,7 +43,7 @@ def test_get_conn(self, socket_mock, mock_get_connections, mock_insecure_client) mock_insecure_client.assert_has_calls( [ call( - 'http://{host}:{port}'.format(host=connection.host, port=connection.port), + f'http://{connection.host}:{connection.port}', user=connection.login, ) for connection in mock_get_connections.return_value @@ -66,9 +66,7 @@ def test_get_conn_kerberos_security_mode( conn = self.webhdfs_hook.get_conn() connection = mock_get_connections.return_value[0] - mock_kerberos_client.assert_called_once_with( - 'http://{host}:{port}'.format(host=connection.host, port=connection.port) - ) + mock_kerberos_client.assert_called_once_with(f'http://{connection.host}:{connection.port}') self.assertEqual(conn, mock_kerberos_client.return_value) @patch('airflow.providers.apache.hdfs.hooks.webhdfs.WebHDFSHook._find_valid_server', return_value=None) diff --git a/tests/providers/apache/hive/hooks/test_hive.py b/tests/providers/apache/hive/hooks/test_hive.py index f658957c5cb81..050f98c67c0d5 100644 --- a/tests/providers/apache/hive/hooks/test_hive.py +++ b/tests/providers/apache/hive/hooks/test_hive.py @@ -408,7 +408,7 @@ def test_check_for_partition(self): metastore = self.hook.metastore.__enter__() - partition = "{p_by}='{date}'".format(date=DEFAULT_DATE_DS, p_by=self.partition_by) + partition = f"{self.partition_by}='{DEFAULT_DATE_DS}'" metastore.get_partitions_by_filter = mock.MagicMock(return_value=[fake_partition]) @@ -417,7 +417,7 @@ def test_check_for_partition(self): metastore.get_partitions_by_filter(self.database, self.table, partition, 1) # Check for non-existent partition. 
- missing_partition = "{p_by}='{date}'".format(date=self.next_day, p_by=self.partition_by) + missing_partition = f"{self.partition_by}='{self.next_day}'" metastore.get_partitions_by_filter = mock.MagicMock(return_value=[]) self.assertFalse(self.hook.check_for_partition(self.database, self.table, missing_partition)) @@ -428,7 +428,7 @@ def test_check_for_named_partition(self): # Check for existing partition. - partition = "{p_by}={date}".format(date=DEFAULT_DATE_DS, p_by=self.partition_by) + partition = f"{self.partition_by}={DEFAULT_DATE_DS}" self.hook.metastore.__enter__().check_for_named_partition = mock.MagicMock(return_value=True) @@ -439,7 +439,7 @@ def test_check_for_named_partition(self): ) # Check for non-existent partition - missing_partition = "{p_by}={date}".format(date=self.next_day, p_by=self.partition_by) + missing_partition = f"{self.partition_by}={self.next_day}" self.hook.metastore.__enter__().check_for_named_partition = mock.MagicMock(return_value=False) @@ -574,7 +574,7 @@ def setUp(self): LOAD DATA LOCAL INPATH '{{ params.csv_path }}' OVERWRITE INTO TABLE {{ params.table }}; """ - self.columns = ['{}.a'.format(self.table), '{}.b'.format(self.table)] + self.columns = [f'{self.table}.a', f'{self.table}.b'] with mock.patch( 'airflow.providers.apache.hive.hooks.hive.HiveMetastoreHook.get_metastore_client' @@ -609,7 +609,7 @@ def test_get_conn_with_password(self, mock_connect): def test_get_records(self): hook = MockHiveServer2Hook() - query = "SELECT * FROM {}".format(self.table) + query = f"SELECT * FROM {self.table}" with mock.patch.dict( 'os.environ', @@ -636,7 +636,7 @@ def test_get_records(self): def test_get_pandas_df(self): hook = MockHiveServer2Hook() - query = "SELECT * FROM {}".format(self.table) + query = f"SELECT * FROM {self.table}" with mock.patch.dict( 'os.environ', @@ -665,7 +665,7 @@ def test_get_pandas_df(self): def test_get_results_header(self): hook = MockHiveServer2Hook() - query = "SELECT * FROM {}".format(self.table) + query = f"SELECT * FROM {self.table}" results = hook.get_results(query, schema=self.database) self.assertListEqual([col[0] for col in results['header']], self.columns) @@ -673,7 +673,7 @@ def test_get_results_header(self): def test_get_results_data(self): hook = MockHiveServer2Hook() - query = "SELECT * FROM {}".format(self.table) + query = f"SELECT * FROM {self.table}" results = hook.get_results(query, schema=self.database) self.assertListEqual(results['data'], [(1, 1), (2, 2)]) @@ -692,7 +692,7 @@ def test_to_csv(self): ] ) ) - query = "SELECT * FROM {}".format(self.table) + query = f"SELECT * FROM {self.table}" csv_filepath = 'query_results.csv' hook.to_csv( query, @@ -711,7 +711,7 @@ def test_to_csv(self): def test_multi_statements(self): sqls = [ "CREATE TABLE IF NOT EXISTS test_multi_statements (i INT)", - "SELECT * FROM {}".format(self.table), + f"SELECT * FROM {self.table}", "DROP TABLE test_multi_statements", ] @@ -737,7 +737,7 @@ def test_multi_statements(self): hook.get_conn.assert_called_with(self.database) hook.mock_cursor.execute.assert_any_call('CREATE TABLE IF NOT EXISTS test_multi_statements (i INT)') - hook.mock_cursor.execute.assert_any_call('SELECT * FROM {}'.format(self.table)) + hook.mock_cursor.execute.assert_any_call(f'SELECT * FROM {self.table}') hook.mock_cursor.execute.assert_any_call('DROP TABLE test_multi_statements') hook.mock_cursor.execute.assert_any_call('set airflow.ctx.dag_id=test_dag_id') hook.mock_cursor.execute.assert_any_call('set airflow.ctx.task_id=HiveHook_3835') diff --git 
a/tests/providers/apache/hive/operators/test_hive_stats.py b/tests/providers/apache/hive/operators/test_hive_stats.py index 6e8a0b7d3358f..cb9814e368548 100644 --- a/tests/providers/apache/hive/operators/test_hive_stats.py +++ b/tests/providers/apache/hive/operators/test_hive_stats.py @@ -54,7 +54,7 @@ def test_get_default_exprs(self): default_exprs = HiveStatsCollectionOperator(**self.kwargs).get_default_exprs(col, None) - self.assertEqual(default_exprs, {(col, 'non_null'): 'COUNT({})'.format(col)}) + self.assertEqual(default_exprs, {(col, 'non_null'): f'COUNT({col})'}) def test_get_default_exprs_excluded_cols(self): col = 'excluded_col' @@ -72,11 +72,11 @@ def test_get_default_exprs_number(self): self.assertEqual( default_exprs, { - (col, 'avg'): 'AVG({})'.format(col), - (col, 'max'): 'MAX({})'.format(col), - (col, 'min'): 'MIN({})'.format(col), - (col, 'non_null'): 'COUNT({})'.format(col), - (col, 'sum'): 'SUM({})'.format(col), + (col, 'avg'): f'AVG({col})', + (col, 'max'): f'MAX({col})', + (col, 'min'): f'MIN({col})', + (col, 'non_null'): f'COUNT({col})', + (col, 'sum'): f'SUM({col})', }, ) @@ -89,9 +89,9 @@ def test_get_default_exprs_boolean(self): self.assertEqual( default_exprs, { - (col, 'false'): 'SUM(CASE WHEN NOT {} THEN 1 ELSE 0 END)'.format(col), - (col, 'non_null'): 'COUNT({})'.format(col), - (col, 'true'): 'SUM(CASE WHEN {} THEN 1 ELSE 0 END)'.format(col), + (col, 'false'): f'SUM(CASE WHEN NOT {col} THEN 1 ELSE 0 END)', + (col, 'non_null'): f'COUNT({col})', + (col, 'true'): f'SUM(CASE WHEN {col} THEN 1 ELSE 0 END)', }, ) @@ -104,9 +104,9 @@ def test_get_default_exprs_string(self): self.assertEqual( default_exprs, { - (col, 'approx_distinct'): 'APPROX_DISTINCT({})'.format(col), - (col, 'len'): 'SUM(CAST(LENGTH({}) AS BIGINT))'.format(col), - (col, 'non_null'): 'COUNT({})'.format(col), + (col, 'approx_distinct'): f'APPROX_DISTINCT({col})', + (col, 'len'): f'SUM(CAST(LENGTH({col}) AS BIGINT))', + (col, 'non_null'): f'COUNT({col})', }, ) @@ -169,7 +169,7 @@ def test_execute_with_assignment_func( self, mock_hive_metastore_hook, mock_presto_hook, mock_mysql_hook, mock_json_dumps ): def assignment_func(col, _): - return {(col, 'test'): 'TEST({})'.format(col)} + return {(col, 'test'): f'TEST({col})'} self.kwargs.update(dict(assignment_func=assignment_func)) mock_hive_metastore_hook.return_value.get_table.return_value.sd.cols = [fake_col] diff --git a/tests/providers/apache/hive/sensors/test_metastore_partition.py b/tests/providers/apache/hive/sensors/test_metastore_partition.py index fa8b9eda50d6e..36f8f1253a890 100644 --- a/tests/providers/apache/hive/sensors/test_metastore_partition.py +++ b/tests/providers/apache/hive/sensors/test_metastore_partition.py @@ -33,7 +33,7 @@ def test_hive_metastore_sql_sensor(self): conn_id='test_connection_id', sql='test_sql', table='airflow.static_babynames_partitioned', - partition_name='ds={}'.format(DEFAULT_DATE_DS), + partition_name=f'ds={DEFAULT_DATE_DS}', dag=self.dag, ) op._get_hook = mock.MagicMock(return_value=MockDBConnection({})) diff --git a/tests/providers/apache/hive/sensors/test_named_hive_partition.py b/tests/providers/apache/hive/sensors/test_named_hive_partition.py index 8a7ab7d319ba1..d5887181485f2 100644 --- a/tests/providers/apache/hive/sensors/test_named_hive_partition.py +++ b/tests/providers/apache/hive/sensors/test_named_hive_partition.py @@ -70,7 +70,7 @@ def test_parse_partition_name_correct(self): schema = 'default' table = 'users' partition = 'ds=2016-01-01/state=IT' - name = 
'{schema}.{table}/{partition}'.format(schema=schema, table=table, partition=partition) + name = f'{schema}.{table}/{partition}' parsed_schema, parsed_table, parsed_partition = NamedHivePartitionSensor.parse_partition_name(name) self.assertEqual(schema, parsed_schema) self.assertEqual(table, parsed_table) @@ -84,7 +84,7 @@ def test_parse_partition_name_incorrect(self): def test_parse_partition_name_default(self): table = 'users' partition = 'ds=2016-01-01/state=IT' - name = '{table}/{partition}'.format(table=table, partition=partition) + name = f'{table}/{partition}' parsed_schema, parsed_table, parsed_partition = NamedHivePartitionSensor.parse_partition_name(name) self.assertEqual('default', parsed_schema) self.assertEqual(table, parsed_table) @@ -92,7 +92,7 @@ def test_parse_partition_name_default(self): def test_poke_existing(self): self.hook.metastore.__enter__().check_for_named_partition.return_value = True - partitions = ["{}.{}/{}={}".format(self.database, self.table, self.partition_by, DEFAULT_DATE_DS)] + partitions = [f"{self.database}.{self.table}/{self.partition_by}={DEFAULT_DATE_DS}"] sensor = NamedHivePartitionSensor( partition_names=partitions, task_id='test_poke_existing', @@ -107,7 +107,7 @@ def test_poke_existing(self): def test_poke_non_existing(self): self.hook.metastore.__enter__().check_for_named_partition.return_value = False - partitions = ["{}.{}/{}={}".format(self.database, self.table, self.partition_by, self.next_day)] + partitions = [f"{self.database}.{self.table}/{self.partition_by}={self.next_day}"] sensor = NamedHivePartitionSensor( partition_names=partitions, task_id='test_poke_non_existing', diff --git a/tests/providers/apache/hive/transfers/test_mssql_to_hive.py b/tests/providers/apache/hive/transfers/test_mssql_to_hive.py index 43ecb934aaaee..dbbf782d3b5be 100644 --- a/tests/providers/apache/hive/transfers/test_mssql_to_hive.py +++ b/tests/providers/apache/hive/transfers/test_mssql_to_hive.py @@ -114,7 +114,7 @@ def test_execute_empty_description_field(self, mock_hive_hook, mock_mssql_hook, col_count = 0 for field in mock_mssql_hook_cursor.return_value.description: col_count += 1 - col_position = "Column{position}".format(position=col_count) + col_position = f"Column{col_count}" field_dict[col_position] = mssql_to_hive_transfer.type_map(field[1]) mock_hive_hook.return_value.load_file.assert_called_once_with( mock_tmp_file.name, diff --git a/tests/providers/apache/hive/transfers/test_mysql_to_hive.py b/tests/providers/apache/hive/transfers/test_mysql_to_hive.py index 71f4de9050031..19a368049726a 100644 --- a/tests/providers/apache/hive/transfers/test_mysql_to_hive.py +++ b/tests/providers/apache/hive/transfers/test_mysql_to_hive.py @@ -302,7 +302,7 @@ def test_mysql_to_hive_type_conversion(self, mock_load_file): try: with hook.get_conn() as conn: - conn.execute("DROP TABLE IF EXISTS {}".format(mysql_table)) + conn.execute(f"DROP TABLE IF EXISTS {mysql_table}") conn.execute( """ CREATE TABLE {} ( @@ -321,7 +321,7 @@ def test_mysql_to_hive_type_conversion(self, mock_load_file): op = MySqlToHiveOperator( task_id='test_m2h', hive_cli_conn_id='hive_cli_default', - sql="SELECT * FROM {}".format(mysql_table), + sql=f"SELECT * FROM {mysql_table}", hive_table='test_mysql_to_hive', dag=self.dag, ) @@ -338,7 +338,7 @@ def test_mysql_to_hive_type_conversion(self, mock_load_file): self.assertEqual(mock_load_file.call_args[1]["field_dict"], ordered_dict) finally: with hook.get_conn() as conn: - conn.execute("DROP TABLE IF EXISTS {}".format(mysql_table)) + 
conn.execute(f"DROP TABLE IF EXISTS {mysql_table}") @mock.patch('tempfile.tempdir', '/tmp/') @mock.patch('tempfile._RandomNameSequence.__next__') @@ -356,7 +356,7 @@ def test_mysql_to_hive_verify_csv_special_char(self, mock_popen, mock_temp_dir): try: db_record = ('c0', '["true"]') with hook.get_conn() as conn: - conn.execute("DROP TABLE IF EXISTS {}".format(mysql_table)) + conn.execute(f"DROP TABLE IF EXISTS {mysql_table}") conn.execute( """ CREATE TABLE {} ( @@ -383,7 +383,7 @@ def test_mysql_to_hive_verify_csv_special_char(self, mock_popen, mock_temp_dir): op = MySqlToHiveOperator( task_id='test_m2h', hive_cli_conn_id='hive_cli_default', - sql="SELECT * FROM {}".format(mysql_table), + sql=f"SELECT * FROM {mysql_table}", hive_table=hive_table, recreate=True, delimiter=",", @@ -398,7 +398,7 @@ def test_mysql_to_hive_verify_csv_special_char(self, mock_popen, mock_temp_dir): mock_cursor.iterable = [('c0', '["true"]'), (2, 2)] hive_hook = MockHiveServer2Hook(connection_cursor=mock_cursor) - result = hive_hook.get_records("SELECT * FROM {}".format(hive_table)) + result = hive_hook.get_records(f"SELECT * FROM {hive_table}") self.assertEqual(result[0], db_record) hive_cmd = [ @@ -436,7 +436,7 @@ def test_mysql_to_hive_verify_csv_special_char(self, mock_popen, mock_temp_dir): ) finally: with hook.get_conn() as conn: - conn.execute("DROP TABLE IF EXISTS {}".format(mysql_table)) + conn.execute(f"DROP TABLE IF EXISTS {mysql_table}") @mock.patch('tempfile.tempdir', '/tmp/') @mock.patch('tempfile._RandomNameSequence.__next__') @@ -466,7 +466,7 @@ def test_mysql_to_hive_verify_loaded_values(self, mock_popen, mock_temp_dir): ) with hook.get_conn() as conn: - conn.execute("DROP TABLE IF EXISTS {}".format(mysql_table)) + conn.execute(f"DROP TABLE IF EXISTS {mysql_table}") conn.execute( """ CREATE TABLE {} ( @@ -499,7 +499,7 @@ def test_mysql_to_hive_verify_loaded_values(self, mock_popen, mock_temp_dir): op = MySqlToHiveOperator( task_id='test_m2h', hive_cli_conn_id='hive_cli_default', - sql="SELECT * FROM {}".format(mysql_table), + sql=f"SELECT * FROM {mysql_table}", hive_table=hive_table, recreate=True, delimiter=",", @@ -511,7 +511,7 @@ def test_mysql_to_hive_verify_loaded_values(self, mock_popen, mock_temp_dir): mock_cursor.iterable = [minmax] hive_hook = MockHiveServer2Hook(connection_cursor=mock_cursor) - result = hive_hook.get_records("SELECT * FROM {}".format(hive_table)) + result = hive_hook.get_records(f"SELECT * FROM {hive_table}") self.assertEqual(result[0], minmax) hive_cmd = [ @@ -550,4 +550,4 @@ def test_mysql_to_hive_verify_loaded_values(self, mock_popen, mock_temp_dir): finally: with hook.get_conn() as conn: - conn.execute("DROP TABLE IF EXISTS {}".format(mysql_table)) + conn.execute(f"DROP TABLE IF EXISTS {mysql_table}") diff --git a/tests/providers/apache/hive/transfers/test_s3_to_hive.py b/tests/providers/apache/hive/transfers/test_s3_to_hive.py index 4c0ed475176c0..ad3fe94f24245 100644 --- a/tests/providers/apache/hive/transfers/test_s3_to_hive.py +++ b/tests/providers/apache/hive/transfers/test_s3_to_hive.py @@ -228,7 +228,7 @@ def test_execute(self, mock_hiveclihook): # against expected file output mock_hiveclihook().load_file.side_effect = lambda *args, **kwargs: self.assertTrue( self._check_file_equality(args[0], op_fn, ext), - msg='{0} output file not as expected'.format(ext), + msg=f'{ext} output file not as expected', ) # Execute S3ToHiveTransfer s32hive = S3ToHiveOperator(**self.kwargs) @@ -256,7 +256,7 @@ def test_execute_with_select_expression(self, mock_hiveclihook): 
self.kwargs['headers'] = has_header self.kwargs['input_compressed'] = input_compressed self.kwargs['select_expression'] = select_expression - self.kwargs['s3_key'] = 's3://{0}/{1}'.format(bucket, key) + self.kwargs['s3_key'] = f's3://{bucket}/{key}' ip_fn = self._get_fn(ext, has_header) diff --git a/tests/providers/apache/kylin/hooks/test_kylin.py b/tests/providers/apache/kylin/hooks/test_kylin.py index 7f16ee61bd597..ade147ec3d0f0 100644 --- a/tests/providers/apache/kylin/hooks/test_kylin.py +++ b/tests/providers/apache/kylin/hooks/test_kylin.py @@ -59,7 +59,7 @@ def invoke_command(self, command, **kwargs): if command in invoke_command_list: return {"code": "000", "data": {}} else: - raise KylinCubeError('Unsupported invoke command for datasource: {}'.format(command)) + raise KylinCubeError(f'Unsupported invoke command for datasource: {command}') cube_source.return_value = MockCubeSource() response_data = {"code": "000", "data": {}} diff --git a/tests/providers/apache/livy/hooks/test_livy.py b/tests/providers/apache/livy/hooks/test_livy.py index 4985fd12caf35..316b4706e1b91 100644 --- a/tests/providers/apache/livy/hooks/test_livy.py +++ b/tests/providers/apache/livy/hooks/test_livy.py @@ -289,9 +289,7 @@ def test_post_batch_fail(self, mock): @requests_mock.mock() def test_get_batch_success(self, mock): - mock.register_uri( - 'GET', '//livy:8998/batches/{}'.format(BATCH_ID), json={'id': BATCH_ID}, status_code=200 - ) + mock.register_uri('GET', f'//livy:8998/batches/{BATCH_ID}', json={'id': BATCH_ID}, status_code=200) hook = LivyHook() resp = hook.get_batch(BATCH_ID) @@ -303,7 +301,7 @@ def test_get_batch_success(self, mock): def test_get_batch_fail(self, mock): mock.register_uri( 'GET', - '//livy:8998/batches/{}'.format(BATCH_ID), + f'//livy:8998/batches/{BATCH_ID}', json={'msg': 'Unable to find batch'}, status_code=404, reason='ERROR', @@ -325,7 +323,7 @@ def test_get_batch_state_success(self, mock): mock.register_uri( 'GET', - '//livy:8998/batches/{}/state'.format(BATCH_ID), + f'//livy:8998/batches/{BATCH_ID}/state', json={'id': BATCH_ID, 'state': running.value}, status_code=200, ) @@ -338,7 +336,7 @@ def test_get_batch_state_success(self, mock): @requests_mock.mock() def test_get_batch_state_fail(self, mock): mock.register_uri( - 'GET', '//livy:8998/batches/{}/state'.format(BATCH_ID), json={}, status_code=400, reason='ERROR' + 'GET', f'//livy:8998/batches/{BATCH_ID}/state', json={}, status_code=400, reason='ERROR' ) hook = LivyHook() @@ -347,7 +345,7 @@ def test_get_batch_state_fail(self, mock): @requests_mock.mock() def test_get_batch_state_missing(self, mock): - mock.register_uri('GET', '//livy:8998/batches/{}/state'.format(BATCH_ID), json={}, status_code=200) + mock.register_uri('GET', f'//livy:8998/batches/{BATCH_ID}/state', json={}, status_code=200) hook = LivyHook() with self.assertRaises(AirflowException): @@ -361,7 +359,7 @@ def test_parse_post_response(self): @requests_mock.mock() def test_delete_batch_success(self, mock): mock.register_uri( - 'DELETE', '//livy:8998/batches/{}'.format(BATCH_ID), json={'msg': 'deleted'}, status_code=200 + 'DELETE', f'//livy:8998/batches/{BATCH_ID}', json={'msg': 'deleted'}, status_code=200 ) resp = LivyHook().delete_batch(BATCH_ID) @@ -371,7 +369,7 @@ def test_delete_batch_success(self, mock): @requests_mock.mock() def test_delete_batch_fail(self, mock): mock.register_uri( - 'DELETE', '//livy:8998/batches/{}'.format(BATCH_ID), json={}, status_code=400, reason='ERROR' + 'DELETE', f'//livy:8998/batches/{BATCH_ID}', json={}, status_code=400, 
reason='ERROR' ) hook = LivyHook() @@ -388,9 +386,7 @@ def test_missing_batch_id(self, mock): @requests_mock.mock() def test_get_batch_validation(self, mock): - mock.register_uri( - 'GET', '//livy:8998/batches/{}'.format(BATCH_ID), json=SAMPLE_GET_RESPONSE, status_code=200 - ) + mock.register_uri('GET', f'//livy:8998/batches/{BATCH_ID}', json=SAMPLE_GET_RESPONSE, status_code=200) hook = LivyHook() with self.subTest('get_batch'): @@ -398,14 +394,14 @@ def test_get_batch_validation(self, mock): # make sure blocked by validation for val in [None, 'one', {'a': 'b'}]: - with self.subTest('get_batch {}'.format(val)): + with self.subTest(f'get_batch {val}'): with self.assertRaises(TypeError): hook.get_batch(val) @requests_mock.mock() def test_get_batch_state_validation(self, mock): mock.register_uri( - 'GET', '//livy:8998/batches/{}/state'.format(BATCH_ID), json=SAMPLE_GET_RESPONSE, status_code=200 + 'GET', f'//livy:8998/batches/{BATCH_ID}/state', json=SAMPLE_GET_RESPONSE, status_code=200 ) hook = LivyHook() @@ -413,22 +409,20 @@ def test_get_batch_state_validation(self, mock): hook.get_batch_state(BATCH_ID) for val in [None, 'one', {'a': 'b'}]: - with self.subTest('get_batch {}'.format(val)): + with self.subTest(f'get_batch {val}'): with self.assertRaises(TypeError): hook.get_batch_state(val) @requests_mock.mock() def test_delete_batch_validation(self, mock): - mock.register_uri( - 'DELETE', '//livy:8998/batches/{}'.format(BATCH_ID), json={'id': BATCH_ID}, status_code=200 - ) + mock.register_uri('DELETE', f'//livy:8998/batches/{BATCH_ID}', json={'id': BATCH_ID}, status_code=200) hook = LivyHook() with self.subTest('get_batch'): hook.delete_batch(BATCH_ID) for val in [None, 'one', {'a': 'b'}]: - with self.subTest('get_batch {}'.format(val)): + with self.subTest(f'get_batch {val}'): with self.assertRaises(TypeError): hook.delete_batch(val) diff --git a/tests/providers/apache/spark/hooks/test_spark_sql.py b/tests/providers/apache/spark/hooks/test_spark_sql.py index da79b0d24f12f..5cc001868a0af 100644 --- a/tests/providers/apache/spark/hooks/test_spark_sql.py +++ b/tests/providers/apache/spark/hooks/test_spark_sql.py @@ -69,7 +69,7 @@ def test_build_command(self): # Check if all config settings are there for key_value in self._config['conf'].split(","): k, v = key_value.split('=') - assert "--conf {0}={1}".format(k, v) in cmd + assert f"--conf {k}={v}" in cmd if self._config['verbose']: assert "--verbose" in cmd diff --git a/tests/providers/apache/sqoop/hooks/test_sqoop.py b/tests/providers/apache/sqoop/hooks/test_sqoop.py index 6249be2375efd..5b89eb867a0f6 100644 --- a/tests/providers/apache/sqoop/hooks/test_sqoop.py +++ b/tests/providers/apache/sqoop/hooks/test_sqoop.py @@ -205,7 +205,7 @@ def test_submit(self): self.assertIn("--num-mappers {}".format(self._config['num_mappers']), cmd) for key, value in self._config['properties'].items(): - self.assertIn("-D {}={}".format(key, value), cmd) + self.assertIn(f"-D {key}={value}", cmd) # We don't have the sqoop binary available, and this is hard to mock, # so just accept an exception for now. 
diff --git a/tests/providers/databricks/hooks/test_databricks.py b/tests/providers/databricks/hooks/test_databricks.py index be3abd24bb547..ac70edcf91e59 100644 --- a/tests/providers/databricks/hooks/test_databricks.py +++ b/tests/providers/databricks/hooks/test_databricks.py @@ -43,7 +43,7 @@ LOGIN = 'login' PASSWORD = 'password' TOKEN = 'token' -USER_AGENT_HEADER = {'user-agent': 'airflow-{v}'.format(v=__version__)} +USER_AGENT_HEADER = {'user-agent': f'airflow-{__version__}'} RUN_PAGE_URL = 'https://XX.cloud.databricks.com/#jobs/1/runs/1' LIFE_CYCLE_STATE = 'PENDING' STATE_MESSAGE = 'Waiting for cluster' @@ -61,49 +61,49 @@ def run_now_endpoint(host): """ Utility function to generate the run now endpoint given the host. """ - return 'https://{}/api/2.0/jobs/run-now'.format(host) + return f'https://{host}/api/2.0/jobs/run-now' def submit_run_endpoint(host): """ Utility function to generate the submit run endpoint given the host. """ - return 'https://{}/api/2.0/jobs/runs/submit'.format(host) + return f'https://{host}/api/2.0/jobs/runs/submit' def get_run_endpoint(host): """ Utility function to generate the get run endpoint given the host. """ - return 'https://{}/api/2.0/jobs/runs/get'.format(host) + return f'https://{host}/api/2.0/jobs/runs/get' def cancel_run_endpoint(host): """ Utility function to generate the get run endpoint given the host. """ - return 'https://{}/api/2.0/jobs/runs/cancel'.format(host) + return f'https://{host}/api/2.0/jobs/runs/cancel' def start_cluster_endpoint(host): """ Utility function to generate the get run endpoint given the host. """ - return 'https://{}/api/2.0/clusters/start'.format(host) + return f'https://{host}/api/2.0/clusters/start' def restart_cluster_endpoint(host): """ Utility function to generate the get run endpoint given the host. """ - return 'https://{}/api/2.0/clusters/restart'.format(host) + return f'https://{host}/api/2.0/clusters/restart' def terminate_cluster_endpoint(host): """ Utility function to generate the get run endpoint given the host. 
""" - return 'https://{}/api/2.0/clusters/delete'.format(host) + return f'https://{host}/api/2.0/clusters/delete' def create_valid_response_mock(content): diff --git a/tests/providers/databricks/operators/test_databricks.py b/tests/providers/databricks/operators/test_databricks.py index 007eaa22bf9dc..c26e464f96795 100644 --- a/tests/providers/databricks/operators/test_databricks.py +++ b/tests/providers/databricks/operators/test_databricks.py @@ -35,7 +35,7 @@ DEFAULT_CONN_ID = 'databricks_default' NOTEBOOK_TASK = {'notebook_path': '/test'} TEMPLATED_NOTEBOOK_TASK = {'notebook_path': '/test-{{ ds }}'} -RENDERED_TEMPLATED_NOTEBOOK_TASK = {'notebook_path': '/test-{0}'.format(DATE)} +RENDERED_TEMPLATED_NOTEBOOK_TASK = {'notebook_path': f'/test-{DATE}'} SPARK_JAR_TASK = {'main_class_name': 'com.databricks.Test'} SPARK_PYTHON_TASK = {'python_file': 'test.py', 'parameters': ['--param', '123']} SPARK_SUBMIT_TASK = { @@ -48,7 +48,7 @@ JOB_ID = 42 NOTEBOOK_PARAMS = {"dry-run": "true", "oldest-time-to-consider": "1457570074236"} JAR_PARAMS = ["param1", "param2"] -RENDERED_TEMPLATED_JAR_PARAMS = ['/test-{0}'.format(DATE)] +RENDERED_TEMPLATED_JAR_PARAMS = [f'/test-{DATE}'] TEMPLATED_JAR_PARAMS = ['/test-{{ ds }}'] PYTHON_PARAMS = ["john doe", "35"] SPARK_SUBMIT_PARAMS = ["--class", "org.apache.spark.examples.SparkPi"] diff --git a/tests/providers/elasticsearch/log/elasticmock/__init__.py b/tests/providers/elasticsearch/log/elasticmock/__init__.py index 6df234e910bf6..2490dbe7ae23d 100644 --- a/tests/providers/elasticsearch/log/elasticmock/__init__.py +++ b/tests/providers/elasticsearch/log/elasticmock/__init__.py @@ -51,7 +51,7 @@ def _get_elasticmock(hosts=None, *args, **kwargs): # pylint: disable=unused-argument host = _normalize_hosts(hosts)[0] - elastic_key = '{0}:{1}'.format(host.get('host', 'localhost'), host.get('port', 9200)) + elastic_key = '{}:{}'.format(host.get('host', 'localhost'), host.get('port', 9200)) if elastic_key in ELASTIC_INSTANCES: connection = ELASTIC_INSTANCES.get(elastic_key) diff --git a/tests/providers/elasticsearch/log/elasticmock/fake_elasticsearch.py b/tests/providers/elasticsearch/log/elasticmock/fake_elasticsearch.py index 7b5a2c7b68697..21219724de8bd 100644 --- a/tests/providers/elasticsearch/log/elasticmock/fake_elasticsearch.py +++ b/tests/providers/elasticsearch/log/elasticmock/fake_elasticsearch.py @@ -302,12 +302,12 @@ def delete(self, index, doc_type, id, params=None): @query_params('allow_no_indices', 'expand_wildcards', 'ignore_unavailable', 'preference', 'routing') def suggest(self, body, index=None): if index is not None and index not in self.__documents_dict: - raise NotFoundError(404, 'IndexMissingException[[{0}] missing]'.format(index)) + raise NotFoundError(404, f'IndexMissingException[[{index}] missing]') result_dict = {} for key, value in body.items(): text = value.get('text') - suggestion = int(text) + 1 if isinstance(text, int) else '{0}_suggestion'.format(text) + suggestion = int(text) + 1 if isinstance(text, int) else f'{text}_suggestion' result_dict[key] = [ { 'text': text, @@ -364,7 +364,7 @@ def _normalize_index_to_list(self, index): # Check index(es) exists for searchable_index in searchable_indexes: if searchable_index not in self.__documents_dict: - raise NotFoundError(404, 'IndexMissingException[[{0}] missing]'.format(searchable_index)) + raise NotFoundError(404, f'IndexMissingException[[{searchable_index}] missing]') return searchable_indexes diff --git a/tests/providers/elasticsearch/log/test_es_task_handler.py 
b/tests/providers/elasticsearch/log/test_es_task_handler.py index 5423df3de797d..404e70853cb88 100644 --- a/tests/providers/elasticsearch/log/test_es_task_handler.py +++ b/tests/providers/elasticsearch/log/test_es_task_handler.py @@ -42,7 +42,7 @@ class TestElasticsearchTaskHandler(unittest.TestCase): DAG_ID = 'dag_for_testing_file_task_handler' TASK_ID = 'task_for_testing_file_log_handler' EXECUTION_DATE = datetime(2016, 1, 1) - LOG_ID = '{dag_id}-{task_id}-2016-01-01T00:00:00+00:00-1'.format(dag_id=DAG_ID, task_id=TASK_ID) + LOG_ID = f'{DAG_ID}-{TASK_ID}-2016-01-01T00:00:00+00:00-1' @elasticmock def setUp(self): @@ -258,7 +258,7 @@ def test_close(self): self.es_task_handler.set_context(self.ti) self.es_task_handler.close() with open( - os.path.join(self.local_log_location, self.filename_template.format(try_number=1)), 'r' + os.path.join(self.local_log_location, self.filename_template.format(try_number=1)) ) as log_file: # end_of_log_mark may contain characters like '\n' which is needed to # have the log uploaded but will not be stored in elasticsearch. @@ -272,7 +272,7 @@ def test_close_no_mark_end(self): self.es_task_handler.set_context(self.ti) self.es_task_handler.close() with open( - os.path.join(self.local_log_location, self.filename_template.format(try_number=1)), 'r' + os.path.join(self.local_log_location, self.filename_template.format(try_number=1)) ) as log_file: self.assertNotIn(self.end_of_log_mark, log_file.read()) self.assertTrue(self.es_task_handler.closed) @@ -282,7 +282,7 @@ def test_close_closed(self): self.es_task_handler.set_context(self.ti) self.es_task_handler.close() with open( - os.path.join(self.local_log_location, self.filename_template.format(try_number=1)), 'r' + os.path.join(self.local_log_location, self.filename_template.format(try_number=1)) ) as log_file: self.assertEqual(0, len(log_file.read())) @@ -291,7 +291,7 @@ def test_close_with_no_handler(self): self.es_task_handler.handler = None self.es_task_handler.close() with open( - os.path.join(self.local_log_location, self.filename_template.format(try_number=1)), 'r' + os.path.join(self.local_log_location, self.filename_template.format(try_number=1)) ) as log_file: self.assertEqual(0, len(log_file.read())) self.assertTrue(self.es_task_handler.closed) @@ -301,7 +301,7 @@ def test_close_with_no_stream(self): self.es_task_handler.handler.stream = None self.es_task_handler.close() with open( - os.path.join(self.local_log_location, self.filename_template.format(try_number=1)), 'r' + os.path.join(self.local_log_location, self.filename_template.format(try_number=1)) ) as log_file: self.assertIn(self.end_of_log_mark, log_file.read()) self.assertTrue(self.es_task_handler.closed) @@ -310,7 +310,7 @@ def test_close_with_no_stream(self): self.es_task_handler.handler.stream.close() self.es_task_handler.close() with open( - os.path.join(self.local_log_location, self.filename_template.format(try_number=1)), 'r' + os.path.join(self.local_log_location, self.filename_template.format(try_number=1)) ) as log_file: self.assertIn(self.end_of_log_mark, log_file.read()) self.assertTrue(self.es_task_handler.closed) diff --git a/tests/providers/google/cloud/_internal_client/test_secret_manager_client.py b/tests/providers/google/cloud/_internal_client/test_secret_manager_client.py index 7142b6e6f140a..c842fc75cbf4d 100644 --- a/tests/providers/google/cloud/_internal_client/test_secret_manager_client.py +++ b/tests/providers/google/cloud/_internal_client/test_secret_manager_client.py @@ -77,7 +77,7 @@ def test_get_existing_key(self, 
mock_client_info, mock_secrets_client): mock_secrets_client.return_value = mock_client mock_client.secret_version_path.return_value = "full-path" test_response = AccessSecretVersionResponse() - test_response.payload.data = "result".encode() + test_response.payload.data = b"result" mock_client.access_secret_version.return_value = test_response secrets_client = _SecretManagerClient(credentials="credentials") secret = secrets_client.get_secret(secret_id="existing", project_id="project_id") @@ -93,7 +93,7 @@ def test_get_existing_key_with_version(self, mock_client_info, mock_secrets_clie mock_secrets_client.return_value = mock_client mock_client.secret_version_path.return_value = "full-path" test_response = AccessSecretVersionResponse() - test_response.payload.data = "result".encode() + test_response.payload.data = b"result" mock_client.access_secret_version.return_value = test_response secrets_client = _SecretManagerClient(credentials="credentials") secret = secrets_client.get_secret( diff --git a/tests/providers/google/cloud/hooks/test_bigquery.py b/tests/providers/google/cloud/hooks/test_bigquery.py index f03a6ef27a4ef..4847073ee8324 100644 --- a/tests/providers/google/cloud/hooks/test_bigquery.py +++ b/tests/providers/google/cloud/hooks/test_bigquery.py @@ -416,7 +416,7 @@ def test_run_load_with_non_csv_as_src_fmt(self, fmt, _): @mock.patch("airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.insert_job") def test_run_extract(self, mock_insert): - source_project_dataset_table = "{}.{}.{}".format(PROJECT_ID, DATASET_ID, TABLE_ID) + source_project_dataset_table = f"{PROJECT_ID}.{DATASET_ID}.{TABLE_ID}" destination_cloud_storage_uris = ["gs://bucket/file.csv"] expected_configuration = { "extract": { @@ -1658,7 +1658,7 @@ class TestBigQueryBaseCursorMethodsDeprecationWarning(unittest.TestCase): def test_deprecation_warning(self, func_name, mock_bq_hook): args, kwargs = [1], {"param1": "val1"} new_path = re.escape(f"`airflow.providers.google.cloud.hooks.bigquery.BigQueryHook.{func_name}`") - message_pattern = r"This method is deprecated\.\s+Please use {}".format(new_path) + message_pattern = fr"This method is deprecated\.\s+Please use {new_path}" message_regex = re.compile(message_pattern, re.MULTILINE) mocked_func = getattr(mock_bq_hook, func_name) @@ -1669,4 +1669,4 @@ def test_deprecation_warning(self, func_name, mock_bq_hook): _ = func(*args, **kwargs) mocked_func.assert_called_once_with(*args, **kwargs) - self.assertRegex(func.__doc__, ".*{}.*".format(new_path)) + self.assertRegex(func.__doc__, f".*{new_path}.*") diff --git a/tests/providers/google/cloud/hooks/test_dataflow.py b/tests/providers/google/cloud/hooks/test_dataflow.py index b56ac251707df..9de46a5b81e09 100644 --- a/tests/providers/google/cloud/hooks/test_dataflow.py +++ b/tests/providers/google/cloud/hooks/test_dataflow.py @@ -40,7 +40,7 @@ TASK_ID = 'test-dataflow-operator' JOB_NAME = 'test-dataflow-pipeline' MOCK_UUID = '12345678' -UNIQUE_JOB_NAME = 'test-dataflow-pipeline-{}'.format(MOCK_UUID) +UNIQUE_JOB_NAME = f'test-dataflow-pipeline-{MOCK_UUID}' TEST_TEMPLATE = 'gs://dataflow-templates/wordcount/template_file' PARAMETERS = { 'inputFile': 'gs://dataflow-samples/shakespeare/kinglear.txt', @@ -195,7 +195,7 @@ def test_start_python_dataflow(self, mock_conn, mock_dataflow, mock_dataflowjob, '--project=test', '--labels=foo=bar', '--staging_location=gs://test/staging', - '--job_name={}-{}'.format(JOB_NAME, MOCK_UUID), + f'--job_name={JOB_NAME}-{MOCK_UUID}', ] 
self.assertListEqual(sorted(mock_dataflow.call_args[1]["cmd"]), sorted(expected_cmd)) @@ -229,7 +229,7 @@ def test_start_python_dataflow_with_custom_region_as_variable( '--project=test', '--labels=foo=bar', '--staging_location=gs://test/staging', - '--job_name={}-{}'.format(JOB_NAME, MOCK_UUID), + f'--job_name={JOB_NAME}-{MOCK_UUID}', ] self.assertListEqual(sorted(mock_dataflow.call_args[1]["cmd"]), sorted(expected_cmd)) @@ -262,7 +262,7 @@ def test_start_python_dataflow_with_custom_region_as_paramater( '--project=test', '--labels=foo=bar', '--staging_location=gs://test/staging', - '--job_name={}-{}'.format(JOB_NAME, MOCK_UUID), + f'--job_name={JOB_NAME}-{MOCK_UUID}', ] self.assertListEqual(sorted(mock_dataflow.call_args[1]["cmd"]), sorted(expected_cmd)) @@ -299,7 +299,7 @@ def test_start_python_dataflow_with_multiple_extra_packages( '--project=test', '--labels=foo=bar', '--staging_location=gs://test/staging', - '--job_name={}-{}'.format(JOB_NAME, MOCK_UUID), + f'--job_name={JOB_NAME}-{MOCK_UUID}', ] self.assertListEqual(sorted(mock_dataflow.call_args[1]["cmd"]), sorted(expected_cmd)) @@ -347,7 +347,7 @@ def test_start_python_dataflow_with_custom_interpreter( '--project=test', '--labels=foo=bar', '--staging_location=gs://test/staging', - '--job_name={}-{}'.format(JOB_NAME, MOCK_UUID), + f'--job_name={JOB_NAME}-{MOCK_UUID}', ] self.assertListEqual(sorted(mock_dataflow.call_args[1]["cmd"]), sorted(expected_cmd)) @@ -397,7 +397,7 @@ def test_start_python_dataflow_with_non_empty_py_requirements_and_without_system '--project=test', '--labels=foo=bar', '--staging_location=gs://test/staging', - '--job_name={}-{}'.format(JOB_NAME, MOCK_UUID), + f'--job_name={JOB_NAME}-{MOCK_UUID}', ] self.assertListEqual(sorted(mock_dataflow.call_args[1]["cmd"]), sorted(expected_cmd)) @@ -446,7 +446,7 @@ def test_start_java_dataflow(self, mock_conn, mock_dataflow, mock_dataflowjob, m '--project=test', '--stagingLocation=gs://test/staging', '--labels={"foo":"bar"}', - '--jobName={}-{}'.format(JOB_NAME, MOCK_UUID), + f'--jobName={JOB_NAME}-{MOCK_UUID}', ] self.assertListEqual( sorted(expected_cmd), @@ -483,7 +483,7 @@ def test_start_java_dataflow_with_multiple_values_in_variables( '--project=test', '--stagingLocation=gs://test/staging', '--labels={"foo":"bar"}', - '--jobName={}-{}'.format(JOB_NAME, MOCK_UUID), + f'--jobName={JOB_NAME}-{MOCK_UUID}', ] self.assertListEqual(sorted(mock_dataflow.call_args[1]["cmd"]), sorted(expected_cmd)) @@ -516,7 +516,7 @@ def test_start_java_dataflow_with_custom_region_as_variable( '--project=test', '--stagingLocation=gs://test/staging', '--labels={"foo":"bar"}', - '--jobName={}-{}'.format(JOB_NAME, MOCK_UUID), + f'--jobName={JOB_NAME}-{MOCK_UUID}', ] self.assertListEqual( sorted(expected_cmd), @@ -552,7 +552,7 @@ def test_start_java_dataflow_with_custom_region_as_parameter( '--project=test', '--stagingLocation=gs://test/staging', '--labels={"foo":"bar"}', - '--jobName={}-{}'.format(JOB_NAME, MOCK_UUID), + f'--jobName={JOB_NAME}-{MOCK_UUID}', ] self.assertListEqual( sorted(expected_cmd), @@ -583,7 +583,7 @@ def test_start_java_dataflow_with_job_class(self, mock_conn, mock_dataflow, mock '--project=test', '--stagingLocation=gs://test/staging', '--labels={"foo":"bar"}', - '--jobName={}-{}'.format(JOB_NAME, MOCK_UUID), + f'--jobName={JOB_NAME}-{MOCK_UUID}', ] self.assertListEqual(sorted(mock_dataflow.call_args[1]["cmd"]), sorted(expected_cmd)) @@ -771,7 +771,7 @@ def test_start_template_dataflow_with_runtime_env(self, mock_conn, mock_dataflow dataflow=mock_conn.return_value, 
job_id=TEST_JOB_ID, location=DEFAULT_DATAFLOW_LOCATION, - name='test-dataflow-pipeline-{}'.format(MOCK_UUID), + name=f'test-dataflow-pipeline-{MOCK_UUID}', num_retries=5, poll_sleep=10, project_number=TEST_PROJECT, @@ -818,7 +818,7 @@ def test_start_template_dataflow_update_runtime_env(self, mock_conn, mock_datafl dataflow=mock_conn.return_value, job_id=TEST_JOB_ID, location=DEFAULT_DATAFLOW_LOCATION, - name='test-dataflow-pipeline-{}'.format(MOCK_UUID), + name=f'test-dataflow-pipeline-{MOCK_UUID}', num_retries=5, poll_sleep=10, project_number=TEST_PROJECT, diff --git a/tests/providers/google/cloud/hooks/test_datafusion.py b/tests/providers/google/cloud/hooks/test_datafusion.py index a10f2258f5292..294f8c5b69ef5 100644 --- a/tests/providers/google/cloud/hooks/test_datafusion.py +++ b/tests/providers/google/cloud/hooks/test_datafusion.py @@ -185,7 +185,7 @@ def test_list_pipelines(self, mock_request, hook): @mock.patch(HOOK_STR.format("DataFusionHook.wait_for_pipeline_state")) def test_start_pipeline(self, mock_wait_for_pipeline_state, mock_request, hook): run_id = 1234 - mock_request.return_value = mock.MagicMock(status=200, data='[{{"runId":{}}}]'.format(run_id)) + mock_request.return_value = mock.MagicMock(status=200, data=f'[{{"runId":{run_id}}}]') hook.start_pipeline(pipeline_name=PIPELINE_NAME, instance_url=INSTANCE_URL, runtime_args=RUNTIME_ARGS) body = [ diff --git a/tests/providers/google/cloud/hooks/test_dataproc.py b/tests/providers/google/cloud/hooks/test_dataproc.py index 7aa176cfd5616..ebca9f5f340d7 100644 --- a/tests/providers/google/cloud/hooks/test_dataproc.py +++ b/tests/providers/google/cloud/hooks/test_dataproc.py @@ -65,7 +65,7 @@ def test_get_cluster_client(self, mock_client, mock_client_info, mock_get_creden mock_client.assert_called_once_with( credentials=mock_get_credentials.return_value, client_info=mock_client_info.return_value, - client_options={"api_endpoint": "{}-dataproc.googleapis.com:443".format(GCP_LOCATION)}, + client_options={"api_endpoint": f"{GCP_LOCATION}-dataproc.googleapis.com:443"}, ) @mock.patch(DATAPROC_STRING.format("DataprocHook._get_credentials")) @@ -85,7 +85,7 @@ def test_get_job_client(self, mock_client, mock_client_info, mock_get_credential mock_client.assert_called_once_with( credentials=mock_get_credentials.return_value, client_info=mock_client_info.return_value, - client_options={"api_endpoint": "{}-dataproc.googleapis.com:443".format(GCP_LOCATION)}, + client_options={"api_endpoint": f"{GCP_LOCATION}-dataproc.googleapis.com:443"}, ) @mock.patch(DATAPROC_STRING.format("DataprocHook.get_cluster_client")) diff --git a/tests/providers/google/cloud/hooks/test_dlp.py b/tests/providers/google/cloud/hooks/test_dlp.py index 055caa52f4586..7e5c5c8dc601f 100644 --- a/tests/providers/google/cloud/hooks/test_dlp.py +++ b/tests/providers/google/cloud/hooks/test_dlp.py @@ -35,11 +35,11 @@ API_RESPONSE = {} # type: Dict[Any, Any] ORGANIZATION_ID = "test-org" -ORGANIZATION_PATH = "organizations/{}".format(ORGANIZATION_ID) +ORGANIZATION_PATH = f"organizations/{ORGANIZATION_ID}" PROJECT_ID = "test-project" -PROJECT_PATH = "projects/{}".format(PROJECT_ID) +PROJECT_PATH = f"projects/{PROJECT_ID}" DLP_JOB_ID = "job123" -DLP_JOB_PATH = "projects/{}/dlpJobs/{}".format(PROJECT_ID, DLP_JOB_ID) +DLP_JOB_PATH = f"projects/{PROJECT_ID}/dlpJobs/{DLP_JOB_ID}" TEMPLATE_ID = "template123" STORED_INFO_TYPE_ID = "type123" TRIGGER_ID = "trigger123" @@ -52,10 +52,10 @@ STORED_INFO_TYPE_ORGANIZATION_PATH = "organizations/{}/storedInfoTypes/{}".format( ORGANIZATION_ID, 
STORED_INFO_TYPE_ID ) -DEIDENTIFY_TEMPLATE_PROJECT_PATH = "projects/{}/deidentifyTemplates/{}".format(PROJECT_ID, TEMPLATE_ID) -INSPECT_TEMPLATE_PROJECT_PATH = "projects/{}/inspectTemplates/{}".format(PROJECT_ID, TEMPLATE_ID) -STORED_INFO_TYPE_PROJECT_PATH = "projects/{}/storedInfoTypes/{}".format(PROJECT_ID, STORED_INFO_TYPE_ID) -JOB_TRIGGER_PATH = "projects/{}/jobTriggers/{}".format(PROJECT_ID, TRIGGER_ID) +DEIDENTIFY_TEMPLATE_PROJECT_PATH = f"projects/{PROJECT_ID}/deidentifyTemplates/{TEMPLATE_ID}" +INSPECT_TEMPLATE_PROJECT_PATH = f"projects/{PROJECT_ID}/inspectTemplates/{TEMPLATE_ID}" +STORED_INFO_TYPE_PROJECT_PATH = f"projects/{PROJECT_ID}/storedInfoTypes/{STORED_INFO_TYPE_ID}" +JOB_TRIGGER_PATH = f"projects/{PROJECT_ID}/jobTriggers/{TRIGGER_ID}" class TestCloudDLPHook(unittest.TestCase): diff --git a/tests/providers/google/cloud/hooks/test_functions.py b/tests/providers/google/cloud/hooks/test_functions.py index 69d19f263dfeb..02fd13847492e 100644 --- a/tests/providers/google/cloud/hooks/test_functions.py +++ b/tests/providers/google/cloud/hooks/test_functions.py @@ -75,7 +75,7 @@ def test_create_new_function_overridden_project_id(self, wait_for_operation_to_c @mock.patch('airflow.providers.google.cloud.hooks.functions.CloudFunctionsHook.get_conn') def test_upload_function_zip_overridden_project_id(self, get_conn, requests_put): mck, open_module = get_open_mock() - with mock.patch('{}.open'.format(open_module), mck): + with mock.patch(f'{open_module}.open', mck): # fmt: off generate_upload_url_method = get_conn.return_value.projects.return_value.locations. \ return_value.functions.return_value.generateUploadUrl @@ -220,7 +220,7 @@ def test_update_function(self, wait_for_operation_to_complete, get_conn): @mock.patch('airflow.providers.google.cloud.hooks.functions.CloudFunctionsHook.get_conn') def test_upload_function_zip(self, get_conn, requests_put, mock_project_id): mck, open_module = get_open_mock() - with mock.patch('{}.open'.format(open_module), mck): + with mock.patch(f'{open_module}.open', mck): # fmt: off generate_upload_url_method = get_conn.return_value.projects.return_value.locations. \ return_value.functions.return_value.generateUploadUrl @@ -248,7 +248,7 @@ def test_upload_function_zip(self, get_conn, requests_put, mock_project_id): @mock.patch('airflow.providers.google.cloud.hooks.functions.CloudFunctionsHook.get_conn') def test_upload_function_zip_overridden_project_id(self, get_conn, requests_put): mck, open_module = get_open_mock() - with mock.patch('{}.open'.format(open_module), mck): + with mock.patch(f'{open_module}.open', mck): # fmt: off generate_upload_url_method = get_conn.return_value.projects.return_value.locations. 
\ return_value.functions.return_value.generateUploadUrl diff --git a/tests/providers/google/cloud/hooks/test_gcs.py b/tests/providers/google/cloud/hooks/test_gcs.py index 9de2b285dbfdd..9b38f41ae7acc 100644 --- a/tests/providers/google/cloud/hooks/test_gcs.py +++ b/tests/providers/google/cloud/hooks/test_gcs.py @@ -1210,13 +1210,13 @@ def test_should_not_overwrite_when_overwrite_is_disabled( mock_copy.assert_not_called() def _create_blob(self, name: str, crc32: str, bucket=None): - blob = mock.MagicMock(name="BLOB:{}".format(name)) + blob = mock.MagicMock(name=f"BLOB:{name}") blob.name = name blob.crc32 = crc32 blob.bucket = bucket return blob def _create_bucket(self, name: str): - bucket = mock.MagicMock(name="BUCKET:{}".format(name)) + bucket = mock.MagicMock(name=f"BUCKET:{name}") bucket.name = name return bucket diff --git a/tests/providers/google/cloud/hooks/test_mlengine.py b/tests/providers/google/cloud/hooks/test_mlengine.py index 10e1078dfd535..f8381e829ff97 100644 --- a/tests/providers/google/cloud/hooks/test_mlengine.py +++ b/tests/providers/google/cloud/hooks/test_mlengine.py @@ -53,8 +53,8 @@ def test_create_version(self, mock_get_conn): 'name': 'test-version', 'labels': {'other-label': 'test-value', 'airflow-version': hook._AIRFLOW_VERSION}, } - operation_path = 'projects/{}/operations/test-operation'.format(project_id) - model_path = 'projects/{}/models/{}'.format(project_id, model_name) + operation_path = f'projects/{project_id}/operations/test-operation' + model_path = f'projects/{project_id}/models/{model_name}' operation_done = {'name': operation_path, 'done': True} # fmt: off ( @@ -102,8 +102,8 @@ def test_create_version_with_labels(self, mock_get_conn): 'name': 'test-version', 'labels': {'airflow-version': hook._AIRFLOW_VERSION}, } - operation_path = 'projects/{}/operations/test-operation'.format(project_id) - model_path = 'projects/{}/models/{}'.format(project_id, model_name) + operation_path = f'projects/{project_id}/operations/test-operation' + model_path = f'projects/{project_id}/models/{model_name}' operation_done = {'name': operation_path, 'done': True} # fmt: off ( @@ -149,8 +149,8 @@ def test_set_default_version(self, mock_get_conn): project_id = 'test-project' model_name = 'test-model' version_name = 'test-version' - operation_path = 'projects/{}/operations/test-operation'.format(project_id) - version_path = 'projects/{}/models/{}/versions/{}'.format(project_id, model_name, version_name) + operation_path = f'projects/{project_id}/operations/test-operation' + version_path = f'projects/{project_id}/models/{model_name}/versions/{version_name}' operation_done = {'name': operation_path, 'done': True} # fmt: off ( @@ -181,11 +181,10 @@ def test_set_default_version(self, mock_get_conn): def test_list_versions(self, mock_get_conn, mock_sleep): project_id = 'test-project' model_name = 'test-model' - model_path = 'projects/{}/models/{}'.format(project_id, model_name) - version_names = ['ver_{}'.format(ix) for ix in range(3)] + model_path = f'projects/{project_id}/models/{model_name}' + version_names = [f'ver_{ix}' for ix in range(3)] response_bodies = [ - {'nextPageToken': "TOKEN-{}".format(ix), 'versions': [ver]} - for ix, ver in enumerate(version_names) + {'nextPageToken': f"TOKEN-{ix}", 'versions': [ver]} for ix, ver in enumerate(version_names) ] response_bodies[-1].pop('nextPageToken') @@ -226,8 +225,8 @@ def test_delete_version(self, mock_get_conn): project_id = 'test-project' model_name = 'test-model' version_name = 'test-version' - operation_path = 
'projects/{}/operations/test-operation'.format(project_id) - version_path = 'projects/{}/models/{}/versions/{}'.format(project_id, model_name, version_name) + operation_path = f'projects/{project_id}/operations/test-operation' + version_path = f'projects/{project_id}/models/{model_name}/versions/{version_name}' version = {'name': operation_path} operation_not_done = {'name': operation_path, 'done': False} operation_done = {'name': operation_path, 'done': True} @@ -275,7 +274,7 @@ def test_create_model(self, mock_get_conn): 'name': model_name, 'labels': {'airflow-version': hook._AIRFLOW_VERSION}, } - project_path = 'projects/{}'.format(project_id) + project_path = f'projects/{project_id}' # fmt: off ( mock_get_conn.return_value. @@ -306,7 +305,7 @@ def test_create_model_idempotency(self, mock_get_conn): 'name': model_name, 'labels': {'airflow-version': hook._AIRFLOW_VERSION}, } - project_path = 'projects/{}'.format(project_id) + project_path = f'projects/{project_id}' # fmt: off ( mock_get_conn.return_value. @@ -373,7 +372,7 @@ def test_create_model_with_labels(self, mock_get_conn): 'name': model_name, 'labels': {'other-label': 'test-value', 'airflow-version': hook._AIRFLOW_VERSION}, } - project_path = 'projects/{}'.format(project_id) + project_path = f'projects/{project_id}' # fmt: off ( mock_get_conn.return_value. @@ -400,7 +399,7 @@ def test_get_model(self, mock_get_conn): project_id = 'test-project' model_name = 'test-model' model = {'model': model_name} - model_path = 'projects/{}/models/{}'.format(project_id, model_name) + model_path = f'projects/{project_id}/models/{model_name}' # fmt: off ( mock_get_conn.return_value. @@ -425,7 +424,7 @@ def test_delete_model(self, mock_get_conn): project_id = 'test-project' model_name = 'test-model' model = {'model': model_name} - model_path = 'projects/{}/models/{}'.format(project_id, model_name) + model_path = f'projects/{project_id}/models/{model_name}' # fmt: off ( mock_get_conn.return_value. @@ -449,7 +448,7 @@ def test_delete_model(self, mock_get_conn): def test_delete_model_when_not_exists(self, mock_get_conn, mock_log): project_id = 'test-project' model_name = 'test-model' - model_path = 'projects/{}/models/{}'.format(project_id, model_name) + model_path = f'projects/{project_id}/models/{model_name}' http_error = HttpError( resp=mock.MagicMock(status=404, reason="Model not found."), content=b'Model not found.' 
) @@ -477,13 +476,13 @@ def test_delete_model_when_not_exists(self, mock_get_conn, mock_log): def test_delete_model_with_contents(self, mock_get_conn, mock_sleep): project_id = 'test-project' model_name = 'test-model' - model_path = 'projects/{}/models/{}'.format(project_id, model_name) - operation_path = 'projects/{}/operations/test-operation'.format(project_id) + model_path = f'projects/{project_id}/models/{model_name}' + operation_path = f'projects/{project_id}/operations/test-operation' operation_done = {'name': operation_path, 'done': True} version_names = ["AAA", "BBB", "CCC"] versions = [ { - 'name': 'projects/{}/models/{}/versions/{}'.format(project_id, model_name, version_name), + 'name': f'projects/{project_id}/models/{model_name}/versions/{version_name}', "isDefault": i == 0, } for i, version_name in enumerate(version_names) @@ -525,7 +524,7 @@ def test_delete_model_with_contents(self, mock_get_conn, mock_sleep): .models() .versions() .delete( - name='projects/{}/models/{}/versions/{}'.format(project_id, model_name, version_name), + name=f'projects/{project_id}/models/{model_name}/versions/{version_name}', ) for version_name in version_names ], @@ -537,8 +536,8 @@ def test_delete_model_with_contents(self, mock_get_conn, mock_sleep): def test_create_mlengine_job(self, mock_get_conn, mock_sleep): project_id = 'test-project' job_id = 'test-job-id' - project_path = 'projects/{}'.format(project_id) - job_path = 'projects/{}/jobs/{}'.format(project_id, job_id) + project_path = f'projects/{project_id}' + job_path = f'projects/{project_id}/jobs/{job_id}' new_job = { 'jobId': job_id, 'foo': 4815162342, @@ -590,8 +589,8 @@ def test_create_mlengine_job(self, mock_get_conn, mock_sleep): def test_create_mlengine_job_with_labels(self, mock_get_conn, mock_sleep): project_id = 'test-project' job_id = 'test-job-id' - project_path = 'projects/{}'.format(project_id) - job_path = 'projects/{}/jobs/{}'.format(project_id, job_id) + project_path = f'projects/{project_id}' + job_path = f'projects/{project_id}/jobs/{job_id}' new_job = {'jobId': job_id, 'foo': 4815162342, 'labels': {'other-label': 'test-value'}} new_job_with_airflow_version = { 'jobId': job_id, @@ -641,8 +640,8 @@ def test_create_mlengine_job_with_labels(self, mock_get_conn, mock_sleep): def test_create_mlengine_job_reuse_existing_job_by_default(self, mock_get_conn): project_id = 'test-project' job_id = 'test-job-id' - project_path = 'projects/{}'.format(project_id) - job_path = 'projects/{}/jobs/{}'.format(project_id, job_id) + project_path = f'projects/{project_id}' + job_path = f'projects/{project_id}/jobs/{job_id}' job_succeeded = { 'jobId': job_id, 'foo': 4815162342, @@ -760,7 +759,7 @@ def check_input(existing_job): def test_cancel_mlengine_job(self, mock_get_conn): project_id = "test-project" job_id = 'test-job-id' - job_path = 'projects/{}/jobs/{}'.format(project_id, job_id) + job_path = f'projects/{project_id}/jobs/{job_id}' job_cancelled = {} # fmt: off @@ -813,7 +812,7 @@ def test_cancel_mlengine_job_nonexistent_job(self, mock_get_conn): def test_cancel_mlengine_job_completed_job(self, mock_get_conn): project_id = "test-project" job_id = 'test-job-id' - job_path = 'projects/{}/jobs/{}'.format(project_id, job_id) + job_path = f'projects/{project_id}/jobs/{job_id}' job_cancelled = {} error_job_already_completed = HttpError( @@ -866,8 +865,8 @@ def test_create_version(self, mock_get_conn, mock_project_id): model_name = 'test-model' version_name = 'test-version' version = {'name': version_name} - operation_path = 
'projects/{}/operations/test-operation'.format(GCP_PROJECT_ID_HOOK_UNIT_TEST) - model_path = 'projects/{}/models/{}'.format(GCP_PROJECT_ID_HOOK_UNIT_TEST, model_name) + operation_path = f'projects/{GCP_PROJECT_ID_HOOK_UNIT_TEST}/operations/test-operation' + model_path = f'projects/{GCP_PROJECT_ID_HOOK_UNIT_TEST}/models/{model_name}' operation_done = {'name': operation_path, 'done': True} # fmt: off ( @@ -910,7 +909,7 @@ def test_create_version(self, mock_get_conn, mock_project_id): def test_set_default_version(self, mock_get_conn, mock_project_id): model_name = 'test-model' version_name = 'test-version' - operation_path = 'projects/{}/operations/test-operation'.format(GCP_PROJECT_ID_HOOK_UNIT_TEST) + operation_path = f'projects/{GCP_PROJECT_ID_HOOK_UNIT_TEST}/operations/test-operation' version_path = 'projects/{}/models/{}/versions/{}'.format( GCP_PROJECT_ID_HOOK_UNIT_TEST, model_name, version_name ) @@ -949,11 +948,10 @@ def test_set_default_version(self, mock_get_conn, mock_project_id): @mock.patch("airflow.providers.google.cloud.hooks.mlengine.MLEngineHook.get_conn") def test_list_versions(self, mock_get_conn, mock_sleep, mock_project_id): model_name = 'test-model' - model_path = 'projects/{}/models/{}'.format(GCP_PROJECT_ID_HOOK_UNIT_TEST, model_name) - version_names = ['ver_{}'.format(ix) for ix in range(3)] + model_path = f'projects/{GCP_PROJECT_ID_HOOK_UNIT_TEST}/models/{model_name}' + version_names = [f'ver_{ix}' for ix in range(3)] response_bodies = [ - {'nextPageToken': "TOKEN-{}".format(ix), 'versions': [ver]} - for ix, ver in enumerate(version_names) + {'nextPageToken': f"TOKEN-{ix}", 'versions': [ver]} for ix, ver in enumerate(version_names) ] response_bodies[-1].pop('nextPageToken') @@ -1053,7 +1051,7 @@ def test_create_model(self, mock_get_conn, mock_project_id): model = { 'name': model_name, } - project_path = 'projects/{}'.format(GCP_PROJECT_ID_HOOK_UNIT_TEST) + project_path = f'projects/{GCP_PROJECT_ID_HOOK_UNIT_TEST}' # fmt: off ( mock_get_conn.return_value. @@ -1082,7 +1080,7 @@ def test_create_model(self, mock_get_conn, mock_project_id): def test_get_model(self, mock_get_conn, mock_project_id): model_name = 'test-model' model = {'model': model_name} - model_path = 'projects/{}/models/{}'.format(GCP_PROJECT_ID_HOOK_UNIT_TEST, model_name) + model_path = f'projects/{GCP_PROJECT_ID_HOOK_UNIT_TEST}/models/{model_name}' # fmt: off ( mock_get_conn.return_value. @@ -1113,7 +1111,7 @@ def test_get_model(self, mock_get_conn, mock_project_id): def test_delete_model(self, mock_get_conn, mock_project_id): model_name = 'test-model' model = {'model': model_name} - model_path = 'projects/{}/models/{}'.format(GCP_PROJECT_ID_HOOK_UNIT_TEST, model_name) + model_path = f'projects/{GCP_PROJECT_ID_HOOK_UNIT_TEST}/models/{model_name}' # fmt: off ( mock_get_conn.return_value. 
@@ -1141,8 +1139,8 @@ def test_delete_model(self, mock_get_conn, mock_project_id): @mock.patch("airflow.providers.google.cloud.hooks.mlengine.MLEngineHook.get_conn") def test_create_mlengine_job(self, mock_get_conn, mock_sleep, mock_project_id): job_id = 'test-job-id' - project_path = 'projects/{}'.format(GCP_PROJECT_ID_HOOK_UNIT_TEST) - job_path = 'projects/{}/jobs/{}'.format(GCP_PROJECT_ID_HOOK_UNIT_TEST, job_id) + project_path = f'projects/{GCP_PROJECT_ID_HOOK_UNIT_TEST}' + job_path = f'projects/{GCP_PROJECT_ID_HOOK_UNIT_TEST}/jobs/{job_id}' new_job = { 'jobId': job_id, 'foo': 4815162342, @@ -1191,7 +1189,7 @@ def test_create_mlengine_job(self, mock_get_conn, mock_sleep, mock_project_id): @mock.patch("airflow.providers.google.cloud.hooks.mlengine.MLEngineHook.get_conn") def test_cancel_mlengine_job(self, mock_get_conn, mock_project_id): job_id = 'test-job-id' - job_path = 'projects/{}/jobs/{}'.format(GCP_PROJECT_ID_HOOK_UNIT_TEST, job_id) + job_path = f'projects/{GCP_PROJECT_ID_HOOK_UNIT_TEST}/jobs/{job_id}' job_cancelled = {} # fmt: off diff --git a/tests/providers/google/cloud/hooks/test_pubsub.py b/tests/providers/google/cloud/hooks/test_pubsub.py index 453496895e2b1..7c1fcad6953eb 100644 --- a/tests/providers/google/cloud/hooks/test_pubsub.py +++ b/tests/providers/google/cloud/hooks/test_pubsub.py @@ -44,8 +44,8 @@ {'attributes': {'foo': ''}}, ] -EXPANDED_TOPIC = 'projects/{}/topics/{}'.format(TEST_PROJECT, TEST_TOPIC) -EXPANDED_SUBSCRIPTION = 'projects/{}/subscriptions/{}'.format(TEST_PROJECT, TEST_SUBSCRIPTION) +EXPANDED_TOPIC = f'projects/{TEST_PROJECT}/topics/{TEST_TOPIC}' +EXPANDED_SUBSCRIPTION = f'projects/{TEST_PROJECT}/subscriptions/{TEST_SUBSCRIPTION}' LABELS = {'airflow-version': 'v' + version.replace('.', '-').replace('+', '-')} @@ -382,9 +382,7 @@ def test_publish(self, mock_service): @mock.patch(PUBSUB_STRING.format('PubSubHook.get_conn')) def test_publish_api_call_error(self, mock_service): publish_method = mock_service.return_value.publish - publish_method.side_effect = GoogleAPICallError( - 'Error publishing to topic {}'.format(EXPANDED_SUBSCRIPTION) - ) + publish_method.side_effect = GoogleAPICallError(f'Error publishing to topic {EXPANDED_SUBSCRIPTION}') with self.assertRaises(PubSubException): self.pubsub_hook.publish(project_id=TEST_PROJECT, topic=TEST_TOPIC, messages=TEST_MESSAGES) diff --git a/tests/providers/google/cloud/hooks/test_secret_manager.py b/tests/providers/google/cloud/hooks/test_secret_manager.py index e6216e1e59d5f..37a492d740933 100644 --- a/tests/providers/google/cloud/hooks/test_secret_manager.py +++ b/tests/providers/google/cloud/hooks/test_secret_manager.py @@ -59,7 +59,7 @@ def test_get_missing_key(self, mock_get_credentials, mock_client): def test_get_existing_key(self, mock_get_credentials, mock_client): mock_client.secret_version_path.return_value = "full-path" test_response = AccessSecretVersionResponse() - test_response.payload.data = "result".encode() + test_response.payload.data = b"result" mock_client.access_secret_version.return_value = test_response secrets_manager_hook = SecretsManagerHook(gcp_conn_id='test') mock_get_credentials.assert_called_once_with() diff --git a/tests/providers/google/cloud/hooks/test_stackdriver.py b/tests/providers/google/cloud/hooks/test_stackdriver.py index 722aa67aa4f25..ae16064829364 100644 --- a/tests/providers/google/cloud/hooks/test_stackdriver.py +++ b/tests/providers/google/cloud/hooks/test_stackdriver.py @@ -96,7 +96,7 @@ def test_stackdriver_list_alert_policies(self, mock_policy_client, 
mock_get_cred project_id=PROJECT_ID, ) method.assert_called_once_with( - name='projects/{project}'.format(project=PROJECT_ID), + name=f'projects/{PROJECT_ID}', filter_=TEST_FILTER, retry=DEFAULT, timeout=DEFAULT, @@ -124,7 +124,7 @@ def test_stackdriver_enable_alert_policy(self, mock_policy_client, mock_get_cred project_id=PROJECT_ID, ) mock_policy_client.return_value.list_alert_policies.assert_called_once_with( - name='projects/{project}'.format(project=PROJECT_ID), + name=f'projects/{PROJECT_ID}', filter_=TEST_FILTER, retry=DEFAULT, timeout=DEFAULT, @@ -162,7 +162,7 @@ def test_stackdriver_disable_alert_policy(self, mock_policy_client, mock_get_cre project_id=PROJECT_ID, ) mock_policy_client.return_value.list_alert_policies.assert_called_once_with( - name='projects/{project}'.format(project=PROJECT_ID), + name=f'projects/{PROJECT_ID}', filter_=TEST_FILTER, retry=DEFAULT, timeout=DEFAULT, @@ -202,7 +202,7 @@ def test_stackdriver_upsert_alert_policy( project_id=PROJECT_ID, ) mock_channel_client.return_value.list_notification_channels.assert_called_once_with( - name='projects/{project}'.format(project=PROJECT_ID), + name=f'projects/{PROJECT_ID}', filter_=None, retry=DEFAULT, timeout=DEFAULT, @@ -211,7 +211,7 @@ def test_stackdriver_upsert_alert_policy( metadata=None, ) mock_policy_client.return_value.list_alert_policies.assert_called_once_with( - name='projects/{project}'.format(project=PROJECT_ID), + name=f'projects/{PROJECT_ID}', filter_=None, retry=DEFAULT, timeout=DEFAULT, @@ -224,7 +224,7 @@ def test_stackdriver_upsert_alert_policy( alert_policy_to_create.ClearField('mutation_record') alert_policy_to_create.conditions[0].ClearField('name') # pylint: disable=no-member mock_policy_client.return_value.create_alert_policy.assert_called_once_with( - name='projects/{project}'.format(project=PROJECT_ID), + name=f'projects/{PROJECT_ID}', alert_policy=alert_policy_to_create, retry=DEFAULT, timeout=DEFAULT, @@ -265,7 +265,7 @@ def test_stackdriver_list_notification_channel(self, mock_channel_client, mock_g project_id=PROJECT_ID, ) mock_channel_client.return_value.list_notification_channels.assert_called_once_with( - name='projects/{project}'.format(project=PROJECT_ID), + name=f'projects/{PROJECT_ID}', filter_=TEST_FILTER, order_by=None, page_size=None, @@ -367,7 +367,7 @@ def test_stackdriver_upsert_channel(self, mock_channel_client, mock_get_creds_an project_id=PROJECT_ID, ) mock_channel_client.return_value.list_notification_channels.assert_called_once_with( - name='projects/{project}'.format(project=PROJECT_ID), + name=f'projects/{PROJECT_ID}', filter_=None, order_by=None, page_size=None, @@ -380,7 +380,7 @@ def test_stackdriver_upsert_channel(self, mock_channel_client, mock_get_creds_an ) notification_channel_to_be_created.ClearField('name') mock_channel_client.return_value.create_notification_channel.assert_called_once_with( - name='projects/{project}'.format(project=PROJECT_ID), + name=f'projects/{PROJECT_ID}', notification_channel=notification_channel_to_be_created, retry=DEFAULT, timeout=DEFAULT, diff --git a/tests/providers/google/cloud/hooks/test_vision.py b/tests/providers/google/cloud/hooks/test_vision.py index ad399516ade1c..f6cfbd882118f 100644 --- a/tests/providers/google/cloud/hooks/test_vision.py +++ b/tests/providers/google/cloud/hooks/test_vision.py @@ -47,7 +47,7 @@ PRODUCT_NAME_TEST = "projects/{}/locations/{}/products/{}".format( PROJECT_ID_TEST, LOC_ID_TEST, PRODUCT_ID_TEST ) -PRODUCT_NAME = "projects/{}/locations/{}/products/{}".format(PROJECT_ID_TEST, LOC_ID_TEST, 
PRODUCT_ID_TEST) +PRODUCT_NAME = f"projects/{PROJECT_ID_TEST}/locations/{LOC_ID_TEST}/products/{PRODUCT_ID_TEST}" REFERENCE_IMAGE_ID_TEST = 'ri-id' REFERENCE_IMAGE_GEN_ID_TEST = 'ri-id' ANNOTATE_IMAGE_REQUEST = { diff --git a/tests/providers/google/cloud/operators/test_bigquery.py b/tests/providers/google/cloud/operators/test_bigquery.py index 5b48b7544703f..b3558bcaddd4f 100644 --- a/tests/providers/google/cloud/operators/test_bigquery.py +++ b/tests/providers/google/cloud/operators/test_bigquery.py @@ -65,7 +65,7 @@ TEST_DAG_ID = 'test-bigquery-operators' TEST_TABLE_RESOURCES = {"tableReference": {"tableId": TEST_TABLE_ID}, "expirationTime": 1234567} VIEW_DEFINITION = { - "query": "SELECT * FROM `{}.{}`".format(TEST_DATASET, TEST_TABLE_ID), + "query": f"SELECT * FROM `{TEST_DATASET}.{TEST_TABLE_ID}`", "useLegacySql": False, } @@ -158,7 +158,7 @@ class TestBigQueryCreateExternalTableOperator(unittest.TestCase): def test_execute(self, mock_hook): operator = BigQueryCreateExternalTableOperator( task_id=TASK_ID, - destination_project_dataset_table='{}.{}'.format(TEST_DATASET, TEST_TABLE_ID), + destination_project_dataset_table=f'{TEST_DATASET}.{TEST_TABLE_ID}', schema_fields=[], bucket=TEST_GCS_BUCKET, source_objects=TEST_GCS_DATA, @@ -167,11 +167,9 @@ def test_execute(self, mock_hook): operator.execute(None) mock_hook.return_value.create_external_table.assert_called_once_with( - external_project_dataset_table='{}.{}'.format(TEST_DATASET, TEST_TABLE_ID), + external_project_dataset_table=f'{TEST_DATASET}.{TEST_TABLE_ID}', schema_fields=[], - source_uris=[ - 'gs://{}/{}'.format(TEST_GCS_BUCKET, source_object) for source_object in TEST_GCS_DATA - ], + source_uris=[f'gs://{TEST_GCS_BUCKET}/{source_object}' for source_object in TEST_GCS_DATA], source_format=TEST_SOURCE_FORMAT, compression='NONE', skip_leading_rows=0, @@ -597,7 +595,7 @@ def test_bigquery_operator_extra_link_when_single_query(self, mock_hook, session ti.xcom_push(key='job_id', value=job_id) self.assertEqual( - 'https://console.cloud.google.com/bigquery?j={job_id}'.format(job_id=job_id), + f'https://console.cloud.google.com/bigquery?j={job_id}', bigquery_task.get_extra_links(DEFAULT_DATE, BigQueryConsoleLink.name), ) diff --git a/tests/providers/google/cloud/operators/test_bigquery_dts_system.py b/tests/providers/google/cloud/operators/test_bigquery_dts_system.py index 839c9b45794dc..a0025376f5b98 100644 --- a/tests/providers/google/cloud/operators/test_bigquery_dts_system.py +++ b/tests/providers/google/cloud/operators/test_bigquery_dts_system.py @@ -40,7 +40,7 @@ def create_dataset(self, project_id: str, dataset: str, table: str): self.execute_with_ctx(["bq", "mk", "--table", table_name, ""], key=GCP_BIGQUERY_KEY) def upload_data(self, dataset: str, table: str, gcs_file: str): - table_name = "{}.{}".format(dataset, table) + table_name = f"{dataset}.{table}" self.execute_with_ctx( [ "bq", diff --git a/tests/providers/google/cloud/operators/test_bigtable.py b/tests/providers/google/cloud/operators/test_bigtable.py index 6e422e596af93..5e95ec956d1aa 100644 --- a/tests/providers/google/cloud/operators/test_bigtable.py +++ b/tests/providers/google/cloud/operators/test_bigtable.py @@ -79,7 +79,7 @@ def test_empty_attribute( gcp_conn_id=GCP_CONN_ID, ) err = e.exception - self.assertEqual(str(err), 'Empty parameter: {}'.format(missing_attribute)) + self.assertEqual(str(err), f'Empty parameter: {missing_attribute}') mock_hook.assert_not_called() @mock.patch('airflow.providers.google.cloud.operators.bigtable.BigtableHook') @@ -301,7 +301,7 
@@ def test_empty_attribute(self, missing_attribute, project_id, instance_id, mock_ task_id="id", ) err = e.exception - self.assertEqual(str(err), 'Empty parameter: {}'.format(missing_attribute)) + self.assertEqual(str(err), f'Empty parameter: {missing_attribute}') mock_hook.assert_not_called() @mock.patch('airflow.providers.google.cloud.operators.bigtable.BigtableHook') @@ -322,7 +322,7 @@ def test_update_instance_that_doesnt_exists(self, mock_hook): op.execute(None) err = e.exception - self.assertEqual(str(err), "Dependency: instance '{}' does not exist.".format(INSTANCE_ID)) + self.assertEqual(str(err), f"Dependency: instance '{INSTANCE_ID}' does not exist.") mock_hook.assert_called_once_with( gcp_conn_id=GCP_CONN_ID, @@ -347,7 +347,7 @@ def test_update_instance_that_doesnt_exists_empty_project_id(self, mock_hook): op.execute(None) err = e.exception - self.assertEqual(str(err), "Dependency: instance '{}' does not exist.".format(INSTANCE_ID)) + self.assertEqual(str(err), f"Dependency: instance '{INSTANCE_ID}' does not exist.") mock_hook.assert_called_once_with( gcp_conn_id=GCP_CONN_ID, @@ -409,7 +409,7 @@ def test_empty_attribute(self, missing_attribute, project_id, instance_id, clust gcp_conn_id=GCP_CONN_ID, ) err = e.exception - self.assertEqual(str(err), 'Empty parameter: {}'.format(missing_attribute)) + self.assertEqual(str(err), f'Empty parameter: {missing_attribute}') mock_hook.assert_not_called() @mock.patch('airflow.providers.google.cloud.operators.bigtable.BigtableHook') @@ -429,7 +429,7 @@ def test_updating_cluster_but_instance_does_not_exists(self, mock_hook): op.execute(None) err = e.exception - self.assertEqual(str(err), "Dependency: instance '{}' does not exist.".format(INSTANCE_ID)) + self.assertEqual(str(err), f"Dependency: instance '{INSTANCE_ID}' does not exist.") mock_hook.assert_called_once_with( gcp_conn_id=GCP_CONN_ID, impersonation_chain=IMPERSONATION_CHAIN, @@ -452,7 +452,7 @@ def test_updating_cluster_but_instance_does_not_exists_empty_project_id(self, mo op.execute(None) err = e.exception - self.assertEqual(str(err), "Dependency: instance '{}' does not exist.".format(INSTANCE_ID)) + self.assertEqual(str(err), f"Dependency: instance '{INSTANCE_ID}' does not exist.") mock_hook.assert_called_once_with( gcp_conn_id=GCP_CONN_ID, impersonation_chain=IMPERSONATION_CHAIN, @@ -481,7 +481,7 @@ def test_updating_cluster_that_does_not_exists(self, mock_hook): err = e.exception self.assertEqual( str(err), - "Dependency: cluster '{}' does not exist for instance '{}'.".format(CLUSTER_ID, INSTANCE_ID), + f"Dependency: cluster '{CLUSTER_ID}' does not exist for instance '{INSTANCE_ID}'.", ) mock_hook.assert_called_once_with( gcp_conn_id=GCP_CONN_ID, @@ -512,7 +512,7 @@ def test_updating_cluster_that_does_not_exists_empty_project_id(self, mock_hook) err = e.exception self.assertEqual( str(err), - "Dependency: cluster '{}' does not exist for instance '{}'.".format(CLUSTER_ID, INSTANCE_ID), + f"Dependency: cluster '{CLUSTER_ID}' does not exist for instance '{INSTANCE_ID}'.", ) mock_hook.assert_called_once_with( gcp_conn_id=GCP_CONN_ID, @@ -597,7 +597,7 @@ def test_empty_attribute(self, missing_attribute, project_id, instance_id, mock_ with self.assertRaises(AirflowException) as e: BigtableDeleteInstanceOperator(project_id=project_id, instance_id=instance_id, task_id="id") err = e.exception - self.assertEqual(str(err), 'Empty parameter: {}'.format(missing_attribute)) + self.assertEqual(str(err), f'Empty parameter: {missing_attribute}') mock_hook.assert_not_called() 
@mock.patch('airflow.providers.google.cloud.operators.bigtable.BigtableHook') @@ -704,7 +704,7 @@ def test_empty_attribute(self, missing_attribute, project_id, instance_id, table gcp_conn_id=GCP_CONN_ID, ) err = e.exception - self.assertEqual(str(err), 'Empty parameter: {}'.format(missing_attribute)) + self.assertEqual(str(err), f'Empty parameter: {missing_attribute}') mock_hook.assert_not_called() @mock.patch('airflow.providers.google.cloud.operators.bigtable.BigtableHook') @@ -767,7 +767,7 @@ def test_deleting_table_when_instance_doesnt_exists(self, mock_hook): with self.assertRaises(AirflowException) as e: op.execute(None) err = e.exception - self.assertEqual(str(err), "Dependency: instance '{}' does not exist.".format(INSTANCE_ID)) + self.assertEqual(str(err), f"Dependency: instance '{INSTANCE_ID}' does not exist.") mock_hook.assert_called_once_with( gcp_conn_id=GCP_CONN_ID, impersonation_chain=IMPERSONATION_CHAIN, @@ -844,7 +844,7 @@ def test_empty_attribute(self, missing_attribute, project_id, instance_id, table gcp_conn_id=GCP_CONN_ID, ) err = e.exception - self.assertEqual(str(err), 'Empty parameter: {}'.format(missing_attribute)) + self.assertEqual(str(err), f'Empty parameter: {missing_attribute}') mock_hook.assert_not_called() @mock.patch('airflow.providers.google.cloud.operators.bigtable.BigtableHook') @@ -865,7 +865,7 @@ def test_instance_not_exists(self, mock_hook): err = e.exception self.assertEqual( str(err), - "Dependency: instance '{}' does not exist in project '{}'.".format(INSTANCE_ID, PROJECT_ID), + f"Dependency: instance '{INSTANCE_ID}' does not exist in project '{PROJECT_ID}'.", ) mock_hook.assert_called_once_with( gcp_conn_id=GCP_CONN_ID, @@ -954,9 +954,7 @@ def test_creating_table_that_exists_with_different_column_families_ids_in_the_ta with self.assertRaises(AirflowException) as e: op.execute(None) err = e.exception - self.assertEqual( - str(err), "Table '{}' already exists with different Column Families.".format(TABLE_ID) - ) + self.assertEqual(str(err), f"Table '{TABLE_ID}' already exists with different Column Families.") mock_hook.assert_called_once_with( gcp_conn_id=GCP_CONN_ID, impersonation_chain=IMPERSONATION_CHAIN, @@ -986,9 +984,7 @@ def test_creating_table_that_exists_with_different_column_families_gc_rule_in__t with self.assertRaises(AirflowException) as e: op.execute(None) err = e.exception - self.assertEqual( - str(err), "Table '{}' already exists with different Column Families.".format(TABLE_ID) - ) + self.assertEqual(str(err), f"Table '{TABLE_ID}' already exists with different Column Families.") mock_hook.assert_called_once_with( gcp_conn_id=GCP_CONN_ID, impersonation_chain=IMPERSONATION_CHAIN, diff --git a/tests/providers/google/cloud/operators/test_cloud_build_system_helper.py b/tests/providers/google/cloud/operators/test_cloud_build_system_helper.py index 5f8425a41ded2..5a1c8499040e3 100755 --- a/tests/providers/google/cloud/operators/test_cloud_build_system_helper.py +++ b/tests/providers/google/cloud/operators/test_cloud_build_system_helper.py @@ -60,10 +60,8 @@ def create_repository_and_bucket(self): file.write('CMD ["/quickstart.sh"]\n') # 2. Prepare bucket - self.execute_cmd(["gsutil", "mb", "gs://{}".format(GCP_BUCKET_NAME)]) - self.execute_cmd( - ["bash", "-c", "tar -zcvf - -C {} . | gsutil cp -r - {}".format(tmp_dir, GCP_ARCHIVE_URL)] - ) + self.execute_cmd(["gsutil", "mb", f"gs://{GCP_BUCKET_NAME}"]) + self.execute_cmd(["bash", "-c", f"tar -zcvf - -C {tmp_dir} . | gsutil cp -r - {GCP_ARCHIVE_URL}"]) # 3. 
Prepare repo self.execute_cmd(["gcloud", "source", "repos", "create", GCP_REPOSITORY_NAME]) @@ -91,14 +89,14 @@ def delete_repo(self): def delete_bucket(self): """Delete bucket in Google Cloud Storage service""" - self.execute_cmd(["gsutil", "rm", "-r", "gs://{}".format(GCP_BUCKET_NAME)]) + self.execute_cmd(["gsutil", "rm", "-r", f"gs://{GCP_BUCKET_NAME}"]) def delete_docker_images(self): """Delete images in Google Cloud Container Registry""" - repo_image_name = "gcr.io/{}/{}".format(GCP_PROJECT_ID, GCP_REPOSITORY_NAME) + repo_image_name = f"gcr.io/{GCP_PROJECT_ID}/{GCP_REPOSITORY_NAME}" self.execute_cmd(["gcloud", "container", "images", "delete", "--quiet", repo_image_name]) - bucket_image_name = "gcr.io/{}/{}".format(GCP_PROJECT_ID, GCP_BUCKET_NAME) + bucket_image_name = f"gcr.io/{GCP_PROJECT_ID}/{GCP_BUCKET_NAME}" self.execute_cmd(["gcloud", "container", "images", "delete", "--quiet", bucket_image_name]) @@ -144,7 +142,7 @@ def delete_docker_images(self): elif action == "delete-docker-images": helper.delete_docker_images() else: - raise Exception("Unknown action: {}".format(action)) + raise Exception(f"Unknown action: {action}") finally: gcp_authenticator.gcp_restore_authentication() diff --git a/tests/providers/google/cloud/operators/test_cloud_sql_system_helper.py b/tests/providers/google/cloud/operators/test_cloud_sql_system_helper.py index 8bdc320d4304c..1201896416fcd 100755 --- a/tests/providers/google/cloud/operators/test_cloud_sql_system_helper.py +++ b/tests/providers/google/cloud/operators/test_cloud_sql_system_helper.py @@ -160,7 +160,7 @@ def check_if_instances_are_up(self, instance_suffix=''): 'instances', 'describe', get_postgres_instance_name(instance_suffix), - "--project={}".format(GCP_PROJECT_ID), + f"--project={GCP_PROJECT_ID}", ] ) if res_postgres != 0: @@ -172,7 +172,7 @@ def check_if_instances_are_up(self, instance_suffix=''): 'instances', 'describe', get_postgres_instance_name(instance_suffix), - "--project={}".format(GCP_PROJECT_ID), + f"--project={GCP_PROJECT_ID}", ] ) if res_postgres != 0: @@ -190,8 +190,8 @@ def authorize_address(self, instance_suffix=''): 'patch', get_postgres_instance_name(instance_suffix), '--quiet', - "--authorized-networks={}".format(ip_address), - "--project={}".format(GCP_PROJECT_ID), + f"--authorized-networks={ip_address}", + f"--project={GCP_PROJECT_ID}", ] ) ) @@ -204,8 +204,8 @@ def authorize_address(self, instance_suffix=''): 'patch', get_mysql_instance_name(instance_suffix), '--quiet', - "--authorized-networks={}".format(ip_address), - "--project={}".format(GCP_PROJECT_ID), + f"--authorized-networks={ip_address}", + f"--project={GCP_PROJECT_ID}", ] ) ) @@ -284,8 +284,8 @@ def __delete_service_accounts_acls(self): 'gsutil', 'iam', 'get', - "gs://{}".format(export_bucket_name), - "--project={}".format(GCP_PROJECT_ID), + f"gs://{export_bucket_name}", + f"--project={GCP_PROJECT_ID}", ] ) all_permissions_dejson = json.loads(all_permissions.decode("utf-8")) @@ -305,9 +305,7 @@ def __delete_service_accounts_acls(self): self.log.warning( "Skip removing member %s as the type %s is not service account", member, member_type ) - self.execute_cmd( - ['gsutil', 'acl', 'ch', '-d', member_email, "gs://{}".format(export_bucket_name)] - ) + self.execute_cmd(['gsutil', 'acl', 'ch', '-d', member_email, f"gs://{export_bucket_name}"]) @staticmethod def set_ip_addresses_in_env(): @@ -317,7 +315,7 @@ def set_ip_addresses_in_env(): @staticmethod def __set_ip_address_in_env(file_name): if os.path.exists(file_name): - with open(file_name, "r") as file: + 
with open(file_name) as file: env, ip_address = file.read().split("=") os.environ[env] = ip_address @@ -556,7 +554,7 @@ def __get_ip_address(self, instance_name: str, env_var: str) -> str: .strip() ) os.environ[env_var] = ip_address - return "{}={}".format(env_var, ip_address) + return f"{env_var}={ip_address}" def __get_operations(self, instance_name: str) -> str: op_name_bytes = self.check_output( @@ -588,7 +586,7 @@ def __wait_for_create(self, operation_name: str) -> None: helper = CloudSqlQueryTestHelper() gcp_authenticator = GcpAuthenticator(GCP_CLOUDSQL_KEY) - helper.log.info('Starting action: {}'.format(action)) + helper.log.info(f'Starting action: {action}') gcp_authenticator.gcp_store_authentication() try: @@ -630,7 +628,7 @@ def __wait_for_create(self, operation_name: str) -> None: elif action == 'delete-service-accounts-acls': helper.delete_service_account_acls() else: - raise Exception("Unknown action: {}".format(action)) + raise Exception(f"Unknown action: {action}") finally: gcp_authenticator.gcp_restore_authentication() - helper.log.info('Finishing action: {}'.format(action)) + helper.log.info(f'Finishing action: {action}') diff --git a/tests/providers/google/cloud/operators/test_compute.py b/tests/providers/google/cloud/operators/test_compute.py index 2f8b726444434..d4cb1eb76dfc0 100644 --- a/tests/providers/google/cloud/operators/test_compute.py +++ b/tests/providers/google/cloud/operators/test_compute.py @@ -43,9 +43,7 @@ GCE_ZONE = 'zone' RESOURCE_ID = 'resource-id' GCE_SHORT_MACHINE_TYPE_NAME = 'n1-machine-type' -SET_MACHINE_TYPE_BODY = { - 'machineType': 'zones/{}/machineTypes/{}'.format(GCE_ZONE, GCE_SHORT_MACHINE_TYPE_NAME) -} +SET_MACHINE_TYPE_BODY = {'machineType': f'zones/{GCE_ZONE}/machineTypes/{GCE_SHORT_MACHINE_TYPE_NAME}'} DEFAULT_DATE = timezone.datetime(2017, 1, 1) diff --git a/tests/providers/google/cloud/operators/test_compute_system_helper.py b/tests/providers/google/cloud/operators/test_compute_system_helper.py index 22640ee81d3f8..9c2a74d8c2f25 100755 --- a/tests/providers/google/cloud/operators/test_compute_system_helper.py +++ b/tests/providers/google/cloud/operators/test_compute_system_helper.py @@ -188,7 +188,7 @@ def create_instance_group_and_template(self): helper = GCPComputeTestHelper() gcp_authenticator = GcpAuthenticator(GCP_COMPUTE_KEY) - helper.log.info('Starting action: {}'.format(action)) + helper.log.info(f'Starting action: {action}') gcp_authenticator.gcp_store_authentication() try: @@ -206,8 +206,8 @@ def create_instance_group_and_template(self): elif action == 'delete-instance-group': helper.delete_instance_group_and_template() else: - raise Exception("Unknown action: {}".format(action)) + raise Exception(f"Unknown action: {action}") finally: gcp_authenticator.gcp_restore_authentication() - helper.log.info('Finishing action: {}'.format(action)) + helper.log.info(f'Finishing action: {action}') diff --git a/tests/providers/google/cloud/operators/test_functions.py b/tests/providers/google/cloud/operators/test_functions.py index cf2c8856e1f04..a96d45ae1ee2c 100644 --- a/tests/providers/google/cloud/operators/test_functions.py +++ b/tests/providers/google/cloud/operators/test_functions.py @@ -39,7 +39,7 @@ GCP_LOCATION = 'test_region' GCF_SOURCE_ARCHIVE_URL = 'gs://folder/file.zip' GCF_ENTRYPOINT = 'helloWorld' -FUNCTION_NAME = 'projects/{}/locations/{}/functions/{}'.format(GCP_PROJECT_ID, GCP_LOCATION, GCF_ENTRYPOINT) +FUNCTION_NAME = f'projects/{GCP_PROJECT_ID}/locations/{GCP_LOCATION}/functions/{GCF_ENTRYPOINT}' GCF_RUNTIME = 'nodejs6' 
VALID_RUNTIMES = ['nodejs6', 'nodejs8', 'python37'] VALID_BODY = { @@ -627,7 +627,7 @@ def test_invalid_name(self, mock_hook): op = CloudFunctionDeleteFunctionOperator(name="invalid_name", task_id="id") op.execute(None) err = cm.exception - self.assertEqual(str(err), 'Parameter name must match pattern: {}'.format(FUNCTION_NAME_PATTERN)) + self.assertEqual(str(err), f'Parameter name must match pattern: {FUNCTION_NAME_PATTERN}') mock_hook.assert_not_called() @mock.patch('airflow.providers.google.cloud.operators.functions.CloudFunctionsHook') diff --git a/tests/providers/google/cloud/operators/test_gcs_system_helper.py b/tests/providers/google/cloud/operators/test_gcs_system_helper.py index 8aa1ca8636bc4..5b9b379c4319f 100644 --- a/tests/providers/google/cloud/operators/test_gcs_system_helper.py +++ b/tests/providers/google/cloud/operators/test_gcs_system_helper.py @@ -57,5 +57,5 @@ def remove_test_files(): os.remove(PATH_TO_TRANSFORM_SCRIPT) def remove_bucket(self): - self.execute_cmd(["gsutil", "rm", "-r", "gs://{bucket}".format(bucket=BUCKET_1)]) - self.execute_cmd(["gsutil", "rm", "-r", "gs://{bucket}".format(bucket=BUCKET_2)]) + self.execute_cmd(["gsutil", "rm", "-r", f"gs://{BUCKET_1}"]) + self.execute_cmd(["gsutil", "rm", "-r", f"gs://{BUCKET_2}"]) diff --git a/tests/providers/google/cloud/operators/test_pubsub.py b/tests/providers/google/cloud/operators/test_pubsub.py index e501de1ffc390..662baa8e7db4d 100644 --- a/tests/providers/google/cloud/operators/test_pubsub.py +++ b/tests/providers/google/cloud/operators/test_pubsub.py @@ -234,7 +234,7 @@ def _generate_messages(self, count): { "ack_id": "%s" % i, "message": { - "data": 'Message {}'.format(i).encode('utf8'), + "data": f'Message {i}'.encode('utf8'), "attributes": {"type": "generated message"}, }, }, diff --git a/tests/providers/google/cloud/operators/test_spanner.py b/tests/providers/google/cloud/operators/test_spanner.py index 30184a59f274a..1ce86f2044d0e 100644 --- a/tests/providers/google/cloud/operators/test_spanner.py +++ b/tests/providers/google/cloud/operators/test_spanner.py @@ -183,7 +183,7 @@ def test_instance_create_ex_if_param_missing(self, project_id, instance_id, exp_ task_id="id", ) err = cm.exception - self.assertIn("The required parameter '{}' is empty".format(exp_msg), str(err)) + self.assertIn(f"The required parameter '{exp_msg}' is empty", str(err)) mock_hook.assert_not_called() @mock.patch("airflow.providers.google.cloud.operators.spanner.SpannerHook") @@ -237,7 +237,7 @@ def test_instance_delete_ex_if_param_missing(self, project_id, instance_id, exp_ with self.assertRaises(AirflowException) as cm: SpannerDeleteInstanceOperator(project_id=project_id, instance_id=instance_id, task_id="id") err = cm.exception - self.assertIn("The required parameter '{}' is empty".format(exp_msg), str(err)) + self.assertIn(f"The required parameter '{exp_msg}' is empty", str(err)) mock_hook.assert_not_called() @mock.patch("airflow.providers.google.cloud.operators.spanner.SpannerHook") @@ -297,7 +297,7 @@ def test_instance_query_ex_if_param_missing( task_id="id", ) err = cm.exception - self.assertIn("The required parameter '{}' is empty".format(exp_msg), str(err)) + self.assertIn(f"The required parameter '{exp_msg}' is empty", str(err)) mock_hook.assert_not_called() @mock.patch("airflow.providers.google.cloud.operators.spanner.SpannerHook") @@ -418,7 +418,7 @@ def test_database_create_ex_if_param_missing( task_id="id", ) err = cm.exception - self.assertIn("The required parameter '{}' is empty".format(exp_msg), str(err)) + 
self.assertIn(f"The required parameter '{exp_msg}' is empty", str(err)) mock_hook.assert_not_called() @mock.patch("airflow.providers.google.cloud.operators.spanner.SpannerHook") @@ -485,7 +485,7 @@ def test_database_update_ex_if_param_missing( task_id="id", ) err = cm.exception - self.assertIn("The required parameter '{}' is empty".format(exp_msg), str(err)) + self.assertIn(f"The required parameter '{exp_msg}' is empty", str(err)) mock_hook.assert_not_called() @mock.patch("airflow.providers.google.cloud.operators.spanner.SpannerHook") @@ -575,5 +575,5 @@ def test_database_delete_ex_if_param_missing( task_id="id", ) err = cm.exception - self.assertIn("The required parameter '{}' is empty".format(exp_msg), str(err)) + self.assertIn(f"The required parameter '{exp_msg}' is empty", str(err)) mock_hook.assert_not_called() diff --git a/tests/providers/google/cloud/sensors/test_bigtable.py b/tests/providers/google/cloud/sensors/test_bigtable.py index be5054b4da4e0..e6df23c613e1b 100644 --- a/tests/providers/google/cloud/sensors/test_bigtable.py +++ b/tests/providers/google/cloud/sensors/test_bigtable.py @@ -54,7 +54,7 @@ def test_empty_attribute(self, missing_attribute, project_id, instance_id, table impersonation_chain=IMPERSONATION_CHAIN, ) err = e.exception - self.assertEqual(str(err), 'Empty parameter: {}'.format(missing_attribute)) + self.assertEqual(str(err), f'Empty parameter: {missing_attribute}') mock_hook.assert_not_called() @mock.patch('airflow.providers.google.cloud.sensors.bigtable.BigtableHook') diff --git a/tests/providers/google/cloud/sensors/test_pubsub.py b/tests/providers/google/cloud/sensors/test_pubsub.py index c9c8d11a321d2..46c85e3158e50 100644 --- a/tests/providers/google/cloud/sensors/test_pubsub.py +++ b/tests/providers/google/cloud/sensors/test_pubsub.py @@ -38,7 +38,7 @@ def _generate_messages(self, count): { "ack_id": "%s" % i, "message": { - "data": 'Message {}'.format(i).encode('utf8'), + "data": f'Message {i}'.encode('utf8'), "attributes": {"type": "generated message"}, }, }, diff --git a/tests/providers/google/cloud/transfers/test_bigquery_to_bigquery.py b/tests/providers/google/cloud/transfers/test_bigquery_to_bigquery.py index c698c09a7ec03..11f71fbb4d451 100644 --- a/tests/providers/google/cloud/transfers/test_bigquery_to_bigquery.py +++ b/tests/providers/google/cloud/transfers/test_bigquery_to_bigquery.py @@ -30,7 +30,7 @@ class TestBigQueryToBigQueryOperator(unittest.TestCase): @mock.patch('airflow.providers.google.cloud.transfers.bigquery_to_bigquery.BigQueryHook') def test_execute(self, mock_hook): - source_project_dataset_tables = '{}.{}'.format(TEST_DATASET, TEST_TABLE_ID) + source_project_dataset_tables = f'{TEST_DATASET}.{TEST_TABLE_ID}' destination_project_dataset_table = '{}.{}'.format(TEST_DATASET + '_new', TEST_TABLE_ID) write_disposition = 'WRITE_EMPTY' create_disposition = 'CREATE_IF_NEEDED' diff --git a/tests/providers/google/cloud/transfers/test_bigquery_to_gcs.py b/tests/providers/google/cloud/transfers/test_bigquery_to_gcs.py index 81834ef52139d..d25dbf1955bd1 100644 --- a/tests/providers/google/cloud/transfers/test_bigquery_to_gcs.py +++ b/tests/providers/google/cloud/transfers/test_bigquery_to_gcs.py @@ -31,7 +31,7 @@ class TestBigQueryToCloudStorageOperator(unittest.TestCase): @mock.patch('airflow.providers.google.cloud.transfers.bigquery_to_gcs.BigQueryHook') def test_execute(self, mock_hook): - source_project_dataset_table = '{}.{}'.format(TEST_DATASET, TEST_TABLE_ID) + source_project_dataset_table = f'{TEST_DATASET}.{TEST_TABLE_ID}' 
destination_cloud_storage_uris = ['gs://some-bucket/some-file.txt'] compression = 'NONE' export_format = 'CSV' diff --git a/tests/providers/google/cloud/transfers/test_bigquery_to_mysql.py b/tests/providers/google/cloud/transfers/test_bigquery_to_mysql.py index 3c2c492a8d7b9..29811ab33ac77 100644 --- a/tests/providers/google/cloud/transfers/test_bigquery_to_mysql.py +++ b/tests/providers/google/cloud/transfers/test_bigquery_to_mysql.py @@ -33,7 +33,7 @@ def test_execute_good_request_to_bq(self, mock_hook): destination_table = 'table' operator = BigQueryToMySqlOperator( task_id=TASK_ID, - dataset_table='{}.{}'.format(TEST_DATASET, TEST_TABLE_ID), + dataset_table=f'{TEST_DATASET}.{TEST_TABLE_ID}', mysql_table=destination_table, replace=False, ) diff --git a/tests/providers/google/cloud/transfers/test_facebook_ads_to_gcs_system.py b/tests/providers/google/cloud/transfers/test_facebook_ads_to_gcs_system.py index 120f520093368..a912629d5e6b5 100644 --- a/tests/providers/google/cloud/transfers/test_facebook_ads_to_gcs_system.py +++ b/tests/providers/google/cloud/transfers/test_facebook_ads_to_gcs_system.py @@ -47,11 +47,11 @@ def provide_facebook_connection(key_file_path: str): """ if not key_file_path.endswith(".json"): raise AirflowException("Use a JSON key file.") - with open(key_file_path, 'r') as credentials: + with open(key_file_path) as credentials: creds = json.load(credentials) missing_keys = CONFIG_REQUIRED_FIELDS - creds.keys() if missing_keys: - message = "{missing_keys} fields are missing".format(missing_keys=missing_keys) + message = f"{missing_keys} fields are missing" raise AirflowException(message) conn = Connection(conn_id=FACEBOOK_CONNECTION_ID, conn_type=CONNECTION_TYPE, extra=json.dumps(creds)) with patch_environ({f"AIRFLOW_CONN_{conn.conn_id.upper()}": conn.get_uri()}): diff --git a/tests/providers/google/cloud/transfers/test_gcs_to_gcs_system.py b/tests/providers/google/cloud/transfers/test_gcs_to_gcs_system.py index 13b3a7afeb372..8878fb77ed64b 100644 --- a/tests/providers/google/cloud/transfers/test_gcs_to_gcs_system.py +++ b/tests/providers/google/cloud/transfers/test_gcs_to_gcs_system.py @@ -41,14 +41,14 @@ def create_buckets(self): self.create_gcs_bucket(name) # 2. 
Prepare parents - first_parent = "gs://{}/parent-1.bin".format(BUCKET_1_SRC) - second_parent = "gs://{}/parent-2.bin".format(BUCKET_1_SRC) + first_parent = f"gs://{BUCKET_1_SRC}/parent-1.bin" + second_parent = f"gs://{BUCKET_1_SRC}/parent-2.bin" self.execute_with_ctx( [ "bash", "-c", - "cat /dev/urandom | head -c $((1 * 1024 * 1024)) | gsutil cp - {}".format(first_parent), + f"cat /dev/urandom | head -c $((1 * 1024 * 1024)) | gsutil cp - {first_parent}", ], key=GCP_GCS_KEY, ) @@ -57,7 +57,7 @@ def create_buckets(self): [ "bash", "-c", - "cat /dev/urandom | head -c $((1 * 1024 * 1024)) | gsutil cp - {}".format(second_parent), + f"cat /dev/urandom | head -c $((1 * 1024 * 1024)) | gsutil cp - {second_parent}", ], key=GCP_GCS_KEY, ) diff --git a/tests/providers/google/cloud/transfers/test_gcs_to_sftp_system.py b/tests/providers/google/cloud/transfers/test_gcs_to_sftp_system.py index 889613656c956..ca4f59d0ca755 100644 --- a/tests/providers/google/cloud/transfers/test_gcs_to_sftp_system.py +++ b/tests/providers/google/cloud/transfers/test_gcs_to_sftp_system.py @@ -43,12 +43,12 @@ def setUp(self): for bucket_src, object_source in product( ( BUCKET_SRC, - "{}/subdir-1".format(BUCKET_SRC), - "{}/subdir-2".format(BUCKET_SRC), + f"{BUCKET_SRC}/subdir-1", + f"{BUCKET_SRC}/subdir-2", ), (OBJECT_SRC_1, OBJECT_SRC_2), ): - source_path = "gs://{}/{}".format(bucket_src, object_source) + source_path = f"gs://{bucket_src}/{object_source}" self.upload_content_to_gcs( lines=f"{os.urandom(1 * 1024 * 1024)}", bucket=source_path, filename=object_source ) diff --git a/tests/providers/google/cloud/transfers/test_postgres_to_gcs.py b/tests/providers/google/cloud/transfers/test_postgres_to_gcs.py index 7c34f86414227..b0c4c0df754ce 100644 --- a/tests/providers/google/cloud/transfers/test_postgres_to_gcs.py +++ b/tests/providers/google/cloud/transfers/test_postgres_to_gcs.py @@ -52,8 +52,8 @@ def setUpClass(cls): with postgres.get_conn() as conn: with conn.cursor() as cur: for table in TABLES: - cur.execute("DROP TABLE IF EXISTS {} CASCADE;".format(table)) - cur.execute("CREATE TABLE {}(some_str varchar, some_num integer);".format(table)) + cur.execute(f"DROP TABLE IF EXISTS {table} CASCADE;") + cur.execute(f"CREATE TABLE {table}(some_str varchar, some_num integer);") cur.execute( "INSERT INTO postgres_to_gcs_operator VALUES(%s, %s);", ('mock_row_content_1', 42) @@ -71,7 +71,7 @@ def tearDownClass(cls): with postgres.get_conn() as conn: with conn.cursor() as cur: for table in TABLES: - cur.execute("DROP TABLE IF EXISTS {} CASCADE;".format(table)) + cur.execute(f"DROP TABLE IF EXISTS {table} CASCADE;") def test_init(self): """Test PostgresToGoogleCloudStorageOperator instance is properly initialized.""" diff --git a/tests/providers/google/cloud/transfers/test_salesforce_to_gcs.py b/tests/providers/google/cloud/transfers/test_salesforce_to_gcs.py index e42da7a675197..e52e8ddf39ded 100644 --- a/tests/providers/google/cloud/transfers/test_salesforce_to_gcs.py +++ b/tests/providers/google/cloud/transfers/test_salesforce_to_gcs.py @@ -18,7 +18,7 @@ import unittest from collections import OrderedDict -import mock +from unittest import mock from airflow.providers.google.cloud.hooks.gcs import GCSHook from airflow.providers.google.cloud.transfers.salesforce_to_gcs import SalesforceToGcsOperator @@ -29,7 +29,7 @@ SALESFORCE_CONNECTION_ID = "test-salesforce-connection" GCS_BUCKET = "test-bucket" GCS_OBJECT_PATH = "path/to/test-file-path" -EXPECTED_GCS_URI = "gs://{}/{}".format(GCS_BUCKET, GCS_OBJECT_PATH) +EXPECTED_GCS_URI = 
f"gs://{GCS_BUCKET}/{GCS_OBJECT_PATH}" GCP_CONNECTION_ID = "google_cloud_default" SALESFORCE_RESPONSE = { 'records': [ diff --git a/tests/providers/google/cloud/utils/gcp_authenticator.py b/tests/providers/google/cloud/utils/gcp_authenticator.py index 46ba8352c1fd3..7c6ca9db2541d 100644 --- a/tests/providers/google/cloud/utils/gcp_authenticator.py +++ b/tests/providers/google/cloud/utils/gcp_authenticator.py @@ -117,7 +117,7 @@ def set_dictionary_in_airflow_connection(self): try: conn = session.query(Connection).filter(Connection.conn_id == 'google_cloud_default')[0] extras = conn.extra_dejson - with open(self.full_key_path, "r") as path_file: + with open(self.full_key_path) as path_file: content = json.load(path_file) extras[KEYFILE_DICT_EXTRA] = json.dumps(content) if extras.get(KEYPATH_EXTRA): @@ -180,8 +180,8 @@ def gcp_authenticate(self): 'gcloud', 'auth', 'activate-service-account', - '--key-file={}'.format(self.full_key_path), - '--project={}'.format(self.project_id), + f'--key-file={self.full_key_path}', + f'--project={self.project_id}', ] ) self.set_key_path_in_airflow_connection() @@ -192,10 +192,8 @@ def gcp_revoke_authentication(self): """ self._validate_key_set() self.log.info("Revoking authentication - setting it to none") - self.execute_cmd(['gcloud', 'config', 'get-value', 'account', '--project={}'.format(self.project_id)]) - self.execute_cmd( - ['gcloud', 'config', 'set', 'account', 'none', '--project={}'.format(self.project_id)] - ) + self.execute_cmd(['gcloud', 'config', 'get-value', 'account', f'--project={self.project_id}']) + self.execute_cmd(['gcloud', 'config', 'set', 'account', 'none', f'--project={self.project_id}']) def gcp_store_authentication(self): """ @@ -205,7 +203,7 @@ def gcp_store_authentication(self): self._validate_key_set() if not GcpAuthenticator.original_account: GcpAuthenticator.original_account = self.check_output( - ['gcloud', 'config', 'get-value', 'account', '--project={}'.format(self.project_id)] + ['gcloud', 'config', 'get-value', 'account', f'--project={self.project_id}'] ).decode('utf-8') self.log.info("Storing account: to restore it later %s", GcpAuthenticator.original_account) @@ -223,7 +221,7 @@ def gcp_restore_authentication(self): 'set', 'account', GcpAuthenticator.original_account, - '--project={}'.format(self.project_id), + f'--project={self.project_id}', ] ) else: diff --git a/tests/providers/microsoft/winrm/hooks/test_winrm.py b/tests/providers/microsoft/winrm/hooks/test_winrm.py index a7b7570bc8235..8b9643ba533e7 100644 --- a/tests/providers/microsoft/winrm/hooks/test_winrm.py +++ b/tests/providers/microsoft/winrm/hooks/test_winrm.py @@ -116,5 +116,5 @@ def test_get_conn_no_endpoint(self, mock_protocol): winrm_hook.get_conn() self.assertEqual( - 'http://{0}:{1}/wsman'.format(winrm_hook.remote_host, winrm_hook.remote_port), winrm_hook.endpoint + f'http://{winrm_hook.remote_host}:{winrm_hook.remote_port}/wsman', winrm_hook.endpoint ) diff --git a/tests/providers/mysql/hooks/test_mysql.py b/tests/providers/mysql/hooks/test_mysql.py index 5dc3ee284e6a2..ffe87dfdbe355 100644 --- a/tests/providers/mysql/hooks/test_mysql.py +++ b/tests/providers/mysql/hooks/test_mysql.py @@ -333,10 +333,10 @@ def __init__(self, client): self.init_client = self.connection.extra_dejson.get('client', 'mysqlclient') def __enter__(self): - self.connection.set_extra('{{"client": "{}"}}'.format(self.client)) + self.connection.set_extra(f'{{"client": "{self.client}"}}') def __exit__(self, exc_type, exc_val, exc_tb): - self.connection.set_extra('{{"client": 
"{}"}}'.format(self.init_client)) + self.connection.set_extra(f'{{"client": "{self.init_client}"}}') @pytest.mark.backend("mysql") @@ -350,7 +350,7 @@ def tearDown(self): drop_tables = {'test_mysql_to_mysql', 'test_airflow'} with MySqlHook().get_conn() as conn: for table in drop_tables: - conn.execute("DROP TABLE IF EXISTS {}".format(table)) + conn.execute(f"DROP TABLE IF EXISTS {table}") @parameterized.expand( [ @@ -404,10 +404,10 @@ def test_mysql_hook_test_bulk_dump(self, client): # Confirm that no error occurs hook.bulk_dump( "INFORMATION_SCHEMA.TABLES", - os.path.join(priv[0], "TABLES_{}-{}".format(client, uuid.uuid1())), + os.path.join(priv[0], f"TABLES_{client}-{uuid.uuid1()}"), ) elif priv == ("",): - hook.bulk_dump("INFORMATION_SCHEMA.TABLES", "TABLES_{}_{}".format(client, uuid.uuid1())) + hook.bulk_dump("INFORMATION_SCHEMA.TABLES", f"TABLES_{client}_{uuid.uuid1()}") else: self.skipTest("Skip test_mysql_hook_test_bulk_load " "since file output is not permitted") diff --git a/tests/providers/mysql/operators/test_mysql.py b/tests/providers/mysql/operators/test_mysql.py index d9b064c7c3a62..af5cc25e122d2 100644 --- a/tests/providers/mysql/operators/test_mysql.py +++ b/tests/providers/mysql/operators/test_mysql.py @@ -43,7 +43,7 @@ def tearDown(self): drop_tables = {'test_mysql_to_mysql', 'test_airflow'} with MySqlHook().get_conn() as conn: for table in drop_tables: - conn.execute("DROP TABLE IF EXISTS {}".format(table)) + conn.execute(f"DROP TABLE IF EXISTS {table}") @parameterized.expand( [ diff --git a/tests/providers/postgres/hooks/test_postgres.py b/tests/providers/postgres/hooks/test_postgres.py index 9bcfdf50eec55..07e100fe2848c 100644 --- a/tests/providers/postgres/hooks/test_postgres.py +++ b/tests/providers/postgres/hooks/test_postgres.py @@ -117,7 +117,7 @@ def test_get_conn_extra(self, mock_connect): def test_get_conn_rds_iam_redshift(self, mock_client, mock_connect): self.connection.extra = '{"iam":true, "redshift":true}' self.connection.host = 'cluster-identifier.ccdfre4hpd39h.us-east-1.redshift.amazonaws.com' - login = 'IAM:{login}'.format(login=self.connection.login) + login = f'IAM:{self.connection.login}' mock_client.return_value.get_cluster_credentials.return_value = { 'DbPassword': 'aws_token', 'DbUser': login, @@ -153,7 +153,7 @@ def tearDown(self): with PostgresHook().get_conn() as conn: with conn.cursor() as cur: - cur.execute("DROP TABLE IF EXISTS {}".format(self.table)) + cur.execute(f"DROP TABLE IF EXISTS {self.table}") @pytest.mark.backend("postgres") def test_copy_expert(self): @@ -179,7 +179,7 @@ def test_bulk_load(self): with hook.get_conn() as conn: with conn.cursor() as cur: - cur.execute("CREATE TABLE {} (c VARCHAR)".format(self.table)) + cur.execute(f"CREATE TABLE {self.table} (c VARCHAR)") conn.commit() with NamedTemporaryFile() as f: @@ -187,7 +187,7 @@ def test_bulk_load(self): f.flush() hook.bulk_load(self.table, f.name) - cur.execute("SELECT * FROM {}".format(self.table)) + cur.execute(f"SELECT * FROM {self.table}") results = [row[0] for row in cur.fetchall()] self.assertEqual(sorted(input_data), sorted(results)) @@ -199,9 +199,9 @@ def test_bulk_dump(self): with hook.get_conn() as conn: with conn.cursor() as cur: - cur.execute("CREATE TABLE {} (c VARCHAR)".format(self.table)) - values = ",".join("('{}')".format(data) for data in input_data) - cur.execute("INSERT INTO {} VALUES {}".format(self.table, values)) + cur.execute(f"CREATE TABLE {self.table} (c VARCHAR)") + values = ",".join(f"('{data}')" for data in input_data) + cur.execute(f"INSERT 
INTO {self.table} VALUES {values}") conn.commit() with NamedTemporaryFile() as f: @@ -224,7 +224,7 @@ def test_insert_rows(self): commit_count = 2 # The first and last commit self.assertEqual(commit_count, self.conn.commit.call_count) - sql = "INSERT INTO {} VALUES (%s)".format(table) + sql = f"INSERT INTO {table} VALUES (%s)" for row in rows: self.cur.execute.assert_any_call(sql, row) @@ -299,8 +299,8 @@ def test_rowcount(self): with hook.get_conn() as conn: with conn.cursor() as cur: - cur.execute("CREATE TABLE {} (c VARCHAR)".format(self.table)) - values = ",".join("('{}')".format(data) for data in input_data) - cur.execute("INSERT INTO {} VALUES {}".format(self.table, values)) + cur.execute(f"CREATE TABLE {self.table} (c VARCHAR)") + values = ",".join(f"('{data}')" for data in input_data) + cur.execute(f"INSERT INTO {self.table} VALUES {values}") conn.commit() self.assertEqual(cur.rowcount, len(input_data)) diff --git a/tests/providers/sendgrid/utils/test_emailer.py b/tests/providers/sendgrid/utils/test_emailer.py index 5a17c712a640f..df872013447ba 100644 --- a/tests/providers/sendgrid/utils/test_emailer.py +++ b/tests/providers/sendgrid/utils/test_emailer.py @@ -80,7 +80,7 @@ def test_send_email_sendgrid_correct_email(self, mock_post): attachments=[ { 'content': 'dGhpcyBpcyBzb21lIHRlc3QgZGF0YQ==', - 'content_id': '<{0}>'.format(filename), + 'content_id': f'<{filename}>', 'disposition': 'attachment', 'filename': filename, 'type': 'text/plain', diff --git a/tests/providers/sftp/operators/test_sftp.py b/tests/providers/sftp/operators/test_sftp.py index fde5b47730073..927b6d6060c77 100644 --- a/tests/providers/sftp/operators/test_sftp.py +++ b/tests/providers/sftp/operators/test_sftp.py @@ -53,12 +53,12 @@ def setUp(self): self.test_remote_dir = "/tmp/tmp1" self.test_local_filename = 'test_local_file' self.test_remote_filename = 'test_remote_file' - self.test_local_filepath = '{0}/{1}'.format(self.test_dir, self.test_local_filename) + self.test_local_filepath = f'{self.test_dir}/{self.test_local_filename}' # Local Filepath with Intermediate Directory - self.test_local_filepath_int_dir = '{0}/{1}'.format(self.test_local_dir, self.test_local_filename) - self.test_remote_filepath = '{0}/{1}'.format(self.test_dir, self.test_remote_filename) + self.test_local_filepath_int_dir = f'{self.test_local_dir}/{self.test_local_filename}' + self.test_remote_filepath = f'{self.test_dir}/{self.test_remote_filename}' # Remote Filepath with Intermediate Directory - self.test_remote_filepath_int_dir = '{0}/{1}'.format(self.test_remote_dir, self.test_remote_filename) + self.test_remote_filepath_int_dir = f'{self.test_remote_dir}/{self.test_remote_filename}' @conf_vars({('core', 'enable_xcom_pickling'): 'True'}) def test_pickle_file_transfer_put(self): @@ -88,7 +88,7 @@ def test_pickle_file_transfer_put(self): check_file_task = SSHOperator( task_id="check_file_task", ssh_hook=self.hook, - command="cat {0}".format(self.test_remote_filepath), + command=f"cat {self.test_remote_filepath}", do_xcom_push=True, dag=self.dag, ) @@ -156,7 +156,7 @@ def test_file_transfer_with_intermediate_dir_put(self): check_file_task = SSHOperator( task_id="test_check_file", ssh_hook=self.hook, - command="cat {0}".format(self.test_remote_filepath_int_dir), + command=f"cat {self.test_remote_filepath_int_dir}", do_xcom_push=True, dag=self.dag, ) @@ -194,7 +194,7 @@ def test_json_file_transfer_put(self): check_file_task = SSHOperator( task_id="check_file_task", ssh_hook=self.hook, - command="cat {0}".format(self.test_remote_filepath), + 
command=f"cat {self.test_remote_filepath}", do_xcom_push=True, dag=self.dag, ) @@ -217,7 +217,7 @@ def test_pickle_file_transfer_get(self): create_file_task = SSHOperator( task_id="test_create_file", ssh_hook=self.hook, - command="echo '{0}' > {1}".format(test_remote_file_content, self.test_remote_filepath), + command=f"echo '{test_remote_file_content}' > {self.test_remote_filepath}", do_xcom_push=True, dag=self.dag, ) @@ -240,7 +240,7 @@ def test_pickle_file_transfer_get(self): # test the received content content_received = None - with open(self.test_local_filepath, 'r') as file: + with open(self.test_local_filepath) as file: content_received = file.read() self.assertEqual(content_received.strip(), test_remote_file_content) @@ -255,7 +255,7 @@ def test_json_file_transfer_get(self): create_file_task = SSHOperator( task_id="test_create_file", ssh_hook=self.hook, - command="echo '{0}' > {1}".format(test_remote_file_content, self.test_remote_filepath), + command=f"echo '{test_remote_file_content}' > {self.test_remote_filepath}", do_xcom_push=True, dag=self.dag, ) @@ -278,7 +278,7 @@ def test_json_file_transfer_get(self): # test the received content content_received = None - with open(self.test_local_filepath, 'r') as file: + with open(self.test_local_filepath) as file: content_received = file.read() self.assertEqual(content_received.strip(), test_remote_file_content.encode('utf-8').decode('utf-8')) @@ -293,7 +293,7 @@ def test_file_transfer_no_intermediate_dir_error_get(self): create_file_task = SSHOperator( task_id="test_create_file", ssh_hook=self.hook, - command="echo '{0}' > {1}".format(test_remote_file_content, self.test_remote_filepath), + command=f"echo '{test_remote_file_content}' > {self.test_remote_filepath}", do_xcom_push=True, dag=self.dag, ) @@ -329,7 +329,7 @@ def test_file_transfer_with_intermediate_dir_error_get(self): create_file_task = SSHOperator( task_id="test_create_file", ssh_hook=self.hook, - command="echo '{0}' > {1}".format(test_remote_file_content, self.test_remote_filepath), + command=f"echo '{test_remote_file_content}' > {self.test_remote_filepath}", do_xcom_push=True, dag=self.dag, ) @@ -353,7 +353,7 @@ def test_file_transfer_with_intermediate_dir_error_get(self): # test the received content content_received = None - with open(self.test_local_filepath_int_dir, 'r') as file: + with open(self.test_local_filepath_int_dir) as file: content_received = file.read() self.assertEqual(content_received.strip(), test_remote_file_content) @@ -430,7 +430,7 @@ def delete_remote_resource(self): remove_file_task = SSHOperator( task_id="test_check_file", ssh_hook=self.hook, - command="rm {0}".format(self.test_remote_filepath), + command=f"rm {self.test_remote_filepath}", do_xcom_push=True, dag=self.dag, ) diff --git a/tests/secrets/test_local_filesystem.py b/tests/secrets/test_local_filesystem.py index 61849b3c16c28..bc6d0dea0a079 100644 --- a/tests/secrets/test_local_filesystem.py +++ b/tests/secrets/test_local_filesystem.py @@ -357,7 +357,7 @@ def test_ensure_unique_connection_yaml(self, file_content): class TestLocalFileBackend(unittest.TestCase): def test_should_read_variable(self): with NamedTemporaryFile(suffix="var.env") as tmp_file: - tmp_file.write("KEY_A=VAL_A".encode()) + tmp_file.write(b"KEY_A=VAL_A") tmp_file.flush() backend = LocalFilesystemBackend(variables_file_path=tmp_file.name) self.assertEqual("VAL_A", backend.get_variable("KEY_A")) @@ -365,7 +365,7 @@ def test_should_read_variable(self): def test_should_read_connection(self): with 
NamedTemporaryFile(suffix=".env") as tmp_file: - tmp_file.write("CONN_A=mysql://host_a".encode()) + tmp_file.write(b"CONN_A=mysql://host_a") tmp_file.flush() backend = LocalFilesystemBackend(connections_file_path=tmp_file.name) self.assertEqual( diff --git a/tests/secrets/test_secrets_backends.py b/tests/secrets/test_secrets_backends.py index 08f8dd4573c8e..ced829a96ce23 100644 --- a/tests/secrets/test_secrets_backends.py +++ b/tests/secrets/test_secrets_backends.py @@ -35,7 +35,7 @@ class SampleConn: def __init__(self, conn_id, variation: str): self.conn_id = conn_id self.var_name = "AIRFLOW_CONN_" + self.conn_id.upper() - self.host = "host_{}.com".format(variation) + self.host = f"host_{variation}.com" self.conn_uri = ( "mysql://user:pw@" + self.host + "/schema?extra1=val%2B1&extra2=val%2B2" ) diff --git a/tests/sensors/test_external_task_sensor.py b/tests/sensors/test_external_task_sensor.py index 0a978e81e67f8..7a1e917e8b31b 100644 --- a/tests/sensors/test_external_task_sensor.py +++ b/tests/sensors/test_external_task_sensor.py @@ -165,9 +165,9 @@ def test_templated_sensor(self): instance.render_templates() self.assertEqual(sensor.external_dag_id, - "dag_{}".format(DEFAULT_DATE.date())) + f"dag_{DEFAULT_DATE.date()}") self.assertEqual(sensor.external_task_id, - "task_{}".format(DEFAULT_DATE.date())) + f"task_{DEFAULT_DATE.date()}") def test_external_task_sensor_fn_multiple_execution_dates(self): bash_command_code = """ @@ -512,7 +512,7 @@ def clear_tasks(dag_bag, dag, task, start_date=DEFAULT_DATE, end_date=DEFAULT_DA """ Clear the task and its downstream tasks recursively for the dag in the given dagbag. """ - subdag = dag.sub_dag(task_ids_or_regex="^{}$".format(task.task_id), include_downstream=True) + subdag = dag.sub_dag(task_ids_or_regex=f"^{task.task_id}$", include_downstream=True) subdag.clear(start_date=start_date, end_date=end_date, dag_bag=dag_bag) diff --git a/tests/sensors/test_python.py b/tests/sensors/test_python.py index 7b950d96c6311..445496c8b6714 100644 --- a/tests/sensors/test_python.py +++ b/tests/sensors/test_python.py @@ -97,7 +97,7 @@ def test_python_callable_arguments_are_templatized(self): recorded_calls[0], Call(4, date(2019, 1, 1), - "dag {} ran on {}.".format(self.dag.dag_id, ds_templated), + f"dag {self.dag.dag_id} ran on {ds_templated}.", Named(ds_templated, 'unchanged')) ) diff --git a/tests/sensors/test_weekday_sensor.py b/tests/sensors/test_weekday_sensor.py index 0ff4f1938af3a..75b5cb9961b17 100644 --- a/tests/sensors/test_weekday_sensor.py +++ b/tests/sensors/test_weekday_sensor.py @@ -90,7 +90,7 @@ def test_weekday_sensor_false(self): def test_invalid_weekday_number(self): invalid_week_day = 'Thsday' with self.assertRaisesRegex(AttributeError, - 'Invalid Week Day passed: "{}"'.format(invalid_week_day)): + f'Invalid Week Day passed: "{invalid_week_day}"'): DayOfWeekSensor( task_id='weekday_sensor_invalid_weekday_num', week_day=invalid_week_day, diff --git a/tests/task/task_runner/test_standard_task_runner.py b/tests/task/task_runner/test_standard_task_runner.py index 2d964009f6e7f..b37040c2adfe8 100644 --- a/tests/task/task_runner/test_standard_task_runner.py +++ b/tests/task/task_runner/test_standard_task_runner.py @@ -95,7 +95,7 @@ def test_start_and_terminate(self): runner.terminate() for process in processes: - self.assertFalse(psutil.pid_exists(process.pid), "{} is still alive".format(process)) + self.assertFalse(psutil.pid_exists(process.pid), f"{process} is still alive") self.assertIsNotNone(runner.return_code()) @@ -121,7 +121,7 @@ def 
test_start_and_terminate_run_as_user(self): runner.terminate() for process in processes: - self.assertFalse(psutil.pid_exists(process.pid), "{} is still alive".format(process)) + self.assertFalse(psutil.pid_exists(process.pid), f"{process} is still alive") self.assertIsNotNone(runner.return_code()) @@ -175,11 +175,11 @@ def test_on_kill(self): break time.sleep(2) - with open(path, "r") as f: + with open(path) as f: self.assertEqual("ON_KILL_TEST", f.readline()) for process in processes: - self.assertFalse(psutil.pid_exists(process.pid), "{} is still alive".format(process)) + self.assertFalse(psutil.pid_exists(process.pid), f"{process} is still alive") @staticmethod def _procs_in_pgroup(pgid): diff --git a/tests/test_utils/get_all_tests.py b/tests/test_utils/get_all_tests.py index 0ac4028c19ccc..468c903f252dc 100644 --- a/tests/test_utils/get_all_tests.py +++ b/tests/test_utils/get_all_tests.py @@ -44,7 +44,7 @@ def print_all_cases(xunit_test_file_path): :param xunit_test_file_path: path of the xunit file :return: None """ - with open(xunit_test_file_path, "r") as file: + with open(xunit_test_file_path) as file: text = file.read() root = ElementTree.fromstring(text) diff --git a/tests/test_utils/mock_operators.py b/tests/test_utils/mock_operators.py index 8aa15d44655a6..ce410876413bf 100644 --- a/tests/test_utils/mock_operators.py +++ b/tests/test_utils/mock_operators.py @@ -91,7 +91,7 @@ def get_link(self, operator, dttm): if len(search_queries) < self.index: return None search_query = search_queries[self.index] - return 'https://console.cloud.google.com/bigquery?j={}'.format(search_query) + return f'https://console.cloud.google.com/bigquery?j={search_query}' class CustomOpLink(BaseOperatorLink): @@ -100,7 +100,7 @@ class CustomOpLink(BaseOperatorLink): def get_link(self, operator, dttm): ti = TaskInstance(task=operator, execution_date=dttm) search_query = ti.xcom_pull(task_ids=operator.task_id, key='search_query') - return 'http://google.com/custom_base_link?search={}'.format(search_query) + return f'http://google.com/custom_base_link?search={search_query}' class CustomOperator(BaseOperator): diff --git a/tests/test_utils/perf/dags/perf_dag_1.py b/tests/test_utils/perf/dags/perf_dag_1.py index 6f07b55b79faa..021a910b54433 100644 --- a/tests/test_utils/perf/dags/perf_dag_1.py +++ b/tests/test_utils/perf/dags/perf_dag_1.py @@ -41,7 +41,7 @@ for i in range(2, 5): task = BashOperator( - task_id='perf_task_{}'.format(i), + task_id=f'perf_task_{i}', bash_command=''' sleep 5; echo "run_id={{ run_id }} | dag_run={{ dag_run }}" ''', diff --git a/tests/test_utils/perf/dags/perf_dag_2.py b/tests/test_utils/perf/dags/perf_dag_2.py index 8f41d7d20a1c4..d9ef47efa1195 100644 --- a/tests/test_utils/perf/dags/perf_dag_2.py +++ b/tests/test_utils/perf/dags/perf_dag_2.py @@ -41,7 +41,7 @@ for i in range(2, 5): task = BashOperator( - task_id='perf_task_{}'.format(i), + task_id=f'perf_task_{i}', bash_command=''' sleep 5; echo "run_id={{ run_id }} | dag_run={{ dag_run }}" ''', diff --git a/tests/test_utils/perf/scheduler_ops_metrics.py b/tests/test_utils/perf/scheduler_ops_metrics.py index e84b42aa4af50..0d3b4a474c640 100644 --- a/tests/test_utils/perf/scheduler_ops_metrics.py +++ b/tests/test_utils/perf/scheduler_ops_metrics.py @@ -94,7 +94,7 @@ def print_stats(self): print('Performance Results') print('###################') for dag_id in DAG_IDS: - print('DAG {}'.format(dag_id)) + print(f'DAG {dag_id}') print(ti_perf_df[ti_perf_df['dag_id'] == dag_id]) print('###################') if len(tis) > 
len(successful_tis): diff --git a/tests/test_utils/salesforce_system_helpers.py b/tests/test_utils/salesforce_system_helpers.py index 4a0bc1c4f4ec7..811219ba79738 100644 --- a/tests/test_utils/salesforce_system_helpers.py +++ b/tests/test_utils/salesforce_system_helpers.py @@ -38,11 +38,11 @@ def provide_salesforce_connection(key_file_path: str): """ if not key_file_path.endswith(".json"): raise AirflowException("Use a JSON key file.") - with open(key_file_path, 'r') as credentials: + with open(key_file_path) as credentials: creds = json.load(credentials) missing_keys = CONFIG_REQUIRED_FIELDS - creds.keys() if missing_keys: - message = "{missing_keys} fields are missing".format(missing_keys=missing_keys) + message = f"{missing_keys} fields are missing" raise AirflowException(message) conn = Connection( conn_id=SALESFORCE_CONNECTION_ID, diff --git a/tests/test_utils/system_tests_class.py b/tests/test_utils/system_tests_class.py index 5dd85148172b3..a8d2bf5afec4d 100644 --- a/tests/test_utils/system_tests_class.py +++ b/tests/test_utils/system_tests_class.py @@ -115,7 +115,7 @@ def _print_all_log_files(): print() print(f" ================ Content of {filepath} ===============================") print() - with open(filepath, "r") as f: + with open(filepath) as f: print(f.read()) def run_dag(self, dag_id: str, dag_folder: str = DEFAULT_DAG_FOLDER) -> None: diff --git a/tests/utils/log/test_log_reader.py b/tests/utils/log/test_log_reader.py index fc654969bb1e6..2ae98c4534cff 100644 --- a/tests/utils/log/test_log_reader.py +++ b/tests/utils/log/test_log_reader.py @@ -79,7 +79,7 @@ def _configure_loggers(self): ] = "{{ ti.dag_id }}/{{ ti.task_id }}/{{ ts | replace(':', '.') }}/{{ try_number }}.log" settings_file = os.path.join(self.settings_folder, "airflow_local_settings.py") with open(settings_file, "w") as handle: - new_logging_file = "LOGGING_CONFIG = {}".format(logging_config) + new_logging_file = f"LOGGING_CONFIG = {logging_config}" handle.writelines(new_logging_file) sys.path.append(self.settings_folder) with conf_vars({("logging", "logging_config_class"): "airflow_local_settings.LOGGING_CONFIG"}): diff --git a/tests/www/api/experimental/test_endpoints.py b/tests/www/api/experimental/test_endpoints.py index d9885208f82a1..760e0ac639e44 100644 --- a/tests/www/api/experimental/test_endpoints.py +++ b/tests/www/api/experimental/test_endpoints.py @@ -440,7 +440,7 @@ def _get_pool_count(self): def test_get_pool(self): response = self.client.get( - '/api/experimental/pools/{}'.format(self.pool.pool), + f'/api/experimental/pools/{self.pool.pool}', ) self.assert_deprecated(response) self.assertEqual(response.status_code, 200) @@ -500,7 +500,7 @@ def test_create_pool_with_bad_name(self): def test_delete_pool(self): response = self.client.delete( - '/api/experimental/pools/{}'.format(self.pool.pool), + f'/api/experimental/pools/{self.pool.pool}', ) self.assert_deprecated(response) self.assertEqual(response.status_code, 200) diff --git a/tests/www/api/experimental/test_kerberos_endpoints.py b/tests/www/api/experimental/test_kerberos_endpoints.py index ed65b29da86f9..402e5cf059b73 100644 --- a/tests/www/api/experimental/test_kerberos_endpoints.py +++ b/tests/www/api/experimental/test_kerberos_endpoints.py @@ -63,7 +63,7 @@ def test_trigger_dag(self): ) self.assertEqual(401, response.status_code) - response.url = 'http://{}'.format(socket.getfqdn()) + response.url = f'http://{socket.getfqdn()}' class Request: headers = {} diff --git a/tests/www/test_security.py b/tests/www/test_security.py index 
eda73d18a502d..b44f67482da86 100644 --- a/tests/www/test_security.py +++ b/tests/www/test_security.py @@ -113,14 +113,14 @@ def assert_user_has_dag_perms(self, perms, dag_id, user=None): for perm in perms: self.assertTrue( self._has_dag_perm(perm, dag_id, user), - "User should have '{}' on DAG '{}'".format(perm, dag_id), + f"User should have '{perm}' on DAG '{dag_id}'", ) def assert_user_does_not_have_dag_perms(self, dag_id, perms, user=None): for perm in perms: self.assertFalse( self._has_dag_perm(perm, dag_id, user), - "User should not have '{}' on DAG '{}'".format(perm, dag_id), + f"User should not have '{perm}' on DAG '{dag_id}'", ) def _has_dag_perm(self, perm, dag_id, user): diff --git a/tests/www/test_views.py b/tests/www/test_views.py index 43628b7c7bac2..f56d79b7bd454 100644 --- a/tests/www/test_views.py +++ b/tests/www/test_views.py @@ -518,7 +518,7 @@ def test_doc_urls(self): if "dev" in version.version: airflow_doc_site = "https://airflow.readthedocs.io/en/latest" else: - airflow_doc_site = 'https://airflow.apache.org/docs/{}'.format(version.version) + airflow_doc_site = f'https://airflow.apache.org/docs/{version.version}' self.check_content_in_response(airflow_doc_site, resp) self.check_content_in_response("/api/v1/ui", resp) @@ -1071,8 +1071,8 @@ def test_delete_dag_button_for_dag_on_scheduler_only(self): self.session.commit() resp = self.client.get('/', follow_redirects=True) - self.check_content_in_response('/delete?dag_id={}'.format(test_dag_id), resp) - self.check_content_in_response("return confirmDeleteDag(this, '{}')".format(test_dag_id), resp) + self.check_content_in_response(f'/delete?dag_id={test_dag_id}', resp) + self.check_content_in_response(f"return confirmDeleteDag(this, '{test_dag_id}')", resp) self.session.query(DM).filter(DM.dag_id == test_dag_id).update({'dag_id': dag_id}) self.session.commit() @@ -1167,7 +1167,7 @@ def setUp(self): # Write the custom logging configuration to a file self.settings_folder = tempfile.mkdtemp() settings_file = os.path.join(self.settings_folder, "airflow_local_settings.py") - new_logging_file = "LOGGING_CONFIG = {}".format(logging_config) + new_logging_file = f"LOGGING_CONFIG = {logging_config}" with open(settings_file, 'w') as handle: handle.writelines(new_logging_file) sys.path.append(self.settings_folder) @@ -1243,7 +1243,7 @@ def test_get_file_task_log(self, state, try_number, expected_num_logs_visible): self.assertEqual(response.status_code, 200) self.assertIn('Log by attempts', response.data.decode('utf-8')) for num in range(1, expected_num_logs_visible + 1): - self.assertIn('log-group-{}'.format(num), response.data.decode('utf-8')) + self.assertIn(f'log-group-{num}', response.data.decode('utf-8')) self.assertNotIn('log-group-0', response.data.decode('utf-8')) self.assertNotIn('log-group-{}'.format(expected_num_logs_visible + 1), response.data.decode('utf-8')) @@ -1482,7 +1482,7 @@ def teardown(self): self.test.session.close() def assert_base_date_and_num_runs(self, base_date, num_runs, data): - self.test.assertNotIn('name="base_date" value="{}"'.format(base_date), data) + self.test.assertNotIn(f'name="base_date" value="{base_date}"', data) self.test.assertNotIn('<option selected="" value="{num}">{num}</option>'.format( num=num_runs), data) @@ -2707,7 +2707,7 @@ def test_trigger_dag_button(self): self.session.query(DR).delete() self.session.commit() - self.client.post('trigger?dag_id={}'.format(test_dag_id)) + self.client.post(f'trigger?dag_id={test_dag_id}') run = self.session.query(DR).filter(DR.dag_id == test_dag_id).first() self.assertIsNotNone(run) @@ -2724,7
+2724,7 @@ def test_trigger_dag_conf(self): self.session.query(DR).delete() self.session.commit() - self.client.post('trigger?dag_id={}'.format(test_dag_id), data={'conf': json.dumps(conf_dict)}) + self.client.post(f'trigger?dag_id={test_dag_id}', data={'conf': json.dumps(conf_dict)}) run = self.session.query(DR).filter(DR.dag_id == test_dag_id).first() self.assertIsNotNone(run) @@ -2739,7 +2739,7 @@ def test_trigger_dag_conf_malformed(self): self.session.query(DR).delete() self.session.commit() - response = self.client.post('trigger?dag_id={}'.format(test_dag_id), data={'conf': '{"a": "b"'}) + response = self.client.post(f'trigger?dag_id={test_dag_id}', data={'conf': '{"a": "b"'}) self.check_content_in_response('Invalid JSON configuration', response) run = self.session.query(DR).filter(DR.dag_id == test_dag_id).first() @@ -2747,8 +2747,8 @@ def test_trigger_dag_conf_malformed(self): def test_trigger_dag_form(self): test_dag_id = "example_bash_operator" - resp = self.client.get('trigger?dag_id={}'.format(test_dag_id)) - self.check_content_in_response('Trigger DAG: {}'.format(test_dag_id), resp) + resp = self.client.get(f'trigger?dag_id={test_dag_id}') + self.check_content_in_response(f'Trigger DAG: {test_dag_id}', resp) @parameterized.expand([ ("javascript:alert(1)", "/home"), @@ -2759,7 +2759,7 @@ def test_trigger_dag_form(self): def test_trigger_dag_form_origin_url(self, test_origin, expected_origin): test_dag_id = "example_bash_operator" - resp = self.client.get('trigger?dag_id={}&origin={}'.format(test_dag_id, test_origin)) + resp = self.client.get(f'trigger?dag_id={test_dag_id}&origin={test_origin}') self.check_content_in_response( '