Simplify conditions on len() in providers/amazon (#33565)
eumiro authored Aug 24, 2023
1 parent 9b8a093 commit 633217c
Showing 7 changed files with 12 additions and 13 deletions.
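
All seven files apply the same PEP 8 idiom: empty containers are falsy in Python, so an explicit length comparison can be replaced by a plain truthiness test. A minimal before/after sketch of the pattern (illustrative only; job_runs is not a name from this diff):

job_runs: list[str] = []

# Before: explicit length comparisons
if len(job_runs) > 0:
    print("jobs pending")
if len(job_runs) == 0:
    print("queue empty")

# After: empty sequences are falsy, so truthiness reads the same
if job_runs:
    print("jobs pending")
if not job_runs:
    print("queue empty")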
2 changes: 1 addition & 1 deletion airflow/providers/amazon/aws/hooks/emr.py
@@ -291,7 +291,7 @@ def cancel_running_jobs(
         for r in iterator:
             job_ids = [jr["id"] for jr in r["jobRuns"]]
             count += len(job_ids)
-            if len(job_ids) > 0:
+            if job_ids:
                 self.log.info(
                     "Cancelling %s pending job(s) for the application %s so that it can be stopped",
                     len(job_ids),
2 changes: 1 addition & 1 deletion airflow/providers/amazon/aws/hooks/s3.py
@@ -503,7 +503,7 @@ async def _check_key_async(
         if wildcard_match:
             keys = await self.get_file_metadata_async(client, bucket_name, key)
             key_matches = [k for k in keys if fnmatch.fnmatch(k["Key"], key)]
-            if len(key_matches) == 0:
+            if not key_matches:
                 return False
         else:
             obj = await self.get_head_object_async(client, key, bucket_name)
9 changes: 4 additions & 5 deletions airflow/providers/amazon/aws/hooks/sagemaker.py
@@ -81,7 +81,7 @@ def secondary_training_status_changed(current_job_description: dict, prev_job_de
     :return: Whether the secondary status message of a training job changed or not.
     """
     current_secondary_status_transitions = current_job_description.get("SecondaryStatusTransitions")
-    if current_secondary_status_transitions is None or len(current_secondary_status_transitions) == 0:
+    if not current_secondary_status_transitions:
         return False
 
     prev_job_secondary_status_transitions = (
@@ -90,8 +90,7 @@ def secondary_training_status_changed(current_job_description: dict, prev_job_de
 
     last_message = (
         prev_job_secondary_status_transitions[-1]["StatusMessage"]
-        if prev_job_secondary_status_transitions is not None
-        and len(prev_job_secondary_status_transitions) > 0
+        if prev_job_secondary_status_transitions
         else ""
     )
 
@@ -111,7 +110,7 @@ def secondary_training_status_message(
     :return: Job status string to be printed.
     """
     current_transitions = job_description.get("SecondaryStatusTransitions")
-    if current_transitions is None or len(current_transitions) == 0:
+    if not current_transitions:
         return ""
 
     prev_transitions_num = 0
@@ -584,7 +583,7 @@ def describe_training_job_with_log(
             # the container starts logging, so ignore any errors thrown about that
             pass
 
-        if len(stream_names) > 0:
+        if stream_names:
             for idx, event in self.multi_stream_iter(log_group, stream_names, positions):
                 self.log.info(event["message"])
                 ts, count = positions[stream_names[idx]]
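
The sagemaker.py hunks above also fold a None check into the truthiness test: dict.get() returns None when the key is absent, and None and an empty list are both falsy, so a single check covers what previously took `is None or len(...) == 0`. A small sketch of that equivalence (the changed() helper is hypothetical, not part of the hook):

def changed(description: dict) -> bool:
    transitions = description.get("SecondaryStatusTransitions")
    # None (key absent) and [] (no transitions yet) are both falsy,
    # so one truthiness test replaces `is None or len(...) == 0`.
    if not transitions:
        return False
    return True

assert changed({}) is False  # key missing: get() returns None
assert changed({"SecondaryStatusTransitions": []}) is False  # empty list
assert changed({"SecondaryStatusTransitions": [{"Status": "Training"}]}) is True
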
2 changes: 1 addition & 1 deletion airflow/providers/amazon/aws/operators/sagemaker.py
@@ -1632,7 +1632,7 @@ def execute(self, context: Context):
             "DirectInternetAccess": self.direct_internet_access,
             "RootAccess": self.root_access,
         }
-        if len(self.create_instance_kwargs) > 0:
+        if self.create_instance_kwargs:
             create_notebook_instance_kwargs.update(self.create_instance_kwargs)
 
         self.log.info("Creating SageMaker notebook %s.", self.instance_name)
4 changes: 2 additions & 2 deletions airflow/providers/amazon/aws/sensors/batch.py
@@ -176,7 +176,7 @@ def poke(self, context: Context) -> bool:
             computeEnvironments=[self.compute_environment]
         )
 
-        if len(response["computeEnvironments"]) == 0:
+        if not response["computeEnvironments"]:
             raise AirflowException(f"AWS Batch compute environment {self.compute_environment} not found")
 
         status = response["computeEnvironments"][0]["status"]
@@ -241,7 +241,7 @@ def poke(self, context: Context) -> bool:
             jobQueues=[self.job_queue]
         )
 
-        if len(response["jobQueues"]) == 0:
+        if not response["jobQueues"]:
             if self.treat_non_existing_as_deleted:
                 return True
             else:
2 changes: 1 addition & 1 deletion airflow/providers/amazon/aws/sensors/s3.py
@@ -116,7 +116,7 @@ def _check_key(self, key):
             prefix = re.split(r"[\[\*\?]", key, 1)[0]
             keys = self.hook.get_file_metadata(prefix, bucket_name)
             key_matches = [k for k in keys if fnmatch.fnmatch(k["Key"], key)]
-            if len(key_matches) == 0:
+            if not key_matches:
                 return False
 
             # Reduce the set of metadata to size only
4 changes: 2 additions & 2 deletions airflow/providers/amazon/aws/sensors/sqs.py
@@ -186,7 +186,7 @@ def poke(self, context: Context):
                 self.message_filtering_config,
             )
 
-            if not len(messages):
+            if not messages:
                 continue
 
            message_batch.extend(messages)
@@ -203,7 +203,7 @@ def poke(self, context: Context):
 
                 if "Successful" not in response:
                     raise AirflowException(f"Delete SQS Messages failed {response} for messages {messages}")
-        if not len(message_batch):
+        if not message_batch:
             return False
 
         context["ti"].xcom_push(key="messages", value=message_batch)
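
The sqs.py hunks above swap `not len(messages)` for `not messages`. For sequences the two always agree, since bool(len(x)) equals bool(x); the rewrite simply drops a redundant len() call. A quick illustrative check (not taken from the commit):

for messages in ([], [{"MessageId": "1"}]):
    # `not len(messages)` and `not messages` always agree for sequences;
    # the truthiness form skips computing the length.
    assert (not len(messages)) == (not messages)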
