Skip to content

Commit

Permalink
chore: Enable B (flake8-bugbear) Ruff checks (#1551)
Browse files Browse the repository at this point in the history
  • Loading branch information
edgarrmondragon authored Mar 28, 2023
1 parent 7d87151 commit 5c31332
Show file tree
Hide file tree
Showing 12 changed files with 29 additions and 18 deletions.
1 change: 1 addition & 0 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -230,6 +230,7 @@ select = [
"YTT", # flake8-2020
"ANN", # flake8-annotations
"BLE", # flake8-blind-except
"B", # flake8-bugbear
"COM", # flake8-commas
"C4", # flake8-comprehensions
"T10", # flake8-debugger
Expand Down
2 changes: 1 addition & 1 deletion singer_sdk/authenticators.py
Original file line number Diff line number Diff line change
Expand Up @@ -502,7 +502,7 @@ def update_access_token(self) -> None:
except requests.HTTPError as ex:
raise RuntimeError(
f"Failed OAuth login, response was '{token_response.json()}'. {ex}",
)
) from ex

self.logger.info("OAuth authorization attempt was successful.")

Expand Down
13 changes: 8 additions & 5 deletions singer_sdk/connectors/sql.py
Original file line number Diff line number Diff line change
Expand Up @@ -102,6 +102,7 @@ def create_sqlalchemy_connection(self) -> sqlalchemy.engine.Connection:
"on the connector currently, make a child class and "
"add your required method on that connector.",
DeprecationWarning,
stacklevel=2,
)
return self._engine.connect().execution_options(stream_results=True)

Expand All @@ -118,6 +119,7 @@ def create_sqlalchemy_engine(self) -> Engine:
"`SQLConnector.create_sqlalchemy_engine` is deprecated. Override "
"`_engine` or `sqlalchemy_url` instead.",
DeprecationWarning,
stacklevel=2,
)
return self._engine

Expand All @@ -137,6 +139,7 @@ def connection(self) -> sqlalchemy.engine.Connection:
"that isn't available on the connector currently, make a child "
"class and add your required method on that connector.",
DeprecationWarning,
stacklevel=2,
)
return self.create_sqlalchemy_connection()

Expand Down Expand Up @@ -331,7 +334,7 @@ def quote(self, name: str) -> str:
],
)

@lru_cache()
@lru_cache() # noqa: B019
def _warn_no_view_detection(self) -> None:
"""Print a warning, but only the first time."""
self.logger.warning(
Expand Down Expand Up @@ -670,10 +673,10 @@ def create_empty_table(
primary_keys = primary_keys or []
try:
properties: dict = schema["properties"]
except KeyError:
except KeyError as e:
raise RuntimeError(
f"Schema for '{full_table_name}' does not define properties: {schema}",
)
) from e
for property_name, property_jsonschema in properties.items():
is_primary_key = property_name in primary_keys
columns.append(
Expand Down Expand Up @@ -1042,7 +1045,7 @@ def remove_collation(
"""
if hasattr(column_type, "collation") and column_type.collation:
column_type_collation: str = column_type.collation
setattr(column_type, "collation", None)
column_type.collation = None
return column_type_collation
return None

Expand All @@ -1058,7 +1061,7 @@ def update_collation(
collation: The collation
"""
if hasattr(column_type, "collation") and collation:
setattr(column_type, "collation", collation)
column_type.collation = collation

def _adapt_column_type(
self,
Expand Down
5 changes: 1 addition & 4 deletions singer_sdk/helpers/_typing.py
Original file line number Diff line number Diff line change
Expand Up @@ -149,10 +149,7 @@ def is_date_or_datetime_type(type_dict: dict) -> bool:
True if date or date-time, else False.
"""
if "anyOf" in type_dict:
for type_dict in type_dict["anyOf"]:
if is_date_or_datetime_type(type_dict):
return True
return False
return any(is_date_or_datetime_type(option) for option in type_dict["anyOf"])

if "type" in type_dict:
return type_dict.get("format") in {"date", "date-time"}
Expand Down
2 changes: 1 addition & 1 deletion singer_sdk/io_base.py
Original file line number Diff line number Diff line change
Expand Up @@ -129,4 +129,4 @@ def _process_unknown_message(self, message_dict: dict) -> None:
raise ValueError(f"Unknown message type '{record_type}' in message.")

def _process_endofpipe(self) -> None:
pass
logger.debug("End of pipe reached")
3 changes: 3 additions & 0 deletions singer_sdk/sinks/core.py
Original file line number Diff line number Diff line change
Expand Up @@ -355,6 +355,7 @@ def _after_process_record(self, context: dict) -> None:
Args:
context: Stream partition or context dictionary.
"""
self.logger.debug("Processed record: %s", context)

# SDK developer overrides:

Expand Down Expand Up @@ -447,6 +448,7 @@ def setup(self) -> None:
Setup is executed once per Sink instance, after instantiation. If a Schema
change is detected, a new Sink is instantiated and this method is called again.
"""
self.logger.info("Setting up %s", self.stream_name)

def clean_up(self) -> None:
"""Perform any clean up actions required at end of a stream.
Expand All @@ -455,6 +457,7 @@ def clean_up(self) -> None:
that may be in use from other instances of the same sink. Stream name alone
should not be relied on, it's recommended to use a uuid as well.
"""
self.logger.info("Cleaning up %s", self.stream_name)

def process_batch_files(
self,
Expand Down
1 change: 1 addition & 0 deletions singer_sdk/streams/rest.py
Original file line number Diff line number Diff line change
Expand Up @@ -493,6 +493,7 @@ def get_new_paginator(self) -> BaseAPIPaginator:
"in a future version of the Meltano Singer SDK. "
"Override `RESTStream.get_new_paginator` instead.",
DeprecationWarning,
stacklevel=2,
)
return LegacyStreamPaginator(self) # type: ignore

Expand Down
5 changes: 3 additions & 2 deletions singer_sdk/testing/tap_tests.py
Original file line number Diff line number Diff line change
Expand Up @@ -71,7 +71,7 @@ def test(self) -> None:
or self.stream.name in self.config.ignore_no_records_for_streams
):
# only warn if this or all streams are set to ignore no records
warnings.warn(UserWarning(no_records_message))
warnings.warn(UserWarning(no_records_message), stacklevel=2)
else:
record_count = len(self.stream_records)
assert record_count > 0, no_records_message
Expand All @@ -90,6 +90,7 @@ def test(self) -> None:
if diff:
warnings.warn(
UserWarning(f"Fields in catalog but not in records: ({diff})"),
stacklevel=2,
)


Expand Down Expand Up @@ -123,7 +124,7 @@ def test(self) -> None:
(r[k] for k in primary_keys or []) for r in self.stream_records
]
except KeyError as e:
raise AssertionError(f"Record missing primary key: {str(e)}")
raise AssertionError(f"Record missing primary key: {str(e)}") from e
count_unique_records = len(set(record_ids))
count_records = len(self.stream_records)
assert count_unique_records == count_records, (
Expand Down
5 changes: 4 additions & 1 deletion singer_sdk/testing/templates.py
Original file line number Diff line number Diff line change
Expand Up @@ -232,7 +232,10 @@ def non_null_attribute_values(self) -> list[Any]:
if r.get(self.attribute_name) is not None
]
if not values:
warnings.warn(UserWarning("No records were available to test."))
warnings.warn(
UserWarning("No records were available to test."),
stacklevel=2,
)
return values

@classmethod
Expand Down
1 change: 1 addition & 0 deletions tests/external/test_tap_gitlab.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,7 @@
"Could not configure external gitlab tests. "
f"Config in CI is expected via env vars.\n{e}",
),
stacklevel=2,
)

COUNTER = 0
Expand Down
1 change: 1 addition & 0 deletions tests/external/test_tap_google_analytics.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,4 +22,5 @@
"Could not configure external google analytics tests. "
f"Config in CI is expected via env vars.\n{e}",
),
stacklevel=2,
)
8 changes: 4 additions & 4 deletions tests/samples/test_target_sqlite.py
Original file line number Diff line number Diff line change
Expand Up @@ -103,17 +103,17 @@ def test_sync_sqlite_to_sqlite(
):
try:
orig_json = json.loads(orig_out)
except json.JSONDecodeError:
except json.JSONDecodeError as e:
raise RuntimeError(
f"Could not parse JSON in orig line {line_num}: {orig_out}",
)
) from e

try:
tapped_json = json.loads(new_out)
except json.JSONDecodeError:
except json.JSONDecodeError as e:
raise RuntimeError(
f"Could not parse JSON in new line {line_num}: {new_out}",
)
) from e

assert (
tapped_json["type"] == orig_json["type"]
Expand Down

0 comments on commit 5c31332

Please sign in to comment.