Commit f85d483

Merge branch 'main' into docs-link-color

edgarrmondragon authored Mar 28, 2023
2 parents c0aeac2 + 5c31332 commit f85d483

Showing 16 changed files with 57 additions and 41 deletions.
2 changes: 1 addition & 1 deletion .pre-commit-config.yaml
@@ -36,7 +36,7 @@ repos:
          )$
   - repo: https://github.com/charliermarsh/ruff-pre-commit
-    rev: v0.0.257
+    rev: v0.0.259
     hooks:
       - id: ruff
         args: [--fix, --exit-non-zero-on-fix]
12 changes: 6 additions & 6 deletions poetry.lock

Some generated files are not rendered by default.

8 changes: 6 additions & 2 deletions pyproject.toml
@@ -221,14 +221,18 @@ ignore = [
 ]
 line-length = 88
 select = [
-    "E",
-    "F",
+    "F",    # Pyflakes
+    "E",    # pycodestyle (error)
+    "W",    # pycodestyle (warning)
+    "I",    # isort
+    "D",    # pydocstyle/flake8-docstrings
+    "UP",   # pyupgrade
     "YTT",  # flake8-2020
     "ANN",  # flake8-annotations
     "BLE",  # flake8-blind-except
     "B",    # flake8-bugbear
     "COM",  # flake8-commas
     "C4",   # flake8-comprehensions
     "T10",  # flake8-debugger
     "ISC",  # flake8-implicit-str-concat
     "ICN",  # flake8-import-conventions
8 changes: 5 additions & 3 deletions singer_sdk/authenticators.py
@@ -499,11 +499,13 @@ def update_access_token(self) -> None:
         token_response = requests.post(self.auth_endpoint, data=auth_request_payload)
         try:
             token_response.raise_for_status()
-            self.logger.info("OAuth authorization attempt was successful.")
-        except Exception as ex:
+        except requests.HTTPError as ex:
             raise RuntimeError(
                 f"Failed OAuth login, response was '{token_response.json()}'. {ex}",
-            )
+            ) from ex
+
+        self.logger.info("OAuth authorization attempt was successful.")
+
         token_json = token_response.json()
         self.access_token = token_json["access_token"]
         self.expires_in = token_json.get("expires_in", self._default_expiration)
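
The change above narrows the caught exception from Exception to requests.HTTPError and chains it with "from ex", so the original HTTP failure is preserved as the new error's __cause__ and the success log only runs when nothing was raised. A minimal sketch of the same idiom, using a hypothetical token endpoint rather than the SDK's own authenticator:

import requests


def fetch_token(auth_endpoint: str, payload: dict) -> str:
    """Request an OAuth token and fail loudly with the server's response."""
    response = requests.post(auth_endpoint, data=payload, timeout=30)
    try:
        # raise_for_status() raises requests.HTTPError only for 4xx/5xx responses.
        response.raise_for_status()
    except requests.HTTPError as ex:
        # "from ex" keeps the original HTTPError attached as __cause__.
        raise RuntimeError(f"Token request failed: {response.text}") from ex
    return response.json()["access_token"]
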
13 changes: 8 additions & 5 deletions singer_sdk/connectors/sql.py
@@ -102,6 +102,7 @@ def create_sqlalchemy_connection(self) -> sqlalchemy.engine.Connection:
             "on the connector currently, make a child class and "
             "add your required method on that connector.",
             DeprecationWarning,
+            stacklevel=2,
         )
         return self._engine.connect().execution_options(stream_results=True)

@@ -118,6 +119,7 @@ def create_sqlalchemy_engine(self) -> Engine:
             "`SQLConnector.create_sqlalchemy_engine` is deprecated. Override"
             "`_engine` or sqlalchemy_url` instead.",
             DeprecationWarning,
+            stacklevel=2,
         )
         return self._engine

@@ -137,6 +139,7 @@ def connection(self) -> sqlalchemy.engine.Connection:
             "that isn't available on the connector currently, make a child "
             "class and add your required method on that connector.",
             DeprecationWarning,
+            stacklevel=2,
         )
         return self.create_sqlalchemy_connection()

@@ -331,7 +334,7 @@ def quote(self, name: str) -> str:
             ],
         )

-    @lru_cache()
+    @lru_cache()  # noqa: B019
     def _warn_no_view_detection(self) -> None:
         """Print a warning, but only the first time."""
         self.logger.warning(

@@ -670,10 +673,10 @@ def create_empty_table(
         primary_keys = primary_keys or []
         try:
             properties: dict = schema["properties"]
-        except KeyError:
+        except KeyError as e:
             raise RuntimeError(
                 f"Schema for '{full_table_name}' does not define properties: {schema}",
-            )
+            ) from e
         for property_name, property_jsonschema in properties.items():
             is_primary_key = property_name in primary_keys
             columns.append(

@@ -1042,7 +1045,7 @@ def remove_collation(
         """
         if hasattr(column_type, "collation") and column_type.collation:
             column_type_collation: str = column_type.collation
-            setattr(column_type, "collation", None)
+            column_type.collation = None
             return column_type_collation
         return None

@@ -1058,7 +1061,7 @@ def update_collation(
             collation: The colation
         """
         if hasattr(column_type, "collation") and collation:
-            setattr(column_type, "collation", collation)
+            column_type.collation = collation

     def _adapt_column_type(
         self,
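
Several hunks in this file only add stacklevel=2 to warnings.warn. With the default stacklevel=1 the DeprecationWarning is attributed to the deprecated method itself; stacklevel=2 points it at the caller's line, which is where the fix needs to happen. The "# noqa: B019" comment separately silences flake8-bugbear's warning about lru_cache on methods. A standalone sketch of the stacklevel behaviour, with a hypothetical old_connect function:

import warnings


def old_connect() -> None:
    """Deprecated entry point kept only for backwards compatibility."""
    warnings.warn(
        "`old_connect` is deprecated, use the connector's engine instead.",
        DeprecationWarning,
        stacklevel=2,  # report the caller's line, not this one
    )


def user_code() -> None:
    old_connect()  # with stacklevel=2, the warning is reported against this line


if __name__ == "__main__":
    warnings.simplefilter("always", DeprecationWarning)
    user_code()
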
5 changes: 1 addition & 4 deletions singer_sdk/helpers/_typing.py
@@ -149,10 +149,7 @@ def is_date_or_datetime_type(type_dict: dict) -> bool:
         True if date or date-time, else False.
     """
     if "anyOf" in type_dict:
-        for type_dict in type_dict["anyOf"]:
-            if is_date_or_datetime_type(type_dict):
-                return True
-        return False
+        return any(is_date_or_datetime_type(option) for option in type_dict["anyOf"])

     if "type" in type_dict:
         return type_dict.get("format") in {"date", "date-time"}
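
The replaced loop and the new any() expression are equivalent: both return True as soon as one "anyOf" option is date-like, and False otherwise. Renaming the loop variable to option also stops it from shadowing the type_dict parameter. A self-contained approximation of the helper, with a sample schema for illustration (the real function handles more cases):

def is_date_or_datetime_type(type_dict: dict) -> bool:
    """Simplified check: True if the JSON schema type is a date or date-time."""
    if "anyOf" in type_dict:
        # Recurse into each option; short-circuits on the first date-like match.
        return any(is_date_or_datetime_type(option) for option in type_dict["anyOf"])
    if "type" in type_dict:
        return type_dict.get("format") in {"date", "date-time"}
    return False


# Example: a nullable timestamp column expressed with anyOf.
schema = {"anyOf": [{"type": "null"}, {"type": "string", "format": "date-time"}]}
print(is_date_or_datetime_type(schema))  # True
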
2 changes: 1 addition & 1 deletion singer_sdk/io_base.py
@@ -129,4 +129,4 @@ def _process_unknown_message(self, message_dict: dict) -> None:
         raise ValueError(f"Unknown message type '{record_type}' in message.")

     def _process_endofpipe(self) -> None:
-        pass
+        logger.debug("End of pipe reached")
6 changes: 4 additions & 2 deletions singer_sdk/mapper.py
@@ -342,11 +342,13 @@ def _eval(
                 functions=self.functions,
                 names=names,
             )
-            logging.debug(f"Eval result: {expr} = {result}")
-        except Exception as ex:
+        except (simpleeval.InvalidExpression, SyntaxError) as ex:
             raise MapExpressionError(
                 f"Failed to evaluate simpleeval expressions {expr}.",
             ) from ex
+
+        logging.debug(f"Eval result: {expr} = {result}")
+
         return result

     def _eval_type(
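
Narrowing the handler to simpleeval.InvalidExpression and SyntaxError keeps unrelated bugs from being swallowed: simpleeval raises subclasses of InvalidExpression (for example, for an undefined name), while a malformed expression surfaces as a plain SyntaxError. A rough sketch of the same wrapping outside the mapper class, with MapExpressionError defined locally as a stand-in for the SDK's exception:

import simpleeval


class MapExpressionError(Exception):
    """Stand-in for the SDK's exception raised on bad stream map expressions."""


def safe_eval(expr: str, names: dict) -> object:
    try:
        result = simpleeval.simple_eval(expr, names=names)
    except (simpleeval.InvalidExpression, SyntaxError) as ex:
        # Chain the original error so the failing expression is easy to debug.
        raise MapExpressionError(f"Failed to evaluate expression {expr!r}.") from ex
    return result


print(safe_eval("record_count > 10", {"record_count": 42}))  # True
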
7 changes: 5 additions & 2 deletions singer_sdk/sinks/core.py
@@ -333,11 +333,11 @@ def _parse_timestamps_in_record(
         for key in record:
             datelike_type = get_datelike_property_type(schema["properties"][key])
             if datelike_type:
+                date_val = record[key]
                 try:
-                    date_val = record[key]
                     if record[key] is not None:
                         date_val = parser.parse(date_val)
-                except Exception as ex:
+                except parser.ParserError as ex:
                     date_val = handle_invalid_timestamp_in_record(
                         record,
                         [key],

@@ -355,6 +355,7 @@ def _after_process_record(self, context: dict) -> None:
         Args:
             context: Stream partition or context dictionary.
         """
+        self.logger.debug("Processed record: %s", context)

     # SDK developer overrides:

@@ -447,6 +448,7 @@ def setup(self) -> None:
         Setup is executed once per Sink instance, after instantiation. If a Schema
         change is detected, a new Sink is instantiated and this method is called again.
         """
+        self.logger.info("Setting up %s", self.stream_name)

     def clean_up(self) -> None:
         """Perform any clean up actions required at end of a stream.

@@ -455,6 +457,7 @@ def clean_up(self) -> None:
         that may be in use from other instances of the same sink. Stream name alone
         should not be relied on, it's recommended to use a uuid as well.
         """
+        self.logger.info("Cleaning up %s", self.stream_name)

     def process_batch_files(
         self,
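
The first hunk here swaps the blanket "except Exception" for parser.ParserError, which dateutil raises when a string cannot be parsed as a timestamp, and moves "date_val = record[key]" above the try so the name stays bound even when parsing fails. A minimal sketch of that guard with a hypothetical fallback (the SDK delegates to handle_invalid_timestamp_in_record instead):

from typing import Optional

from dateutil import parser


def parse_timestamp(value: Optional[str]):
    """Return a datetime for date-like strings, or the original value on failure."""
    date_val = value
    try:
        if value is not None:
            date_val = parser.parse(value)
    except parser.ParserError:
        # Hypothetical fallback; real code would log or repair the record here.
        return value
    return date_val


print(parse_timestamp("2023-03-28T12:00:00Z"))  # 2023-03-28 12:00:00+00:00
print(parse_timestamp("not-a-date"))            # not-a-date
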
1 change: 1 addition & 0 deletions singer_sdk/streams/rest.py
@@ -493,6 +493,7 @@ def get_new_paginator(self) -> BaseAPIPaginator:
             "in a future version of the Meltano Singer SDK. "
             "Override `RESTStream.get_new_paginator` instead.",
             DeprecationWarning,
+            stacklevel=2,
         )
         return LegacyStreamPaginator(self)  # type: ignore
13 changes: 6 additions & 7 deletions singer_sdk/testing/tap_tests.py
@@ -71,7 +71,7 @@ def test(self) -> None:
             or self.stream.name in self.config.ignore_no_records_for_streams
         ):
             # only warn if this or all streams are set to ignore no records
-            warnings.warn(UserWarning(no_records_message))
+            warnings.warn(UserWarning(no_records_message), stacklevel=2)
         else:
             record_count = len(self.stream_records)
             assert record_count > 0, no_records_message

@@ -90,6 +90,7 @@ def test(self) -> None:
         if diff:
             warnings.warn(
                 UserWarning(f"Fields in catalog but not in records: ({diff})"),
+                stacklevel=2,
             )


@@ -123,7 +124,7 @@ def test(self) -> None:
                 (r[k] for k in primary_keys or []) for r in self.stream_records
             ]
         except KeyError as e:
-            raise AssertionError(f"Record missing primary key: {str(e)}")
+            raise AssertionError(f"Record missing primary key: {str(e)}") from e
         count_unique_records = len(set(record_ids))
         count_records = len(self.stream_records)
         assert count_unique_records == count_records, (

@@ -281,11 +282,9 @@ def test(self) -> None:
             AssertionError: if value cannot be cast to float type.
         """
         for v in self.non_null_attribute_values:
-            try:
-                error_message = f"Unable to cast value ('{v}') to float type."
-                assert isinstance(v, (float, int)), error_message
-            except Exception as e:
-                raise AssertionError(error_message) from e
+            error_message = f"Unable to cast value ('{v}') to float type."
+            if not isinstance(v, (float, int)):
+                raise AssertionError(error_message)

     @classmethod
     def evaluate(
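
The last hunk in this file drops the try/assert wrapper in favor of an explicit isinstance check. The old form had two problems: assert statements are stripped under "python -O", and the broad "except Exception" caught and re-wrapped the very AssertionError the assert had just raised. A condensed sketch of the new shape over a hypothetical list of values:

def assert_all_float_like(values: list) -> None:
    """Raise AssertionError for any value that is not an int or float."""
    for v in values:
        error_message = f"Unable to cast value ('{v}') to float type."
        if not isinstance(v, (float, int)):
            raise AssertionError(error_message)


assert_all_float_like([1, 2.5, 3])       # passes silently
# assert_all_float_like([1, "oops"])     # would raise AssertionError
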
5 changes: 4 additions & 1 deletion singer_sdk/testing/templates.py
@@ -232,7 +232,10 @@ def non_null_attribute_values(self) -> list[Any]:
             if r.get(self.attribute_name) is not None
         ]
         if not values:
-            warnings.warn(UserWarning("No records were available to test."))
+            warnings.warn(
+                UserWarning("No records were available to test."),
+                stacklevel=2,
+            )
         return values

     @classmethod
6 changes: 3 additions & 3 deletions tests/core/test_mapper.py
@@ -256,9 +256,9 @@ def filter_stream_maps():

 @pytest.fixture
 def filter_stream_map_w_error(filter_stream_maps):
-    restult = copy.copy(filter_stream_maps)
-    restult["repositories"]["__filter__"] = "this should raise an er!ror"
-    return restult
+    result = copy.copy(filter_stream_maps)
+    result["repositories"]["__filter__"] = "this should raise an er!ror"
+    return result


 @pytest.fixture
1 change: 1 addition & 0 deletions tests/external/test_tap_gitlab.py
@@ -23,6 +23,7 @@
             "Could not configure external gitlab tests. "
             f"Config in CI is expected via env vars.\n{e}",
         ),
+        stacklevel=2,
     )

 COUNTER = 0
1 change: 1 addition & 0 deletions tests/external/test_tap_google_analytics.py
@@ -22,4 +22,5 @@
             "Could not configure external gitlab tests. "
             f"Config in CI is expected via env vars.\n{e}",
         ),
+        stacklevel=2,
     )
8 changes: 4 additions & 4 deletions tests/samples/test_target_sqlite.py
@@ -103,17 +103,17 @@ def test_sync_sqlite_to_sqlite(
     ):
         try:
             orig_json = json.loads(orig_out)
-        except json.JSONDecodeError:
+        except json.JSONDecodeError as e:
             raise RuntimeError(
                 f"Could not parse JSON in orig line {line_num}: {orig_out}",
-            )
+            ) from e

         try:
             tapped_json = json.loads(new_out)
-        except json.JSONDecodeError:
+        except json.JSONDecodeError as e:
             raise RuntimeError(
                 f"Could not parse JSON in new line {line_num}: {new_out}",
-            )
+            ) from e

         assert (
             tapped_json["type"] == orig_json["type"]
