Rename ConformanceLevel to TypeConformanceLevel
Also document the field.
edgarrmondragon committed Jan 3, 2023
1 parent 31a4d73 commit 254735c
Showing 4 changed files with 41 additions and 22 deletions.
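For downstream tap code that imported the enum directly from the (private) helpers module, the rename is a one-line change. A minimal before/after sketch, assuming such an import exists; the variable name is hypothetical:

    # Before this commit:
    #   from singer_sdk.helpers._typing import ConformanceLevel
    #   level = ConformanceLevel.RECURSIVE

    # After this commit:
    from singer_sdk.helpers._typing import TypeConformanceLevel

    level = TypeConformanceLevel.RECURSIVE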
12 changes: 6 additions & 6 deletions singer_sdk/helpers/_typing.py
@@ -284,7 +284,7 @@ def _warn_unmapped_properties(
)


-class ConformanceLevel(Enum):
+class TypeConformanceLevel(Enum):
"""Used to configure how data is conformed to json compatible types.
Before outputting data as JSON, it is conformed to types that are valid in json,
@@ -316,7 +316,7 @@ def conform_record_data_types( # noqa: C901
stream_name: str,
record: Dict[str, Any],
schema: dict,
-level: ConformanceLevel,
+level: TypeConformanceLevel,
logger: logging.Logger,
) -> Dict[str, Any]:
"""Translate values in record dictionary to singer-compatible data types.
@@ -335,7 +335,7 @@ def conform_record_data_types( # noqa: C901
def _conform_record_data_types(
input_object: Dict[str, Any],
schema: dict,
-level: ConformanceLevel,
+level: TypeConformanceLevel,
parent: Optional[str],
) -> Tuple[Dict[str, Any], List[str]]: # noqa: C901
"""Translate values in record dictionary to singer-compatible data types.
@@ -354,7 +354,7 @@ def _conform_record_data_types(
output_object: Dict[str, Any] = {}
unmapped_properties: List[str] = []

-if level == ConformanceLevel.NONE:
+if level == TypeConformanceLevel.NONE:
return input_object, unmapped_properties

for property_name, elem in input_object.items():
@@ -367,7 +367,7 @@

property_schema = schema["properties"][property_name]
if isinstance(elem, list) and is_uniform_list(property_schema):
-if level == ConformanceLevel.RECURSIVE:
+if level == TypeConformanceLevel.RECURSIVE:
item_schema = property_schema["items"]
output = []
for item in elem:
@@ -390,7 +390,7 @@
and is_object_type(property_schema)
and "properties" in property_schema
):
-if level == ConformanceLevel.RECURSIVE:
+if level == TypeConformanceLevel.RECURSIVE:
(
output_object[property_name],
sub_unmapped_properties,
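A short usage sketch of the renamed enum with the public helper changed above; the stream name, record, and schema are made up for illustration, and the exact conformed values depend on the conversion rules in singer_sdk.helpers._typing:

    import logging

    from singer_sdk.helpers._typing import TypeConformanceLevel, conform_record_data_types

    logger = logging.getLogger("example")

    schema = {
        "type": "object",
        "properties": {
            "id": {"type": "integer"},
            "profile": {
                "type": "object",
                "properties": {"active": {"type": "boolean"}},
            },
        },
    }
    record = {"id": 1, "profile": {"active": True}}

    for level in (
        TypeConformanceLevel.NONE,       # record is returned unchanged
        TypeConformanceLevel.ROOT_ONLY,  # only top-level fields are conformed
        TypeConformanceLevel.RECURSIVE,  # nested objects and lists are conformed too
    ):
        conformed = conform_record_data_types("example_stream", record, schema, level, logger)
        logger.info("%s -> %s", level.name, conformed)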
25 changes: 22 additions & 3 deletions singer_sdk/streams/core.py
@@ -40,7 +40,7 @@
write_starting_replication_value,
)
from singer_sdk.helpers._typing import (
-ConformanceLevel,
+TypeConformanceLevel,
conform_record_data_types,
is_datetime_type,
)
@@ -81,12 +81,31 @@ def lazy_chunked_generator(
class Stream(metaclass=abc.ABCMeta):
"""Abstract base class for tap streams."""

-STATE_MSG_FREQUENCY = 10000 # Number of records between state messages
+STATE_MSG_FREQUENCY = 10000
+"""Number of records between state messages."""

_MAX_RECORDS_LIMIT: int | None = None
-CONFORMANCE_LEVEL = ConformanceLevel.RECURSIVE

+TYPE_CONFORMANCE_LEVEL = TypeConformanceLevel.RECURSIVE
+"""Type conformance level for this stream.
+Field types in the schema are used to convert record field values to the correct
+type.
+Available options are:
+- ``TypeConformanceLevel.NONE``: No conformance is performed.
+- ``TypeConformanceLevel.RECURSIVE``: Conformance is performed recursively through
+all nested levels in the record.
+- ``TypeConformanceLevel.ROOT_ONLY``: Conformance is performed only on the root level.
+"""

# Used for nested stream relationships
parent_stream_type: type[Stream] | None = None
"""Parent stream type for this stream. If this stream is a child stream, this should
be set to the parent stream class.
"""

ignore_parent_replication_key: bool = False

# Internal API cost aggregator
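As a usage note for the attribute documented above, a subclass can override the class-level default. A minimal sketch, assuming a hypothetical stream; the name, schema, and records are illustrative only:

    from __future__ import annotations

    import typing as t

    from singer_sdk.helpers._typing import TypeConformanceLevel
    from singer_sdk.streams.core import Stream


    class UsersStream(Stream):
        """Hypothetical stream that only conforms top-level record fields."""

        name = "users"
        schema = {
            "type": "object",
            "properties": {
                "id": {"type": "integer"},
                "profile": {
                    "type": "object",
                    "properties": {"active": {"type": "boolean"}},
                },
            },
        }

        # Override the class-level default (TypeConformanceLevel.RECURSIVE).
        TYPE_CONFORMANCE_LEVEL = TypeConformanceLevel.ROOT_ONLY

        def get_records(self, context: dict | None) -> t.Iterable[dict]:
            # Toy records; a real stream would read from an API, file, or database.
            yield {"id": 1, "profile": {"active": True}}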
4 changes: 2 additions & 2 deletions tests/core/test_record_typing.py
@@ -10,7 +10,7 @@
import pytest

from singer_sdk.helpers._typing import (
-ConformanceLevel,
+TypeConformanceLevel,
conform_record_data_types,
get_datelike_property_type,
to_json_compatible,
@@ -67,7 +67,7 @@ def test_conform_record_data_types(

with caplog.at_level(logging.INFO, logger=logger.name):
actual = conform_record_data_types(
-stream_name, record, schema, ConformanceLevel.RECURSIVE, logger
+stream_name, record, schema, TypeConformanceLevel.RECURSIVE, logger
)
if ignore_props_message:
assert ignore_props_message in caplog.text
22 changes: 11 additions & 11 deletions tests/core/test_typing.py
@@ -6,7 +6,7 @@
import pytest

from singer_sdk.helpers._typing import (
-ConformanceLevel,
+TypeConformanceLevel,
_conform_primitive_property,
conform_record_data_types,
)
@@ -38,7 +38,7 @@ def test_simple_schema_conforms_types():
}

actual_output = conform_record_data_types(
"test_stream", record, schema, ConformanceLevel.RECURSIVE, logger
"test_stream", record, schema, TypeConformanceLevel.RECURSIVE, logger
)
assert actual_output == expected_output

@@ -55,7 +55,7 @@ def test_primitive_arrays_are_conformed():
expected_output = {"list": [True, False]}

actual_output = conform_record_data_types(
"test_stream", record, schema, ConformanceLevel.RECURSIVE, logger
"test_stream", record, schema, TypeConformanceLevel.RECURSIVE, logger
)
assert actual_output == expected_output

@@ -80,7 +80,7 @@ def test_only_root_fields_are_conformed_for_root_level():
}

actual_output = conform_record_data_types(
"test_stream", record, schema, ConformanceLevel.ROOT_ONLY, logger
"test_stream", record, schema, TypeConformanceLevel.ROOT_ONLY, logger
)
assert actual_output == expected_output

@@ -99,7 +99,7 @@ def test_no_fields_are_conformed_for_none_level():
}

actual_output = conform_record_data_types(
"test_stream", record, schema, ConformanceLevel.NONE, logger
"test_stream", record, schema, TypeConformanceLevel.NONE, logger
)
assert actual_output == record

@@ -114,7 +114,7 @@ def test_object_arrays_are_conformed():
expected_output = {"list": [{"value": True}, {"value": False}]}

actual_output = conform_record_data_types(
"test_stream", record, schema, ConformanceLevel.RECURSIVE, logger
"test_stream", record, schema, TypeConformanceLevel.RECURSIVE, logger
)
assert actual_output == expected_output

@@ -138,7 +138,7 @@ def test_mixed_arrays_are_conformed():
expected_output = {"list": [{"value": True}, False]}

actual_output = conform_record_data_types(
"test_stream", record, schema, ConformanceLevel.RECURSIVE, logger
"test_stream", record, schema, TypeConformanceLevel.RECURSIVE, logger
)
assert actual_output == expected_output

@@ -153,7 +153,7 @@ def test_nested_objects_are_conformed():
expected_output = {"object": {"value": True}}

actual_output = conform_record_data_types(
"test_stream", record, schema, ConformanceLevel.RECURSIVE, logger
"test_stream", record, schema, TypeConformanceLevel.RECURSIVE, logger
)
assert actual_output == expected_output

@@ -169,7 +169,7 @@ def test_simple_schema_removes_types(caplog: pytest.LogCaptureFixture):

with caplog.at_level(logging.WARNING):
actual_output = conform_record_data_types(
"test_stream", record, schema, ConformanceLevel.RECURSIVE, logger
"test_stream", record, schema, TypeConformanceLevel.RECURSIVE, logger
)
assert actual_output == expected_output
assert caplog.records[0].message == (
@@ -189,7 +189,7 @@ def test_nested_objects_remove_types(caplog: pytest.LogCaptureFixture):

with caplog.at_level(logging.WARNING):
actual_output = conform_record_data_types(
"test_stream", record, schema, ConformanceLevel.RECURSIVE, logger
"test_stream", record, schema, TypeConformanceLevel.RECURSIVE, logger
)
assert actual_output == expected_output
assert caplog.records[0].message == (
@@ -209,7 +209,7 @@ def test_object_arrays_remove_types(caplog: pytest.LogCaptureFixture):

with caplog.at_level(logging.WARNING):
actual_output = conform_record_data_types(
"test_stream", record, schema, ConformanceLevel.RECURSIVE, logger
"test_stream", record, schema, TypeConformanceLevel.RECURSIVE, logger
)
assert actual_output == expected_output
assert caplog.records[0].message == (
