Commit
Move to API version 2019-10 and bump SDK version to latest (#54)
* Move to API version 2019-07 and bump SDK version to latest
* Move tests to this repo and bump API version to 2019-10
* Fix start date test
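For context, a versioned Shopify session pins every subsequent REST call to the requested API release. Below is a minimal sketch using the ShopifyAPI SDK; the shop URL and token are placeholders, and this is an illustration rather than the tap's actual session-handling code:

    import shopify

    # Placeholder credentials, for illustration only.
    SHOP_URL = "example-shop.myshopify.com"
    ACCESS_TOKEN = "example-token"

    # Pin the API version the client should speak (2019-10 after this commit).
    session = shopify.Session(SHOP_URL, "2019-10", ACCESS_TOKEN)
    shopify.ShopifyResource.activate_session(session)

    # Resource calls now hit the 2019-10 endpoints, e.g.:
    print(shopify.Order.find(limit=1))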
Showing 14 changed files with 1,018 additions and 17 deletions.
.circleci/config.yml

@@ -2,7 +2,7 @@ version: 2
 jobs:
   build:
     docker:
-      - image: 218546966473.dkr.ecr.us-east-1.amazonaws.com/circle-ci:tap-tester
+      - image: 218546966473.dkr.ecr.us-east-1.amazonaws.com/circle-ci:tap-tester-v4
     steps:
       - checkout
       - run:
@@ -24,13 +24,13 @@ jobs:
           aws s3 cp s3://com-stitchdata-dev-deployment-assets/environments/tap-tester/sandbox dev_env.sh
           source dev_env.sh
           source /usr/local/share/virtualenvs/tap-tester/bin/activate
-          run-a-test --tap=tap-shopify \
-                     --target=target-stitch \
-                     --orchestrator=stitch-orchestrator \
-                     [email protected] \
-                     --password=$SANDBOX_PASSWORD \
-                     --client-id=50 \
-                     tap_tester.suites.shopify
+          run-test --tap=tap-shopify \
+                   --target=target-stitch \
+                   --orchestrator=stitch-orchestrator \
+                   [email protected] \
+                   --password=$SANDBOX_PASSWORD \
+                   --client-id=50 \
+                   tests
 workflows:
   version: 2
   commit:
Makefile

@@ -2,4 +2,4 @@

 test:
         pylint tap_shopify -d missing-docstring
-        nosetests
+        nosetests tests/unittests
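With this change, make test lints the package and then runs only the unit tests under tests/unittests, leaving the integration suite for the CircleCI job above. Below is a minimal sketch of the kind of test nosetests would discover there; the file name and the assertion are hypothetical, for illustration only:

    # tests/unittests/test_example.py -- hypothetical name, for illustration only
    import unittest


    class ExampleUnitTest(unittest.TestCase):
        """Shape of a unit test discovered by 'nosetests tests/unittests'."""

        def test_start_date_ordering(self):
            # In the spirit of the suite: start dates are ISO-8601 strings,
            # so lexicographic comparison matches chronological order.
            self.assertLess('2017-07-01T00:00:00Z', '2019-10-01T00:00:00Z')


    if __name__ == '__main__':
        unittest.main()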
tests/base.py (new file)

@@ -0,0 +1,278 @@

"""
Set up expectations for test subclasses
Run discovery as a prerequisite for most tests
"""
import unittest
import os
from datetime import datetime as dt
from datetime import timezone as tz

from tap_tester import connections, menagerie, runner


class BaseTapTest(unittest.TestCase):
    """
    Set up expectations for test subclasses
    Run discovery as a prerequisite for most tests
    """

    REPLICATION_KEYS = "valid-replication-keys"
    PRIMARY_KEYS = "table-key-properties"
    FOREIGN_KEYS = "table-foreign-key-properties"
    REPLICATION_METHOD = "forced-replication-method"
    API_LIMIT = "max-row-limit"
    INCREMENTAL = "INCREMENTAL"
    FULL = "FULL_TABLE"
    START_DATE_FORMAT = "%Y-%m-%dT%H:%M:%SZ"

    @staticmethod
    def name():
        """The name of the test within the suite"""
        return "test_name"

    @staticmethod
    def tap_name():
        """The name of the tap"""
        return "tap-shopify"

    @staticmethod
    def get_type():
        """The expected URL route ending"""
        return "platform.shopify"

    def get_properties(self, original: bool = True):
        """Configuration properties required for the tap."""
        return_value = {
            'start_date': '2017-07-01T00:00:00Z',
            'shop': 'stitchdatawearhouse',
            'date_window_size': 30
        }

        if original:
            return return_value

        # This test needs the new connection's start date to be later than the default
        assert self.start_date > return_value["start_date"]

        return_value["start_date"] = self.start_date
        return return_value

    @staticmethod
    def get_credentials():
        """Authentication information for the test account"""
        return {
            'api_key': os.getenv('TAP_SHOPIFY_API_KEY')
        }

    def expected_metadata(self):
        """The expected streams and metadata about the streams"""

        default = {
            self.REPLICATION_KEYS: {"updated_at"},
            self.PRIMARY_KEYS: {"id"},
            self.REPLICATION_METHOD: self.INCREMENTAL,
            self.API_LIMIT: 250}

        meta = default.copy()
        meta.update({self.FOREIGN_KEYS: {"owner_id", "owner_resource"}})

        return {
            "abandoned_checkouts": default,
            "collects": default,
            "custom_collections": default,
            "customers": default,
            "orders": default,
            "order_refunds": {
                self.REPLICATION_KEYS: {"created_at"},
                self.PRIMARY_KEYS: {"id"},
                self.REPLICATION_METHOD: self.INCREMENTAL,
                self.API_LIMIT: 250},
            "products": default,
            "metafields": meta,
            "transactions": {
                self.REPLICATION_KEYS: {"created_at"},
                self.PRIMARY_KEYS: {"id"},
                self.FOREIGN_KEYS: {"order_id"},
                self.REPLICATION_METHOD: self.INCREMENTAL,
                self.API_LIMIT: 250}
        }

    def expected_streams(self):
        """A set of expected stream names"""
        return set(self.expected_metadata().keys())

    def child_streams(self):
        """
        Return a set of streams that are child streams
        based on having foreign key metadata
        """
        return {stream for stream, metadata in self.expected_metadata().items()
                if metadata.get(self.FOREIGN_KEYS)}

    def expected_primary_keys(self):
        """
        Return a dictionary with key of table name
        and value as a set of primary key fields
        """
        return {table: properties.get(self.PRIMARY_KEYS, set())
                for table, properties
                in self.expected_metadata().items()}

    def expected_replication_keys(self):
        """
        Return a dictionary with key of table name
        and value as a set of replication key fields
        """
        return {table: properties.get(self.REPLICATION_KEYS, set())
                for table, properties
                in self.expected_metadata().items()}

    def expected_foreign_keys(self):
        """
        Return a dictionary with key of table name
        and value as a set of foreign key fields
        """
        return {table: properties.get(self.FOREIGN_KEYS, set())
                for table, properties
                in self.expected_metadata().items()}

    def expected_replication_method(self):
        """Return a dictionary with key of table name and value of replication method"""
        return {table: properties.get(self.REPLICATION_METHOD, None)
                for table, properties
                in self.expected_metadata().items()}

    def setUp(self):
        """Verify that you have set the prerequisites to run the tap (creds, etc.)"""
        missing_envs = [x for x in [os.getenv('TAP_SHOPIFY_API_KEY')] if x is None]
        if missing_envs:
            raise Exception("Set the TAP_SHOPIFY_API_KEY environment variable")

    def test_run(self):
        """
        Default Test Setup
        Remove previous connections (with the same name)
        Create a new connection (with the properties and credentials above)
        Run discovery and ensure it completes successfully
        """
        self.do_test(self.create_connection())

    def do_test(self, conn_id):
        """A placeholder test to override in sub-class tests"""

    #########################
    #    Helper Methods     #
    #########################

    def create_connection(self, original_properties: bool = True):
        """Create a new connection with the test name"""
        # Create the connection
        conn_id = connections.ensure_connection(self, original_properties)

        # Run a check job using orchestrator (discovery)
        check_job_name = runner.run_check_mode(self, conn_id)

        # Assert that the check job succeeded
        exit_status = menagerie.get_exit_status(conn_id, check_job_name)
        menagerie.verify_check_exit_status(self, exit_status, check_job_name)
        return conn_id

    def run_sync(self, conn_id):
        """
        Run a sync job and make sure it exited properly.
        Return a dictionary with keys of streams synced
        and values of records synced for each stream
        """
        # Run a sync job using orchestrator
        sync_job_name = runner.run_sync_mode(self, conn_id)

        # Verify tap and target exit codes
        exit_status = menagerie.get_exit_status(conn_id, sync_job_name)
        menagerie.verify_sync_exit_status(self, exit_status, sync_job_name)

        # Verify actual rows were synced
        sync_record_count = runner.examine_target_output_file(
            self, conn_id, self.expected_streams(), self.expected_primary_keys())
        return sync_record_count

    @staticmethod
    def local_to_utc(date: dt):
        """Convert a datetime with timezone information to UTC"""
        utc = dt(date.year, date.month, date.day, date.hour, date.minute,
                 date.second, date.microsecond, tz.utc)

        # Note: relies on the private _offset attribute of the tzinfo object
        if date.tzinfo and hasattr(date.tzinfo, "_offset"):
            utc += date.tzinfo._offset

        return utc

    def max_bookmarks_by_stream(self, sync_records):
        """
        Return the maximum value of the replication key for each stream,
        which is the expected bookmark value.
        Comparisons are based on the class of the bookmark value. Dates are
        compared as strings, which works for ISO date-time strings.
        """
        max_bookmarks = {}
        for stream, batch in sync_records.items():

            upsert_messages = [m for m in batch.get('messages') if m['action'] == 'upsert']
            stream_bookmark_key = self.expected_replication_keys().get(stream, set())
            assert len(stream_bookmark_key) == 1  # There shouldn't be a compound replication key
            stream_bookmark_key = stream_bookmark_key.pop()

            bk_values = [message["data"].get(stream_bookmark_key) for message in upsert_messages]
            max_bookmarks[stream] = {stream_bookmark_key: None}
            for bk_value in bk_values:
                if bk_value is None:
                    continue

                if max_bookmarks[stream][stream_bookmark_key] is None:
                    max_bookmarks[stream][stream_bookmark_key] = bk_value

                if bk_value > max_bookmarks[stream][stream_bookmark_key]:
                    max_bookmarks[stream][stream_bookmark_key] = bk_value
        return max_bookmarks

    def min_bookmarks_by_stream(self, sync_records):
        """Return the minimum value of the replication key for each stream"""
        min_bookmarks = {}
        for stream, batch in sync_records.items():

            upsert_messages = [m for m in batch.get('messages') if m['action'] == 'upsert']
            stream_bookmark_key = self.expected_replication_keys().get(stream, set())
            assert len(stream_bookmark_key) == 1  # There shouldn't be a compound replication key
            (stream_bookmark_key, ) = stream_bookmark_key

            bk_values = [message["data"].get(stream_bookmark_key) for message in upsert_messages]
            min_bookmarks[stream] = {stream_bookmark_key: None}
            for bk_value in bk_values:
                if bk_value is None:
                    continue

                if min_bookmarks[stream][stream_bookmark_key] is None:
                    min_bookmarks[stream][stream_bookmark_key] = bk_value

                if bk_value < min_bookmarks[stream][stream_bookmark_key]:
                    min_bookmarks[stream][stream_bookmark_key] = bk_value
        return min_bookmarks

    @staticmethod
    def select_all_streams_and_fields(conn_id, catalogs, select_all_fields: bool = True):
        """Select all streams and all fields within streams"""
        for catalog in catalogs:
            schema = menagerie.get_annotated_schema(conn_id, catalog['stream_id'])

            non_selected_properties = []
            if not select_all_fields:
                # Get a list of all properties so that none are selected
                non_selected_properties = schema.get('annotated-schema', {}).get(
                    'properties', {}).keys()

            connections.select_catalog_and_fields_via_metadata(
                conn_id, catalog, schema, [], non_selected_properties)

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.start_date = self.get_properties().get("start_date")
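A concrete test would subclass BaseTapTest, override name() and do_test(), and reuse the helpers above. Below is a minimal sketch, assuming the module is importable as base and skipping stream selection (which requires fetching the connection's catalogs first); it is not part of this commit:

    # Hypothetical subclass, for illustration only.
    from base import BaseTapTest


    class MinimalSyncTest(BaseTapTest):
        """Discover via create_connection(), then sync and count records."""

        @staticmethod
        def name():
            return "tap_tester_shopify_minimal_sync"

        def do_test(self, conn_id):
            # test_run() already created the connection and verified discovery;
            # run a sync and check that every expected stream emitted records.
            record_count_by_stream = self.run_sync(conn_id)
            for stream in self.expected_streams():
                self.assertGreater(record_count_by_stream.get(stream, 0), 0)

Keeping discovery in create_connection() and the assertions in do_test() means each subclass contains only the behavior it verifies, which is why test_run() is defined once on the base class.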