Skip to content

Commit

Permalink
main: Fix handling of unlimited archiving run duration
Browse files Browse the repository at this point in the history
  • Loading branch information
spbnick committed Nov 19, 2024
1 parent 97fd423 commit 7735c32
Show file tree
Hide file tree
Showing 2 changed files with 19 additions and 2 deletions.
5 changes: 3 additions & 2 deletions main.py
Original file line number Diff line number Diff line change
Expand Up @@ -510,8 +510,9 @@ def kcidb_archive(event, context):
# Find the maximum timestamp of the data we need to fetch
# We try to align all tables on a single time boundary
until = min(
datetime.datetime.max if data_max_duration is None
else min_after + data_max_duration,
datetime.datetime.max.replace(tzinfo=datetime.timezone.utc)
if data_max_duration is None else
min_after + data_max_duration,
op_now - data_min_age
)

Expand Down
16 changes: 16 additions & 0 deletions test_main.py
Original file line number Diff line number Diff line change
Expand Up @@ -454,3 +454,19 @@ def gen_data(id, ts):
for obj in dump.get(obj_list_name, []))
for obj_list_name in op_schema.id_fields
), "No complete three-week old data in the archive"

# Empty the archive
ar_client.empty()
# Trigger a run of full archiving at once, and wait
del params["data_max_duration"]
publisher.publish(params)
time.sleep(60)
# Check both data_4w and data_3w are in the archive database
dump = ar_client.dump()
assert all(
any(obj["id"] == "archive:4w"
for obj in dump.get(obj_list_name, [])) and
any(obj["id"] == "archive:3w"
for obj in dump.get(obj_list_name, []))
for obj_list_name in op_schema.id_fields
), "No complete four- and three-week old data in the archive"

0 comments on commit 7735c32

Please sign in to comment.