Commit

add logic to fetch only the latest
pindge committed Jun 15, 2022
1 parent fc9ede9 commit 64eaae1
Showing 4 changed files with 18 additions and 6 deletions.
3 changes: 2 additions & 1 deletion cubedash/summary/_schema.py
@@ -184,8 +184,9 @@
     Column("crses", postgres.ARRAY(String)),
     # Size of this dataset in bytes, if the product includes it.
     Column("size_bytes", BigInteger),
+    Column("regions_hash", String),
     PrimaryKeyConstraint(
-        "product_ref", "start_day", "period_type", "regions"
+        "product_ref", "start_day", "period_type", "regions_hash",
     ),
     CheckConstraint(
         r"array_length(timeline_dataset_start_days, 1) = "
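The schema change swaps the raw regions value out of the composite primary key for a fixed-width digest, so a summary row can be keyed without comparing whole region arrays. A minimal sketch of that idea (the helper name is hypothetical; the commit inlines the same expression in _put() below, and omits sort_keys, relying on region_values already having a stable order):

import hashlib
import json

def regions_hash(region_values: dict) -> str:
    # Serialise the region counts canonically, then hash: equal mappings
    # always produce the same 56-character SHA-224 hex digest.
    return hashlib.sha224(
        json.dumps(region_values, sort_keys=True).encode("utf-8")
    ).hexdigest()

# Equal mappings hash identically regardless of insertion order,
# which is what makes the digest safe to use in a primary key.
assert regions_hash({"a": 1, "b": 2}) == regions_hash({"b": 2, "a": 1})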
13 changes: 10 additions & 3 deletions cubedash/summary/_stores.py
@@ -820,7 +820,7 @@ def get(
                     TIME_OVERVIEW.c.start_day == start_day,
                     TIME_OVERVIEW.c.period_type == period,
                 )
-            )
+            ).order_by(TIME_OVERVIEW.c.generation_time.desc())
         ).fetchone()
 
         if not res:
@@ -1086,11 +1086,17 @@ def _put(
         region_values, _ = _counter_key_vals(summary.region_dataset_counts)
 
         row = _summary_to_row(summary)
+
+        import hashlib
+        import json
         ret = self._engine.execute(
             postgres.insert(TIME_OVERVIEW)
             .returning(TIME_OVERVIEW.c.generation_time)
             .on_conflict_do_update(
-                index_elements=["product_ref", "start_day", "period_type", "regions"],
+                index_elements=[
+                    "product_ref", "start_day", "period_type",
+                    "regions_hash"
+                ],
                 set_=row,
                 where=and_(
                     TIME_OVERVIEW.c.product_ref == product.id_,
@@ -1100,7 +1106,8 @@
                 ),
             )
             .values(
-                product_ref=product.id_, start_day=start_day, period_type=period, **row
+                product_ref=product.id_, start_day=start_day, period_type=period,
+                regions_hash=hashlib.sha224(json.dumps(region_values).encode("utf-8")).hexdigest(), **row
             )
         )
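Two patterns are at work in _stores.py: _put() upserts a summary row keyed on the new regions_hash, and get() orders by generation_time descending so fetchone() returns only the latest row. Below is a self-contained sketch of both against a toy table: the names are illustrative, not the project's real schema, and it assumes SQLAlchemy 1.x-style engine.execute() as used in the surrounding file. It also hoists the hashlib/json imports to module level, the more conventional spot than the function-local imports in the diff:

import hashlib
import json

from sqlalchemy import Column, DateTime, MetaData, String, Table, func, select
from sqlalchemy.dialects import postgresql as postgres

METADATA = MetaData()
TOY_OVERVIEW = Table(
    "toy_overview",
    METADATA,
    Column("product_ref", String, primary_key=True),
    Column("regions_hash", String, primary_key=True),
    Column("summary", String),
    Column("generation_time", DateTime, server_default=func.now()),
)

def put(engine, product_ref, region_values, row):
    # Key the upsert on the digest: a conflicting (product_ref, regions_hash)
    # pair updates the existing row in place instead of inserting a duplicate.
    digest = hashlib.sha224(json.dumps(region_values).encode("utf-8")).hexdigest()
    return engine.execute(
        postgres.insert(TOY_OVERVIEW)
        .returning(TOY_OVERVIEW.c.generation_time)
        .on_conflict_do_update(
            index_elements=["product_ref", "regions_hash"],
            set_={**row, "generation_time": func.now()},
        )
        .values(product_ref=product_ref, regions_hash=digest, **row)
    )

def get_latest(engine, product_ref):
    # Several generations of a summary may exist; order newest-first and
    # take one row, mirroring the .order_by(...desc()).fetchone() change above.
    return engine.execute(
        select([TOY_OVERVIEW])
        .where(TOY_OVERVIEW.c.product_ref == product_ref)
        .order_by(TOY_OVERVIEW.c.generation_time.desc())
    ).fetchone()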
5 changes: 5 additions & 0 deletions integration_tests/test_page_loads.py
@@ -126,6 +126,11 @@ def test_invalid_footprint_wofs_summary_load(client: FlaskClient):
     html = get_html(client, "/wofs_summary")
     check_dataset_count(html, 1244)
 
+    # With this test setup, the page will return 0 dataset listings
+    html = get_html(client, "/products/wofs_summary/datasets")
+    search_results = html.find(".search-result a")
+    assert len(search_results) == 0
+
 
 def test_all_products_are_shown(client: FlaskClient):
     """
3 changes: 1 addition & 2 deletions integration_tests/test_summarise_data.py
@@ -182,8 +182,7 @@ def test_generate_incremental_archivals(run_generate, summary_store: SummaryStor
         ), "Expected dataset count to decrease after archival"
     finally:
         # Now let's restore the dataset!
-        # index.datasets.restore([dataset_id])
-        pass
+        index.datasets.restore([dataset_id])
 
     # It should be in the count again.
     # (this change should work because the new 'updated' column will be bumped on restore)
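The restore now happens inside the finally block, so the dataset comes back even if the count assertion fails, and the trailing comment explains why the incremental re-generate then sees it: restoring bumps the dataset's 'updated' column. A sketch of the round-trip, assuming a datacube index with datasets.archive()/datasets.restore() and a hypothetical dataset_count() helper standing in for the test's Explorer count checks:

def archive_restore_roundtrip(index, dataset_id, run_generate, dataset_count):
    before = dataset_count()
    try:
        index.datasets.archive([dataset_id])
        run_generate()
        assert dataset_count() < before, (
            "Expected dataset count to decrease after archival"
        )
    finally:
        # Undo the archival even on failure, so later tests see the dataset.
        index.datasets.restore([dataset_id])

    # Restoring bumps the dataset's 'updated' column, so an incremental
    # summary regeneration picks the dataset up again.
    run_generate()
    assert dataset_count() == before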
