Best practice says to do this so that, when querying, we can let the clients handle the timezone conversion. As of right now, everything is stored in UTC even though the Python code that writes timestamps uses local time (datetime.now()).
What we need to do is run a migration script like the one below, and also update the datetime.now() calls to explicitly use UTC instead of local time.
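For the write side, a minimal sketch of the Python change (illustrative only; the actual call sites and variable names in our writer code will differ):

from datetime import datetime, timezone

# Before: naive local time, which is only UTC if the host happens to run in UTC.
# date_added = datetime.now()

# After: an explicit, timezone-aware UTC timestamp.
date_added = datetime.now(timezone.utc)

# Serialized for Supabase this carries the offset, e.g. 2024-01-01T12:34:56.789012+00:00
print(date_added.isoformat())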
The longer we wait to do this, the larger and more impactful the migration will be, but it isn't really hurting anything right now. I don't think the Supabase JS client we're using in Analytics can even be configured to read timestamps as if they were in another timezone anyway.
-- Drop all views because you can't alter a column used in a view
DROP VIEW total_unique_shelves;
DROP VIEW total_shelf_searches;
DROP VIEW cumulative_shelf_counts_daily;
DROP VIEW cumulative_shelf_counts_weekly;
DROP VIEW cumulative_shelf_counts_monthly;
DROP VIEW total_library_searches;
DROP VIEW library_avail_rate;
DROP VIEW library_availability_by_medium;
DROP VIEW hourly_shelf_searches;
DROP VIEW search_type_summary;
-- Modify "books" tableALTERTABLE"books" ALTER COLUMN "date_added" TYPE timestamptz, ALTER COLUMN "date_last_displayed" TYPE timestamptz;
-- Modify "library_searches" tableALTERTABLE"library_searches" ALTER COLUMN "time_start" TYPE timestamptz, ALTER COLUMN "time_complete" TYPE timestamptz;
-- Modify "shelf_searches" tableALTERTABLE"shelf_searches" ALTER COLUMN "time_start" TYPE timestamptz, ALTER COLUMN "time_complete" TYPE timestamptz;
-- Modify "shelves" tableALTERTABLE"shelves" ALTER COLUMN "date_added" TYPE timestamptz, ALTER COLUMN "date_last_searched" TYPE timestamptz;
-- Recreate all views again
CREATE VIEW total_unique_shelves AS
SELECT COUNT(1) FROM shelves;
CREATE VIEW total_shelf_searches AS
SELECT COUNT(1) FROM shelf_searches;
CREATE VIEW cumulative_shelf_counts_daily AS
WITH
  interval_series AS (
    SELECT
      GENERATE_SERIES(
        DATE_TRUNC('day', (SELECT current_date - INTERVAL '90 day')) - INTERVAL '1 day',
        DATE_TRUNC('day', CURRENT_DATE),
        '1 day'::INTERVAL
      ) AS date_interval
  ),
  shelf_counts AS (
    SELECT
      DATE_TRUNC('day', s.date_added) AS date,
      COUNT(s.shelf_id) AS shelf_count
    FROM shelves s
    GROUP BY 1
  )
SELECT
  TO_CHAR(i.date_interval, 'YYYY-MM-DD') AS date,
  TO_CHAR(i.date_interval, 'MON DD') AS date_axis,
  COALESCE(sc.shelf_count, 0) AS shelf_count,
  SUM(COALESCE(sc.shelf_count, 0)) OVER (
    ORDER BY i.date_interval
  ) AS "Cumulative Shelf Count"
FROM interval_series i
LEFT JOIN shelf_counts sc
  ON i.date_interval = sc.date
ORDER BY i.date_interval;
CREATE VIEW cumulative_shelf_counts_weekly AS
WITH
  interval_series AS (
    SELECT
      GENERATE_SERIES(
        DATE_TRUNC('week', (SELECT min(date_added) FROM shelves)) - INTERVAL '1 week',
        DATE_TRUNC('week', CURRENT_DATE),
        '1 week'::INTERVAL
      ) AS date_interval
  ),
  shelf_counts AS (
    SELECT
      DATE_TRUNC('week', s.date_added) AS date,
      COUNT(s.shelf_id) AS shelf_count
    FROM shelves s
    GROUP BY 1
  )
SELECT
  TO_CHAR(i.date_interval, 'YYYY-MM-DD') AS date,
  TO_CHAR(i.date_interval, 'MON DD') AS date_axis,
  COALESCE(sc.shelf_count, 0) AS shelf_count,
  SUM(COALESCE(sc.shelf_count, 0)) OVER (
    ORDER BY i.date_interval
  ) AS "Cumulative Shelf Count"
FROM interval_series i
LEFT JOIN shelf_counts sc
  ON i.date_interval = sc.date
ORDER BY i.date_interval;
CREATE VIEW cumulative_shelf_counts_monthly AS
WITH
  interval_series AS (
    SELECT
      GENERATE_SERIES(
        DATE_TRUNC('month', (SELECT min(date_added) FROM shelves)) - INTERVAL '1 month',
        DATE_TRUNC('month', CURRENT_DATE),
        '1 month'::INTERVAL
      ) AS date_interval
  ),
  shelf_counts AS (
    SELECT
      DATE_TRUNC('month', s.date_added) AS date,
      COUNT(s.shelf_id) AS shelf_count
    FROM shelves s
    GROUP BY 1
  )
SELECT
  TO_CHAR(i.date_interval, 'YYYY-MM-DD') AS date,
  TO_CHAR(i.date_interval, 'MON') AS date_axis,
  COALESCE(sc.shelf_count, 0) AS shelf_count,
  SUM(COALESCE(sc.shelf_count, 0)) OVER (
    ORDER BY i.date_interval
  ) AS "Cumulative Shelf Count"
FROM interval_series i
LEFT JOIN shelf_counts sc
  ON i.date_interval = sc.date
ORDER BY i.date_interval;
CREATE VIEW total_library_searches AS
SELECT COUNT(1) FROM library_searches;
CREATE VIEW library_avail_rate AS
SELECT
  ROUND(
    CAST(
      CAST(
        SUM(
          CASE
            WHEN available IS TRUE THEN 1
            ELSE 0
          END
        ) AS FLOAT) / COUNT(1) * 100 AS NUMERIC), 2) AS availability_perc
FROM library_searches;
CREATE VIEW library_availability_by_medium AS
SELECT
  CASE
    WHEN is_libby IS TRUE THEN 'Libby'
    ELSE 'Book'
  END AS medium,
  SUM(CASE WHEN available IS TRUE THEN 1 ELSE 0 END) AS "Available",
  SUM(CASE WHEN available IS NOT TRUE THEN 1 ELSE 0 END) AS "Unavailable"
FROM library_searches
GROUP BY 1;
CREATE VIEW hourly_shelf_searches AS
WITH
  interval_series AS (
    SELECT
      GENERATE_SERIES(0, 23, 1) AS hour_interval
  ),
  hourly_searches AS (
    SELECT
      DATE_PART('hour', time_start) AS hour_interval,
      COUNT(1) AS count
    FROM shelf_searches
    GROUP BY 1
    ORDER BY 1 ASC
  )
SELECT
  i.hour_interval AS "Hour",
  COALESCE(hs.count, 0) AS "Searches"
FROM interval_series i
LEFT JOIN hourly_searches hs
  ON i.hour_interval = hs.hour_interval
ORDER BY 1;
CREATE VIEW search_type_summary AS
SELECT
  search_type AS name,
  COUNT(1) AS value,
  CASE
    WHEN search_type = 'Shuffle' THEN 'indigo.6'
    WHEN search_type = 'Search' THEN 'blue.6'
  END AS color
FROM shelf_searches
GROUP BY 1;
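One thing to double-check when running the ALTER statements: Postgres reinterprets existing naive timestamp values using the session's TimeZone setting when a column changes from timestamp to timestamptz. Since the stored values are already UTC, the migration should pin that explicitly so a non-UTC session doesn't skew them. A sketch for one table (the other tables would follow the same pattern):

BEGIN;
SET LOCAL TIME ZONE 'UTC';
-- AT TIME ZONE 'UTC' tells Postgres the existing naive values are UTC
ALTER TABLE "books"
  ALTER COLUMN "date_added" TYPE timestamptz USING "date_added" AT TIME ZONE 'UTC',
  ALTER COLUMN "date_last_displayed" TYPE timestamptz USING "date_last_displayed" AT TIME ZONE 'UTC';
COMMIT;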