feat: restore removed pg type (keep backward compat with column type)
gfyrag committed Oct 18, 2024
1 parent 5c4fef9 commit 03f5c4f
Showing 26 changed files with 450 additions and 334 deletions.
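Note: the diffs below replace JSONB reads such as (moves.post_commit_volumes->>'input')::numeric with field access on a composite Postgres type, (moves.post_commit_volumes).inputs. As a rough sketch only, with the type name and layout inferred from the field names used in the diffs rather than taken from the commit itself, the restored type could look like this:

-- hypothetical sketch: a composite type carrying the inputs/outputs fields read below
create type volumes as
(
    inputs  numeric,
    outputs numeric
);

-- a column of this type is read with field access instead of jsonb extraction:
--   (moves.post_commit_volumes).inputs                -- composite field access (new style)
--   (moves.post_commit_volumes->>'input')::numeric    -- jsonb extraction (old style)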
11-make-stateless.sql (95 additions, 0 deletions)
@@ -0,0 +1,95 @@
drop trigger "insert_account" on accounts;
drop trigger "update_account" on accounts;
drop trigger "insert_transaction" on transactions;
drop trigger "update_transaction" on transactions;
drop trigger "insert_log" on logs;

alter table moves
add column transactions_id bigint;

alter table transactions
add column inserted_at timestamp without time zone
default (now() at time zone 'utc');

alter table transactions
alter column timestamp set default (now() at time zone 'utc');

DO
$do$
declare
ledger record;
vsql text;
BEGIN
for ledger in select * from _system.ledgers where bucket = current_schema loop
-- create a sequence for transactions per ledger, instead of a single sequence on the table, as we want contiguous ids within each ledger
-- note: ids can still have "holes", since a sql transaction can be rolled back after the sequence has been used

vsql = 'create sequence "transaction_id_' || ledger.id || '" owned by transactions.id';
execute vsql;

vsql = 'select setval(' || quote_literal('transaction_id_' || ledger.id) || ', coalesce((select max(id) + 1 from transactions where ledger = ' || quote_literal(ledger.name) || '), 1)::bigint, false)';
execute vsql;

-- create a sequence for logs per ledger, instead of a single sequence on the table, as we want contiguous ids within each ledger
-- note: ids can still have "holes", since a sql transaction can be rolled back after the sequence has been used
vsql = 'create sequence "log_id_' || ledger.id || '" owned by logs.id';
execute vsql;

vsql = 'select setval(' || quote_literal('log_id_' || ledger.id) || ', coalesce((select max(id) + 1 from logs where ledger = ' || quote_literal(ledger.name) || '), 1)::bigint, false)';
execute vsql;

-- enable post commit effective volumes synchronously
vsql = 'create index "pcev_' || ledger.id || '" on moves (accounts_address, asset, effective_date desc) where ledger = ' || quote_literal(ledger.name);
execute vsql;

vsql = 'create trigger "set_effective_volumes_' || ledger.id || '" before insert on moves for each row when (new.ledger = ' || quote_literal(ledger.name) || ') execute procedure set_effective_volumes()';
execute vsql;

vsql = 'create trigger "update_effective_volumes_' || ledger.id || '" after insert on moves for each row when (new.ledger = ' || quote_literal(ledger.name) || ') execute procedure update_effective_volumes()';
execute vsql;

-- logs hash
vsql = 'create trigger "set_log_hash_' || ledger.id || '" before insert on logs for each row when (new.ledger = ' || quote_literal(ledger.name) || ') execute procedure set_log_hash()';
execute vsql;

vsql = 'create trigger "update_account_metadata_history_' || ledger.id || '" after update on "accounts" for each row when (new.ledger = ' || quote_literal(ledger.name) || ') execute procedure update_account_metadata_history()';
execute vsql;

vsql = 'create trigger "insert_account_metadata_history_' || ledger.id || '" after insert on "accounts" for each row when (new.ledger = ' || quote_literal(ledger.name) || ') execute procedure insert_account_metadata_history()';
execute vsql;

vsql = 'create trigger "update_transaction_metadata_history_' || ledger.id || '" after update on "transactions" for each row when (new.ledger = ' || quote_literal(ledger.name) || ') execute procedure update_transaction_metadata_history()';
execute vsql;

vsql = 'create trigger "insert_transaction_metadata_history_' || ledger.id || '" after insert on "transactions" for each row when (new.ledger = ' || quote_literal(ledger.name) || ') execute procedure insert_transaction_metadata_history()';
execute vsql;

vsql = 'create index "transactions_sources_' || ledger.id || '" on transactions using gin (sources jsonb_path_ops) where ledger = ' || quote_literal(ledger.name);
execute vsql;

vsql = 'create index "transactions_destinations_' || ledger.id || '" on transactions using gin (destinations jsonb_path_ops) where ledger = ' || quote_literal(ledger.name);
execute vsql;

vsql = 'create trigger "transaction_set_addresses_' || ledger.id || '" before insert on transactions for each row when (new.ledger = ' || quote_literal(ledger.name) || ') execute procedure set_transaction_addresses()';
execute vsql;

vsql = 'create index "accounts_address_array_' || ledger.id || '" on accounts using gin (address_array jsonb_ops) where ledger = ' || quote_literal(ledger.name);
execute vsql;

vsql = 'create index "accounts_address_array_length_' || ledger.id || '" on accounts (jsonb_array_length(address_array)) where ledger = ' || quote_literal(ledger.name);
execute vsql;

vsql = 'create trigger "accounts_set_address_array_' || ledger.id || '" before insert on accounts for each row when (new.ledger = ' || quote_literal(ledger.name) || ') execute procedure set_address_array_for_account()';
execute vsql;

vsql = 'create index "transactions_sources_arrays_' || ledger.id || '" on transactions using gin (sources_arrays jsonb_path_ops) where ledger = ' || quote_literal(ledger.name);
execute vsql;

vsql = 'create index "transactions_destinations_arrays_' || ledger.id || '" on transactions using gin (destinations_arrays jsonb_path_ops) where ledger = ' || quote_literal(ledger.name);
execute vsql;

vsql = 'create trigger "transaction_set_addresses_segments_' || ledger.id || '" before insert on "transactions" for each row when (new.ledger = ' || quote_literal(ledger.name) || ') execute procedure set_transaction_addresses_segments()';
execute vsql;
end loop;
END
$do$;
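With these per-ledger sequences in place, contiguous ids are allocated explicitly at write time rather than through a table-wide serial. A minimal usage sketch, assuming a hypothetical ledger row with id 1 (how the application consumes the sequence is not part of this file):

-- "owned by transactions.id" above only ties the sequence's lifetime to the column;
-- it does not assign values automatically, so callers fetch ids themselves:
select nextval('transaction_id_1');   -- next contiguous transaction id for ledger 1
select nextval('log_id_1');           -- the matching log id sequence works the same way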

This file was deleted.

@@ -13,8 +13,8 @@ select distinct on (ledger, accounts_address, asset)
ledger,
accounts_address,
asset,
- (moves.post_commit_volumes->>'input')::numeric as input,
- (moves.post_commit_volumes->>'output')::numeric as output
+ (moves.post_commit_volumes).inputs as input,
+ (moves.post_commit_volumes).outputs as output
from (
select distinct (ledger, accounts_address, asset)
ledger,
@@ -6,9 +6,9 @@ as
$$
begin
new.post_commit_effective_volumes = coalesce((
- select json_build_object(
- 'input', (post_commit_effective_volumes->>'input')::numeric + case when new.is_source then 0 else new.amount end,
- 'output', (post_commit_effective_volumes->>'output')::numeric + case when new.is_source then new.amount else 0 end
+ select (
+ (post_commit_effective_volumes).inputs + case when new.is_source then 0 else new.amount end,
+ (post_commit_effective_volumes).outputs + case when new.is_source then new.amount else 0 end
)
from moves
where accounts_address = new.accounts_address
@@ -17,9 +17,9 @@ begin
and (effective_date < new.effective_date or (effective_date = new.effective_date and seq < new.seq))
order by effective_date desc, seq desc
limit 1
- ), json_build_object(
- 'input', case when new.is_source then 0 else new.amount end,
- 'output', case when new.is_source then new.amount else 0 end
+ ), (
+ case when new.is_source then 0 else new.amount end,
+ case when new.is_source then new.amount else 0 end
));

return new;
@@ -34,9 +34,9 @@
$$
begin
update moves
- set post_commit_effective_volumes = json_build_object(
- 'input', (post_commit_effective_volumes->>'input')::numeric + case when new.is_source then 0 else new.amount end,
- 'output', (post_commit_effective_volumes->>'output')::numeric + case when new.is_source then new.amount else 0 end
+ set post_commit_effective_volumes = (
+ (post_commit_effective_volumes).inputs + case when new.is_source then 0 else new.amount end,
+ (post_commit_effective_volumes).outputs + case when new.is_source then new.amount else 0 end
)
where accounts_address = new.accounts_address
and asset = new.asset
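In the updated trigger bodies the composite value is built with a bare row constructor and coerced to the column's type by the assignment. As a standalone illustration only, assuming a composite type named volumes with inputs/outputs fields (a hypothetical name, not shown in this excerpt), an explicit cast achieves the same construction in plain SQL:

-- hypothetical: build a composite value from a row constructor and read a field back
select (row(10, 4)::volumes).inputs;    -- 10
select ((10, 4)::volumes).outputs;      -- 4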
@@ -0,0 +1,4 @@
alter table "moves"
alter column post_commit_volumes drop not null,
alter column post_commit_effective_volumes drop not null
;