Skip to content

Commit

Permalink
Add compression tuple filtering information to EXPLAIN
Browse files Browse the repository at this point in the history
Show information about filtered batches in the EXPLAIN ANALYZE output.
  • Loading branch information
svenklemm committed Jul 15, 2024
1 parent 399f6c6 commit ccc3e11
Show file tree
Hide file tree
Showing 8 changed files with 44 additions and 21 deletions.
1 change: 1 addition & 0 deletions src/nodes/chunk_dispatch/chunk_dispatch.h
Original file line number Diff line number Diff line change
Expand Up @@ -74,6 +74,7 @@ typedef struct ChunkDispatchState
ResultRelInfo *rri;
/* flag to represent dropped attributes */
bool is_dropped_attr_exists;
int64 batches_filtered;
int64 batches_decompressed;
int64 tuples_decompressed;

Expand Down
3 changes: 3 additions & 0 deletions src/nodes/hypertable_modify.c
Original file line number Diff line number Diff line change
Expand Up @@ -240,10 +240,13 @@ hypertable_modify_explain(CustomScanState *node, List *ancestors, ExplainState *
foreach (lc, chunk_dispatch_states)
{
ChunkDispatchState *cds = (ChunkDispatchState *) lfirst(lc);
state->batches_filtered += cds->batches_filtered;
state->batches_decompressed += cds->batches_decompressed;
state->tuples_decompressed += cds->tuples_decompressed;
}
}
if (state->batches_filtered > 0)
ExplainPropertyInteger("Batches filtered", NULL, state->batches_filtered, es);
if (state->batches_decompressed > 0)
ExplainPropertyInteger("Batches decompressed", NULL, state->batches_decompressed, es);
if (state->tuples_decompressed > 0)
Expand Down
7 changes: 7 additions & 0 deletions src/nodes/hypertable_modify.h
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,12 @@ typedef struct HypertableModifyPath
CustomPath cpath;
} HypertableModifyPath;

/*
* State for the hypertable_modify custom scan node.
*
* This struct definition is also used in ts_stat_statements, so any new fields
* should only be added at the end of the struct.
*/
typedef struct HypertableModifyState
{
CustomScanState cscan_state;
Expand All @@ -25,6 +31,7 @@ typedef struct HypertableModifyState
Snapshot snapshot;
int64 tuples_decompressed;
int64 batches_decompressed;
int64 batches_filtered;
} HypertableModifyState;

extern void ts_hypertable_modify_fixup_tlist(Plan *plan);
Expand Down
1 change: 1 addition & 0 deletions tsl/src/compression/compression.h
Original file line number Diff line number Diff line change
Expand Up @@ -410,6 +410,7 @@ const CompressionAlgorithmDefinition *algorithm_definition(CompressionAlgorithm

/*
 * Counters gathered while scanning compressed batches for DML decompression.
 *
 * The totals are accumulated upward (into ChunkDispatchState and
 * HypertableModifyState) and surfaced in EXPLAIN ANALYZE as
 * "Batches filtered", "Batches decompressed" and "Tuples decompressed".
 */
struct decompress_batches_stats
{
int64 batches_filtered; /* batches skipped without being decompressed */
int64 batches_decompressed; /* batches that were decompressed */
int64 tuples_decompressed; /* total tuples produced by decompression */
};
14 changes: 6 additions & 8 deletions tsl/src/compression/compression_dml.c
Original file line number Diff line number Diff line change
Expand Up @@ -203,6 +203,7 @@ decompress_batches_for_insert(const ChunkInsertState *cis, TupleTableSlot *slot)
cis->cds->skip_current_tuple = true;
}

cis->cds->batches_filtered += stats.batches_filtered;
cis->cds->batches_decompressed += stats.batches_decompressed;
cis->cds->tuples_decompressed += stats.tuples_decompressed;

Expand Down Expand Up @@ -328,6 +329,7 @@ decompress_batches_for_update_delete(HypertableModifyState *ht_state, Chunk *chu
filter = lfirst(lc);
pfree(filter);
}
ht_state->batches_filtered += stats.batches_filtered;
ht_state->batches_decompressed += stats.batches_decompressed;
ht_state->tuples_decompressed += stats.tuples_decompressed;

Expand Down Expand Up @@ -358,10 +360,7 @@ decompress_batches_indexscan(Relation in_rel, Relation out_rel, Relation index_r
int num_segmentby_filtered_rows = 0;
int num_heap_filtered_rows = 0;

struct decompress_batches_stats stats = {
.batches_decompressed = 0,
.tuples_decompressed = 0,
};
struct decompress_batches_stats stats = { 0 };

/* TODO: Optimization by reusing the index scan while working on a single chunk */
IndexScanDesc scan = index_beginscan(in_rel, index_rel, snapshot, num_index_scankeys, 0);
Expand Down Expand Up @@ -442,6 +441,7 @@ decompress_batches_indexscan(Relation in_rel, Relation out_rel, Relation index_r
skip_current_tuple))
{
row_decompressor_reset(&decompressor);
stats.batches_filtered++;
continue;
}

Expand Down Expand Up @@ -524,10 +524,7 @@ decompress_batches_seqscan(Relation in_rel, Relation out_rel, Snapshot snapshot,
TableScanDesc scan = table_beginscan(in_rel, snapshot, num_scankeys, scankeys);
int num_scanned_rows = 0;
int num_filtered_rows = 0;
struct decompress_batches_stats stats = {
.batches_decompressed = 0,
.tuples_decompressed = 0,
};
struct decompress_batches_stats stats = { 0 };

while (table_scan_getnextslot(scan, ForwardScanDirection, slot))
{
Expand Down Expand Up @@ -586,6 +583,7 @@ decompress_batches_seqscan(Relation in_rel, Relation out_rel, Snapshot snapshot,
skip_current_tuple))
{
row_decompressor_reset(&decompressor);
stats.batches_filtered++;
continue;
}

Expand Down
3 changes: 2 additions & 1 deletion tsl/test/shared/expected/compress_unique_index.out
Original file line number Diff line number Diff line change
Expand Up @@ -27,10 +27,11 @@ ERROR: duplicate key value violates unique constraint "_hyper_X_X_chunk_uniq_ex
EXPLAIN (analyze,costs off,summary off,timing off) INSERT INTO compress_unique VALUES ('2000-01-01','m1','c2','2000-01-02');
QUERY PLAN
Custom Scan (HypertableModify) (actual rows=0 loops=1)
Batches filtered: 1
-> Insert on compress_unique (actual rows=0 loops=1)
-> Custom Scan (ChunkDispatch) (actual rows=1 loops=1)
-> Result (actual rows=1 loops=1)
(4 rows)
(5 rows)

-- should decompress no batches
EXPLAIN (analyze,costs off,summary off,timing off) INSERT INTO compress_unique VALUES ('2000-01-01','m1','c3','2000-01-02');
Expand Down
30 changes: 20 additions & 10 deletions tsl/test/shared/expected/compression_dml.out
Original file line number Diff line number Diff line change
Expand Up @@ -260,27 +260,30 @@ QUERY PLAN
BEGIN; :ANALYZE UPDATE lazy_decompress SET value = 3.14 WHERE value = 0; ROLLBACK;
QUERY PLAN
Custom Scan (HypertableModify) (actual rows=0 loops=1)
Batches filtered: 6
-> Update on lazy_decompress (actual rows=0 loops=1)
Update on _hyper_X_X_chunk lazy_decompress_1
-> Result (actual rows=0 loops=1)
-> Seq Scan on _hyper_X_X_chunk lazy_decompress_1 (actual rows=0 loops=1)
Filter: (value = '0'::double precision)
(6 rows)
(7 rows)

BEGIN; :ANALYZE UPDATE lazy_decompress SET value = 3.14 WHERE value = 0 AND device='d1'; ROLLBACK;
QUERY PLAN
Custom Scan (HypertableModify) (actual rows=0 loops=1)
Batches filtered: 6
-> Update on lazy_decompress (actual rows=0 loops=1)
Update on _hyper_X_X_chunk lazy_decompress_1
-> Result (actual rows=0 loops=1)
-> Seq Scan on _hyper_X_X_chunk lazy_decompress_1 (actual rows=0 loops=1)
Filter: ((value = '0'::double precision) AND (device = 'd1'::text))
(6 rows)
(7 rows)

-- 1 batch decompression
BEGIN; :ANALYZE UPDATE lazy_decompress SET value = 3.14 WHERE value = 2300; ROLLBACK;
QUERY PLAN
Custom Scan (HypertableModify) (actual rows=0 loops=1)
Batches filtered: 5
Batches decompressed: 1
Tuples decompressed: 1000
-> Update on lazy_decompress (actual rows=0 loops=1)
Expand All @@ -289,11 +292,12 @@ QUERY PLAN
-> Seq Scan on _hyper_X_X_chunk lazy_decompress_1 (actual rows=1 loops=1)
Filter: (value = '2300'::double precision)
Rows Removed by Filter: 999
(9 rows)
(10 rows)

BEGIN; :ANALYZE UPDATE lazy_decompress SET value = 3.14 WHERE value > 3100 AND value < 3200; ROLLBACK;
QUERY PLAN
Custom Scan (HypertableModify) (actual rows=0 loops=1)
Batches filtered: 5
Batches decompressed: 1
Tuples decompressed: 1000
-> Update on lazy_decompress (actual rows=0 loops=1)
Expand All @@ -302,11 +306,12 @@ QUERY PLAN
-> Seq Scan on _hyper_X_X_chunk lazy_decompress_1 (actual rows=99 loops=1)
Filter: ((value > '3100'::double precision) AND (value < '3200'::double precision))
Rows Removed by Filter: 901
(9 rows)
(10 rows)

BEGIN; :ANALYZE UPDATE lazy_decompress SET value = 3.14 WHERE value BETWEEN 3100 AND 3200; ROLLBACK;
QUERY PLAN
Custom Scan (HypertableModify) (actual rows=0 loops=1)
Batches filtered: 5
Batches decompressed: 1
Tuples decompressed: 1000
-> Update on lazy_decompress (actual rows=0 loops=1)
Expand All @@ -315,7 +320,7 @@ QUERY PLAN
-> Seq Scan on _hyper_X_X_chunk lazy_decompress_1 (actual rows=101 loops=1)
Filter: ((value >= '3100'::double precision) AND (value <= '3200'::double precision))
Rows Removed by Filter: 899
(9 rows)
(10 rows)

-- check GUC is working, should be 6 batches and 6000 tuples decompresed
SET timescaledb.enable_dml_decompression_tuple_filtering TO off;
Expand All @@ -337,57 +342,62 @@ RESET timescaledb.enable_dml_decompression_tuple_filtering;
BEGIN; :ANALYZE DELETE FROM lazy_decompress WHERE value = 0; ROLLBACK;
QUERY PLAN
Custom Scan (HypertableModify) (actual rows=0 loops=1)
Batches filtered: 6
-> Delete on lazy_decompress (actual rows=0 loops=1)
Delete on _hyper_X_X_chunk lazy_decompress_1
-> Seq Scan on _hyper_X_X_chunk lazy_decompress_1 (actual rows=0 loops=1)
Filter: (value = '0'::double precision)
(5 rows)
(6 rows)

BEGIN; :ANALYZE DELETE FROM lazy_decompress WHERE value = 0 AND device='d1'; ROLLBACK;
QUERY PLAN
Custom Scan (HypertableModify) (actual rows=0 loops=1)
Batches filtered: 6
-> Delete on lazy_decompress (actual rows=0 loops=1)
Delete on _hyper_X_X_chunk lazy_decompress_1
-> Seq Scan on _hyper_X_X_chunk lazy_decompress_1 (actual rows=0 loops=1)
Filter: ((value = '0'::double precision) AND (device = 'd1'::text))
(5 rows)
(6 rows)

-- 1 batch decompression
BEGIN; :ANALYZE DELETE FROM lazy_decompress WHERE value = 2300; ROLLBACK;
QUERY PLAN
Custom Scan (HypertableModify) (actual rows=0 loops=1)
Batches filtered: 5
Batches decompressed: 1
Tuples decompressed: 1000
-> Delete on lazy_decompress (actual rows=0 loops=1)
Delete on _hyper_X_X_chunk lazy_decompress_1
-> Seq Scan on _hyper_X_X_chunk lazy_decompress_1 (actual rows=1 loops=1)
Filter: (value = '2300'::double precision)
Rows Removed by Filter: 999
(8 rows)
(9 rows)

BEGIN; :ANALYZE DELETE FROM lazy_decompress WHERE value > 3100 AND value < 3200; ROLLBACK;
QUERY PLAN
Custom Scan (HypertableModify) (actual rows=0 loops=1)
Batches filtered: 5
Batches decompressed: 1
Tuples decompressed: 1000
-> Delete on lazy_decompress (actual rows=0 loops=1)
Delete on _hyper_X_X_chunk lazy_decompress_1
-> Seq Scan on _hyper_X_X_chunk lazy_decompress_1 (actual rows=99 loops=1)
Filter: ((value > '3100'::double precision) AND (value < '3200'::double precision))
Rows Removed by Filter: 901
(8 rows)
(9 rows)

BEGIN; :ANALYZE DELETE FROM lazy_decompress WHERE value BETWEEN 3100 AND 3200; ROLLBACK;
QUERY PLAN
Custom Scan (HypertableModify) (actual rows=0 loops=1)
Batches filtered: 5
Batches decompressed: 1
Tuples decompressed: 1000
-> Delete on lazy_decompress (actual rows=0 loops=1)
Delete on _hyper_X_X_chunk lazy_decompress_1
-> Seq Scan on _hyper_X_X_chunk lazy_decompress_1 (actual rows=101 loops=1)
Filter: ((value >= '3100'::double precision) AND (value <= '3200'::double precision))
Rows Removed by Filter: 899
(8 rows)
(9 rows)

-- check GUC is working, should be 6 batches and 6000 tuples decompresed
SET timescaledb.enable_dml_decompression_tuple_filtering TO off;
Expand Down
6 changes: 4 additions & 2 deletions tsl/test/shared/expected/decompress_tracking.out
Original file line number Diff line number Diff line change
Expand Up @@ -94,11 +94,12 @@ QUERY PLAN
BEGIN; :EXPLAIN_ANALYZE INSERT INTO decompress_tracking SELECT '2020-01-01 1:30','d1',random(); ROLLBACK;
QUERY PLAN
Custom Scan (HypertableModify) (actual rows=0 loops=1)
Batches filtered: 1
-> Insert on decompress_tracking (actual rows=0 loops=1)
-> Custom Scan (ChunkDispatch) (actual rows=1 loops=1)
-> Subquery Scan on "*SELECT*" (actual rows=1 loops=1)
-> Result (actual rows=1 loops=1)
(5 rows)
(6 rows)

BEGIN; :EXPLAIN_ANALYZE INSERT INTO decompress_tracking SELECT '2020-01-01','d2',random(); ROLLBACK;
QUERY PLAN
Expand All @@ -121,10 +122,11 @@ QUERY PLAN
BEGIN; :EXPLAIN_ANALYZE INSERT INTO decompress_tracking (VALUES ('2020-01-01 1:30','d1',random()),('2020-01-01 1:30','d2',random())); ROLLBACK;
QUERY PLAN
Custom Scan (HypertableModify) (actual rows=0 loops=1)
Batches filtered: 2
-> Insert on decompress_tracking (actual rows=0 loops=1)
-> Custom Scan (ChunkDispatch) (actual rows=2 loops=1)
-> Values Scan on "*VALUES*" (actual rows=2 loops=1)
(4 rows)
(5 rows)

-- test prepared statements EXPLAIN still works after execution
SET plan_cache_mode TO force_generic_plan;
Expand Down

0 comments on commit ccc3e11

Please sign in to comment.