Skip to content

Commit

Permalink
debug the tests
Browse files Browse the repository at this point in the history
  • Loading branch information
akuzm committed Dec 19, 2024
1 parent 9596392 commit 0009ebd
Show file tree
Hide file tree
Showing 5 changed files with 50 additions and 39 deletions.
6 changes: 4 additions & 2 deletions .github/gh_matrix_builder.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,7 @@

import json
import os
import random
import subprocess
from ci_settings import (
PG14_EARLIEST,
Expand Down Expand Up @@ -309,11 +310,12 @@ def macos_config(overrides):
sys.exit(1)

if tests:
# Bug fix: random.shuffle() shuffles its argument in place and returns None,
# so passing its result to " ".join() raises TypeError. Build the repeated
# test list first, shuffle it, then join the shuffled names.
test_pool = list(tests) * 20
random.shuffle(test_pool)
to_run = " ".join(test_pool)
m["include"].append(
build_debug_config(
{
"coverage": False,
"installcheck_args": f'TESTS="{" ".join(list(tests) * 20)}"',
"installcheck_args": f'TESTS="{to_run}"',
"name": "Flaky Check Debug",
"pg": PG16_LATEST,
"pginstallcheck": False,
Expand All @@ -324,7 +326,7 @@ def macos_config(overrides):
build_debug_config(
{
"coverage": False,
"installcheck_args": f'TESTS="{" ".join(list(tests) * 20)}"',
"installcheck_args": f'TESTS="{to_run}"',
"name": "Flaky Check Debug",
"pg": PG17_LATEST,
"pginstallcheck": False,
Expand Down
56 changes: 30 additions & 26 deletions tsl/test/expected/compression_qualpushdown.out
Original file line number Diff line number Diff line change
Expand Up @@ -274,6 +274,7 @@ EXPLAIN (costs off) SELECT * FROM pushdown_relabel WHERE dev_vc = 'varchar'::cha
Filter: ((dev_vc)::bpchar = 'varchar '::character(10))
(4 rows)

RESET enable_seqscan;
-- github issue #5286
CREATE TABLE deleteme AS
SELECT generate_series AS timestamp, 1 AS segment, 0 AS data
Expand All @@ -299,18 +300,20 @@ SELECT compress_chunk(i) FROM show_chunks('deleteme') i;
(1 row)

VACUUM ANALYZE deleteme;
EXPLAIN (costs off) SELECT sum(data) FROM deleteme WHERE segment::text like '%4%';
QUERY PLAN
-----------------------------------------------------------------------------
Finalize Aggregate
-> Gather
EXPLAIN (analyze, timing off, summary off) SELECT sum(data) FROM deleteme WHERE segment::text like '%4%';
QUERY PLAN
---------------------------------------------------------------------------------------------------------------------------------------------
Finalize Aggregate (cost=3235.23..3235.24 rows=1 width=8) (actual rows=1 loops=1)
-> Gather (cost=3235.12..3235.23 rows=1 width=8) (actual rows=2 loops=1)
Workers Planned: 1
-> Partial Aggregate
-> Parallel Append
-> Custom Scan (DecompressChunk) on _hyper_7_8_chunk
-> Parallel Seq Scan on compress_hyper_8_9_chunk
Workers Launched: 1
-> Partial Aggregate (cost=2235.12..2235.13 rows=1 width=8) (actual rows=1 loops=2)
-> Parallel Append (cost=0.06..1790.12 rows=178000 width=4) (actual rows=0 loops=2)
-> Custom Scan (DecompressChunk) on _hyper_7_8_chunk (cost=0.06..1790.12 rows=178000 width=4) (actual rows=0 loops=1)
-> Parallel Seq Scan on compress_hyper_8_9_chunk (cost=0.00..10.12 rows=178 width=40) (actual rows=0 loops=1)
Filter: ((segment)::text ~~ '%4%'::text)
(8 rows)
Rows Removed by Filter: 303
(10 rows)

EXPLAIN (costs off) SELECT sum(data) FROM deleteme WHERE '4' = segment::text;
QUERY PLAN
Expand Down Expand Up @@ -531,35 +534,35 @@ EXPLAIN (costs off) SELECT * FROM svf_pushdown WHERE c_name = CURRENT_SCHEMA;
(5 rows)

EXPLAIN (costs off) SELECT * FROM svf_pushdown WHERE c_bool;
QUERY PLAN
----------------------------------------------------------------------------------------------------------------------
QUERY PLAN
-----------------------------------------------------
Custom Scan (DecompressChunk) on _hyper_11_12_chunk
-> Index Scan using compress_hyper_12_13_chunk_c_date_c_time_c_timetz_c_timesta_idx on compress_hyper_12_13_chunk
Index Cond: (c_bool = true)
-> Seq Scan on compress_hyper_12_13_chunk
Filter: c_bool
(3 rows)

EXPLAIN (costs off) SELECT * FROM svf_pushdown WHERE c_bool = true;
QUERY PLAN
----------------------------------------------------------------------------------------------------------------------
QUERY PLAN
-----------------------------------------------------
Custom Scan (DecompressChunk) on _hyper_11_12_chunk
-> Index Scan using compress_hyper_12_13_chunk_c_date_c_time_c_timetz_c_timesta_idx on compress_hyper_12_13_chunk
Index Cond: (c_bool = true)
-> Seq Scan on compress_hyper_12_13_chunk
Filter: c_bool
(3 rows)

EXPLAIN (costs off) SELECT * FROM svf_pushdown WHERE c_bool = false;
QUERY PLAN
----------------------------------------------------------------------------------------------------------------------
QUERY PLAN
-----------------------------------------------------
Custom Scan (DecompressChunk) on _hyper_11_12_chunk
-> Index Scan using compress_hyper_12_13_chunk_c_date_c_time_c_timetz_c_timesta_idx on compress_hyper_12_13_chunk
Index Cond: (c_bool = false)
-> Seq Scan on compress_hyper_12_13_chunk
Filter: (NOT c_bool)
(3 rows)

EXPLAIN (costs off) SELECT * FROM svf_pushdown WHERE NOT c_bool;
QUERY PLAN
----------------------------------------------------------------------------------------------------------------------
QUERY PLAN
-----------------------------------------------------
Custom Scan (DecompressChunk) on _hyper_11_12_chunk
-> Index Scan using compress_hyper_12_13_chunk_c_date_c_time_c_timetz_c_timesta_idx on compress_hyper_12_13_chunk
Index Cond: (c_bool = false)
-> Seq Scan on compress_hyper_12_13_chunk
Filter: (NOT c_bool)
(3 rows)

-- current_query() is not a sqlvaluefunction and volatile so should not be pushed down
Expand All @@ -585,3 +588,4 @@ LATERAL(
--
(1 row)

DROP TABLE svf_pushdown;
21 changes: 12 additions & 9 deletions tsl/test/expected/compression_sorted_merge_distinct.out
Original file line number Diff line number Diff line change
Expand Up @@ -30,17 +30,20 @@ select count(compress_chunk(x, true)) from show_chunks('t') x;
(1 row)

analyze t;
explain (costs off) select * from t order by ts;
QUERY PLAN
-----------------------------------------------------------------------
Gather Merge
explain (analyze, timing off, summary off) select * from t order by ts;
QUERY PLAN
-----------------------------------------------------------------------------------------------------------------------------------------------
Gather Merge (cost=152820.15..294845.15 rows=1235000 width=24) (actual rows=840000 loops=1)
Workers Planned: 1
-> Sort
Workers Launched: 1
-> Sort (cost=151820.14..154907.64 rows=1235000 width=24) (actual rows=420000 loops=2)
Sort Key: _hyper_1_1_chunk.ts
-> Parallel Append
-> Custom Scan (DecompressChunk) on _hyper_1_1_chunk
-> Parallel Seq Scan on compress_hyper_2_2_chunk
(7 rows)
Sort Method: external merge Disk: 15128kB
Worker 0: Sort Method: external merge Disk: 12888kB
-> Parallel Append (cost=0.03..12388.35 rows=1235000 width=24) (actual rows=420000 loops=2)
-> Custom Scan (DecompressChunk) on _hyper_1_1_chunk (cost=0.03..12388.35 rows=1235000 width=24) (actual rows=420000 loops=2)
-> Parallel Seq Scan on compress_hyper_2_2_chunk (cost=0.00..38.35 rows=1235 width=92) (actual rows=1050 loops=2)
(10 rows)

explain (costs off) select * from t where low_card = 1 order by ts;
QUERY PLAN
Expand Down
4 changes: 3 additions & 1 deletion tsl/test/sql/compression_qualpushdown.sql
Original file line number Diff line number Diff line change
Expand Up @@ -122,6 +122,7 @@ EXPLAIN (costs off) SELECT * FROM pushdown_relabel WHERE dev_vc = 'varchar';
EXPLAIN (costs off) SELECT * FROM pushdown_relabel WHERE dev_c = 'char';
EXPLAIN (costs off) SELECT * FROM pushdown_relabel WHERE dev_vc = 'varchar' AND dev_c = 'char';
EXPLAIN (costs off) SELECT * FROM pushdown_relabel WHERE dev_vc = 'varchar'::char(10) AND dev_c = 'char'::varchar;
RESET enable_seqscan;

-- github issue #5286
CREATE TABLE deleteme AS
Expand All @@ -138,7 +139,7 @@ ALTER TABLE deleteme SET (

SELECT compress_chunk(i) FROM show_chunks('deleteme') i;
VACUUM ANALYZE deleteme;
EXPLAIN (costs off) SELECT sum(data) FROM deleteme WHERE segment::text like '%4%';
EXPLAIN (analyze, timing off, summary off) SELECT sum(data) FROM deleteme WHERE segment::text like '%4%';
EXPLAIN (costs off) SELECT sum(data) FROM deleteme WHERE '4' = segment::text;

CREATE TABLE deleteme_with_bytea(time bigint NOT NULL, bdata bytea);
Expand Down Expand Up @@ -200,3 +201,4 @@ LATERAL(
EXISTS (SELECT FROM meta) LIMIT 1
) l;

DROP TABLE svf_pushdown;
2 changes: 1 addition & 1 deletion tsl/test/sql/compression_sorted_merge_distinct.sql
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,7 @@ alter table t set (timescaledb.compress = true, timescaledb.compress_segmentby =
select count(compress_chunk(x, true)) from show_chunks('t') x;
analyze t;

explain (costs off) select * from t order by ts;
explain (analyze, timing off, summary off) select * from t order by ts;
explain (costs off) select * from t where low_card = 1 order by ts;
explain (costs off) select * from t where high_card = 1 order by ts;
explain (costs off) select * from t where low_card = 1 and high_card = 1 order by ts;
Expand Down

0 comments on commit 0009ebd

Please sign in to comment.