ANALYZE FORMAT=JSON: Backport block-nl-join.r_unpack_time_ms from 11.0 +fix MDEV-30830.

Also fix it to work with hashed join (MDEV-30830).

Reviewed by: Monty <monty@mariadb.org>
parent 18342cd5e1
commit 0269d82d53
16 changed files with 210 additions and 6 deletions
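For context, the new counter shows up in the block-nl-join node of ANALYZE FORMAT=JSON output as r_unpack_time_ms, the time spent unpacking rows from the join buffer. A minimal way to look at it, adapted from the tests added in this commit (assumes a server with this patch and the sequence storage engine; the table sizes and the join_cache_level value are only illustrative):

    create table t1 (a int, b int);
    insert into t1 select seq, seq/3 from seq_0_to_99;
    create table t2 (a int, b int);
    insert into t2 select seq, seq/5 from seq_0_to_99;

    -- join_cache_level=6 lets the join use the hashed (BNLH) variant,
    -- the case that MDEV-30830 fixes
    set @tmp=@@join_cache_level, join_cache_level=6;

    analyze format=json
    select * from t1, t2 where t1.a < 700 and t2.a < 100 and t1.b = t2.b;

    set join_cache_level=@tmp;
    drop table t1, t2;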
@@ -1,3 +1,3 @@
 # The time on ANALYSE FORMAT=JSON is rather variable
 
---replace_regex /("(r_total_time_ms|r_table_time_ms|r_other_time_ms|r_buffer_size|r_filling_time_ms)": )[^, \n]*/\1"REPLACED"/
+--replace_regex /("(r_[a-z_]*_time_ms|r_buffer_size)": )[^, \n]*/\1"REPLACED"/
@@ -153,6 +153,7 @@ ANALYZE
 "join_type": "BNL",
 "r_loops": 20,
 "r_filtered": 100,
+"r_unpack_time_ms": "REPLACED",
 "r_effective_rows": 60
 }
 }
@@ -196,6 +197,7 @@ ANALYZE
 "attached_condition": "tbl1.c > tbl2.c",
 "r_loops": 20,
 "r_filtered": 15.83333333,
+"r_unpack_time_ms": "REPLACED",
 "r_effective_rows": 60
 }
 }
@@ -658,6 +660,7 @@ ANALYZE
 "attached_condition": "<in_optimizer>(t2.b,t2.b in (subquery#2))",
 "r_loops": 2,
 "r_filtered": null,
+"r_unpack_time_ms": "REPLACED",
 "r_effective_rows": 0
 },
 "subqueries": [
@@ -750,6 +753,7 @@ ANALYZE
 "join_type": "BNL",
 "r_loops": 2,
 "r_filtered": null,
+"r_unpack_time_ms": "REPLACED",
 "r_effective_rows": 0
 },
 "subqueries": [
@@ -784,6 +788,7 @@ ANALYZE
 "attached_condition": "t2.f2 = t3.f3",
 "r_loops": 0,
 "r_filtered": null,
+"r_unpack_time_ms": "REPLACED",
 "r_effective_rows": null
 }
 }
@@ -921,6 +926,7 @@ ANALYZE
 "attached_condition": "t10.b = t11.b",
 "r_loops": 100,
 "r_filtered": 0.424285714,
+"r_unpack_time_ms": "REPLACED",
 "r_effective_rows": 700
 }
 }
@@ -973,6 +979,7 @@ ANALYZE
 "attached_condition": "t10.b = t11.b",
 "r_loops": 100,
 "r_filtered": 100,
+"r_unpack_time_ms": "REPLACED",
 "r_effective_rows": 2.97
 }
 }
mysql-test/main/analyze_format_json_timings.result (new file, 89 lines)
@@ -0,0 +1,89 @@
+#
+# MDEV-30830: ANALYZE FORMAT=JSON: r_unpack_time_ms is empty for the hashed joins
+#
+#
+# First, check a regular BNL-join
+#
+create table t1 (
+a int,
+b int
+);
+insert into t1 select seq, seq/3 from seq_0_to_99;
+create table t2 (
+a int,
+b int
+);
+insert into t2 select seq, seq/5 from seq_0_to_99;
+set @js='$out';
+set @out=(select json_extract(@js,'$**.block-nl-join.r_unpack_time_ms'));
+select cast(json_extract(@out,'$[0]') as DOUBLE) > 0;
+cast(json_extract(@out,'$[0]') as DOUBLE) > 0
+1
+drop table t1,t2;
+#
+# Now, check the hashed, BNL-H join
+#
+create table t1 (
+a int,
+b int
+);
+insert into t1 select seq, seq/3 from seq_0_to_499;
+create table t2 (
+a int,
+b int
+);
+insert into t2 select seq, seq/5 from seq_0_to_499;
+set @tmp=@@join_cache_level, join_cache_level=6;
+select '$out' as X;
+X
+{
+"query_block": {
+"select_id": 1,
+"r_loops": 1,
+"r_total_time_ms": "REPLACED",
+"table": {
+"table_name": "t1",
+"access_type": "ALL",
+"r_loops": 1,
+"rows": 500,
+"r_rows": 500,
+"r_table_time_ms": "REPLACED",
+"r_other_time_ms": "REPLACED",
+"filtered": 100,
+"r_filtered": 100,
+"attached_condition": "t1.a < 700 and t1.b is not null"
+},
+"block-nl-join": {
+"table": {
+"table_name": "t2",
+"access_type": "hash_ALL",
+"key": "#hash#$hj",
+"key_length": "5",
+"used_key_parts": ["b"],
+"ref": ["test.t1.b"],
+"r_loops": 1,
+"rows": 500,
+"r_rows": 500,
+"r_table_time_ms": "REPLACED",
+"r_other_time_ms": "REPLACED",
+"filtered": 100,
+"r_filtered": 20,
+"attached_condition": "t2.a < 100"
+},
+"buffer_type": "flat",
+"buffer_size": "18Kb",
+"join_type": "BNLH",
+"attached_condition": "t2.b = t1.b",
+"r_loops": 500,
+"r_filtered": 100,
+"r_unpack_time_ms": "REPLACED",
+"r_effective_rows": 0.594
+}
+}
+}
+set @out=(select json_extract(@js,'$**.block-nl-join.r_unpack_time_ms'));
+select cast(json_extract(@out,'$[0]') as DOUBLE) > 0;
+cast(json_extract(@out,'$[0]') as DOUBLE) > 0
+1
+set join_cache_level=@tmp;
+drop table t1, t2;
mysql-test/main/analyze_format_json_timings.test (new file, 77 lines)
@@ -0,0 +1,77 @@
+#
+# Tests to check that r_something_time_ms is non-zero in
+# ANALYZE FORMAT=JSON <statement>
+#
+--source include/default_optimizer_switch.inc
+--source include/have_sequence.inc
+
+# The tests here are large so that we get non-zero timings
+--source include/big_test.inc
+
+--echo #
+--echo # MDEV-30830: ANALYZE FORMAT=JSON: r_unpack_time_ms is empty for the hashed joins
+--echo #
+
+--echo #
+--echo # First, check a regular BNL-join
+--echo #
+create table t1 (
+a int,
+b int
+);
+insert into t1 select seq, seq/3 from seq_0_to_99;
+
+create table t2 (
+a int,
+b int
+);
+insert into t2 select seq, seq/5 from seq_0_to_99;
+
+let $out=`
+analyze format=json
+select * from t1, t2
+where
+t1.a < 700 and
+t2.a < 100
+and t1.b=t2.b
+`;
+
+evalp set @js='$out';
+set @out=(select json_extract(@js,'$**.block-nl-join.r_unpack_time_ms'));
+select cast(json_extract(@out,'$[0]') as DOUBLE) > 0;
+drop table t1,t2;
+
+--echo #
+--echo # Now, check the hashed, BNL-H join
+--echo #
+create table t1 (
+a int,
+b int
+);
+insert into t1 select seq, seq/3 from seq_0_to_499;
+
+create table t2 (
+a int,
+b int
+);
+insert into t2 select seq, seq/5 from seq_0_to_499;
+set @tmp=@@join_cache_level, join_cache_level=6;
+
+let $out=`
+analyze format=json
+select * from t1, t2
+where
+t1.a < 700 and
+t2.a < 100
+and t1.b=t2.b
+`;
+
+--source include/analyze-format.inc
+evalp select '$out' as X;
+
+set @out=(select json_extract(@js,'$**.block-nl-join.r_unpack_time_ms'));
+select cast(json_extract(@out,'$[0]') as DOUBLE) > 0;
+
+set join_cache_level=@tmp;
+drop table t1, t2;
+
@@ -443,6 +443,7 @@ ANALYZE
 "attached_condition": "t3.a = t0.a",
 "r_loops": 10,
 "r_filtered": 10,
+"r_unpack_time_ms": "REPLACED",
 "r_effective_rows": 10
 }
 }
@@ -520,6 +521,7 @@ ANALYZE
 "attached_condition": "t5.a = t6.a",
 "r_loops": 4,
 "r_filtered": 21.42857143,
+"r_unpack_time_ms": "REPLACED",
 "r_effective_rows": 7
 }
 }
@@ -336,6 +336,7 @@ ANALYZE
 "join_type": "BNL",
 "r_loops": 2,
 "r_filtered": 100,
+"r_unpack_time_ms": "REPLACED",
 "r_effective_rows": 2
 }
 }
@@ -374,6 +375,7 @@ ANALYZE
 "join_type": "BNL",
 "r_loops": 2,
 "r_filtered": 100,
+"r_unpack_time_ms": "REPLACED",
 "r_effective_rows": 2
 }
 }
@@ -441,6 +443,7 @@ ANALYZE
 "join_type": "BNL",
 "r_loops": 2,
 "r_filtered": 100,
+"r_unpack_time_ms": "REPLACED",
 "r_effective_rows": 2
 }
 }
@@ -479,6 +482,7 @@ ANALYZE
 "join_type": "BNL",
 "r_loops": 2,
 "r_filtered": 100,
+"r_unpack_time_ms": "REPLACED",
 "r_effective_rows": 2
 }
 }
@@ -455,6 +455,7 @@ ANALYZE
 "join_type": "BNL",
 "r_loops": 3,
 "r_filtered": 100,
+"r_unpack_time_ms": "REPLACED",
 "r_effective_rows": 3
 }
 }
@@ -493,6 +494,7 @@ ANALYZE
 "join_type": "BNL",
 "r_loops": 2,
 "r_filtered": 100,
+"r_unpack_time_ms": "REPLACED",
 "r_effective_rows": 3
 }
 }
@@ -559,6 +561,7 @@ ANALYZE
 "join_type": "BNL",
 "r_loops": 3,
 "r_filtered": 100,
+"r_unpack_time_ms": "REPLACED",
 "r_effective_rows": 3
 }
 }
@@ -597,6 +600,7 @@ ANALYZE
 "join_type": "BNL",
 "r_loops": 2,
 "r_filtered": 100,
+"r_unpack_time_ms": "REPLACED",
 "r_effective_rows": 3
 }
 }
@@ -1546,6 +1546,7 @@ ANALYZE
 "mrr_type": "Rowid-ordered scan",
 "r_loops": 10,
 "r_filtered": 100,
+"r_unpack_time_ms": "REPLACED",
 "r_effective_rows": 1
 }
 }
@@ -397,6 +397,7 @@ ANALYZE
 "join_type": "BNL",
 "r_loops": 3,
 "r_filtered": 100,
+"r_unpack_time_ms": "REPLACED",
 "r_effective_rows": 3
 }
 }
@@ -482,6 +483,7 @@ ANALYZE
 "join_type": "BNL",
 "r_loops": 3,
 "r_filtered": 100,
+"r_unpack_time_ms": "REPLACED",
 "r_effective_rows": 3
 }
 }
@@ -428,6 +428,7 @@ ANALYZE
 "join_type": "BNL",
 "r_loops": 5,
 "r_filtered": 100,
+"r_unpack_time_ms": "REPLACED",
 "r_effective_rows": 7
 }
 }
@@ -513,6 +514,7 @@ ANALYZE
 "join_type": "BNL",
 "r_loops": 5,
 "r_filtered": 100,
+"r_unpack_time_ms": "REPLACED",
 "r_effective_rows": 7
 }
 }
@@ -3687,6 +3687,7 @@ ANALYZE
 "attached_condition": "a.atp = 1",
 "r_loops": 3,
 "r_filtered": 100,
+"r_unpack_time_ms": "REPLACED",
 "r_effective_rows": 1
 },
 "block-nl-join": {
@@ -3713,6 +3714,7 @@ ANALYZE
 "attached_condition": "fi.fh in (6311439873746261694,-397087483897438286,8518228073041491534,-5420422472375069774)",
 "r_loops": 3,
 "r_filtered": 40,
+"r_unpack_time_ms": "REPLACED",
 "r_effective_rows": 26.66666667
 }
 }
@@ -1898,6 +1898,9 @@ void Explain_table_access::print_explain_json(Explain_query *query,
 writer->add_double(jbuf_tracker.get_filtered_after_where()*100.0);
 else
 writer->add_null();
+
+writer->add_member("r_unpack_time_ms");
+writer->add_double(jbuf_unpack_tracker.get_time_ms());
 /*
 effective_rows is average number of matches we got for an incoming
 row. The row is stored in the join buffer and then is read
@@ -722,7 +722,7 @@ public:
 class Explain_table_access : public Sql_alloc
 {
 public:
-Explain_table_access(MEM_ROOT *root) :
+Explain_table_access(MEM_ROOT *root, bool timed) :
 derived_select_number(0),
 non_merged_sjm_number(0),
 extra_tags(root),
@@ -735,6 +735,7 @@ public:
 pushed_index_cond(NULL),
 sjm_nest(NULL),
 pre_join_sort(NULL),
+jbuf_unpack_tracker(timed),
 rowid_filter(NULL)
 {}
 ~Explain_table_access() { delete sjm_nest; }
@@ -844,7 +845,12 @@ public:
 
 /* When using join buffer: Track the reads from join buffer */
 Table_access_tracker jbuf_tracker;
 
+
+/*
+Track the time to unpack rows from the join buffer.
+*/
+Time_and_counter_tracker jbuf_unpack_tracker;
 
 /* When using join buffer: Track the number of incoming record combinations */
 Counter_tracker jbuf_loops_tracker;
@@ -1614,7 +1614,7 @@ bool JOIN_CACHE::get_record()
 pos+= referenced_fields*size_of_fld_ofs;
 if (prev_cache)
 prev_cache->get_record_by_pos(prev_rec_ptr);
 }
 }
 return res;
 }
@@ -2389,7 +2389,9 @@ enum_nested_loop_state JOIN_CACHE::join_matching_records(bool skip_last)
 (join_tab->first_inner && !not_exists_opt_is_applicable) ||
 !skip_next_candidate_for_match(rec_ptr))
 {
-read_next_candidate_for_match(rec_ptr);
+ANALYZE_START_TRACKING(join->thd, join_tab->jbuf_unpack_tracker);
+read_next_candidate_for_match(rec_ptr);
+ANALYZE_STOP_TRACKING(join->thd, join_tab->jbuf_unpack_tracker);
 rc= generate_full_extensions(rec_ptr);
 if (rc != NESTED_LOOP_OK && rc != NESTED_LOOP_NO_MORE_ROWS)
 goto finish;
@@ -27396,6 +27396,7 @@ bool JOIN_TAB::save_explain_data(Explain_table_access *eta,
 tracker= &eta->tracker;
 jbuf_tracker= &eta->jbuf_tracker;
 jbuf_loops_tracker= &eta->jbuf_loops_tracker;
+jbuf_unpack_tracker= &eta->jbuf_unpack_tracker;
 
 /* Enable the table access time tracker only for "ANALYZE stmt" */
 if (thd->lex->analyze_stmt)
@@ -28029,7 +28030,8 @@ int JOIN::save_explain_data_intern(Explain_query *output,
 
 
 Explain_table_access *eta= (new (output->mem_root)
-Explain_table_access(output->mem_root));
+Explain_table_access(output->mem_root,
+thd->lex->analyze_stmt));
 
 if (!eta)
 DBUG_RETURN(1);
@@ -309,6 +309,7 @@ typedef struct st_join_table {
 Table_access_tracker *tracker;
 
 Table_access_tracker *jbuf_tracker;
+Time_and_counter_tracker *jbuf_unpack_tracker;
 Counter_tracker *jbuf_loops_tracker;
 /*
 Bitmap of TAB_INFO_* bits that encodes special line for EXPLAIN 'Extra'