MDEV-6109: EXPLAIN JSON
- First code: "EXPLAIN FORMAT=JSON stmt" and "ANALYZE FORMAT=JSON stmt" work for
  basic queries. Complex constructs (e.g. subqueries) are not yet supported.
- No test infrastructure yet.
This commit is contained in:
parent 0925ab9d88
commit 5cfd3270ec

15 changed files with 642 additions and 111 deletions
@@ -100,6 +100,7 @@ SET(SQL_EMBEDDED_SOURCES emb_qcache.cc libmysqld.c lib_sql.cc
                ../sql/rpl_reporting.cc
                ../sql/sql_expression_cache.cc
                ../sql/my_apc.cc ../sql/my_apc.h
+               ../sql/my_json_writer.cc ../sql/my_json_writer.h
                ../sql/rpl_gtid.cc
                ../sql/sql_explain.cc ../sql/sql_explain.h
                ../sql/compat56.cc
@@ -90,6 +90,7 @@ SET (SQL_SOURCE
                threadpool_common.cc
                ../sql-common/mysql_async.c
                my_apc.cc my_apc.h
+               my_json_writer.cc my_json_writer.h
                rpl_gtid.cc rpl_parallel.cc
                table_cache.cc
                ${CMAKE_CURRENT_BINARY_DIR}/sql_builtin.cc
@@ -242,6 +242,7 @@ static SYMBOL symbols[] = {
   { "FOR",        SYM(FOR_SYM)},
   { "FORCE",      SYM(FORCE_SYM)},
   { "FOREIGN",    SYM(FOREIGN)},
+  { "FORMAT",     SYM(FORMAT_SYM)},
   { "FOUND",      SYM(FOUND_SYM)},
   { "FROM",       SYM(FROM)},
   { "FULL",       SYM(FULL)},
sql/my_json_writer.cc  (new file, 129 lines)
@@ -0,0 +1,129 @@
+/* Todo: SkySQL copyrights */
+
+#include <my_global.h>
+#include "sql_priv.h"
+#include "sql_string.h"
+
+#include "my_json_writer.h"
+
+void Json_writer::append_indent()
+{
+  if (!document_start)
+    output.append('\n');
+  for (int i=0; i< indent_level; i++)
+    output.append(' ');
+}
+
+void Json_writer::start_object()
+{
+  if (!element_started)
+    start_element();
+
+  output.append("{");
+  indent_level+=INDENT_SIZE;
+  first_child=true;
+  element_started= false;
+  document_start= false;
+}
+
+void Json_writer::start_array()
+{
+  if (!element_started)
+    start_element();
+
+  output.append("[");
+  indent_level+=INDENT_SIZE;
+  first_child=true;
+  element_started= false;
+  document_start= false;
+}
+
+
+void Json_writer::end_object()
+{
+  indent_level-=INDENT_SIZE;
+  if (!first_child)
+    append_indent();
+  output.append("}");
+}
+
+
+void Json_writer::end_array()
+{
+  indent_level-=INDENT_SIZE;
+  if (!first_child)
+    append_indent();
+  output.append("]");
+}
+
+
+Json_writer& Json_writer::add_member(const char *name)
+{
+  // assert that we are in an object
+  DBUG_ASSERT(!element_started);
+  start_element();
+
+  output.append('"');
+  output.append(name);
+  output.append("\": ");
+  return *this;
+}
+
+
+void Json_writer::start_element()
+{
+  element_started= true;
+
+  if (first_child)
+    first_child= false;
+  else
+    output.append(',');
+
+  append_indent();
+}
+
+void Json_writer::add_ll(longlong val)
+{
+  if (!element_started)
+    start_element();
+
+  char buf[64];
+  my_snprintf(buf, sizeof(buf), "%ld", val);
+  output.append(buf);
+  element_started= false;
+}
+
+
+void Json_writer::add_double(double val)
+{
+  if (!element_started)
+    start_element();
+
+  char buf[64];
+  my_snprintf(buf, sizeof(buf), "%lf", val);
+  output.append(buf);
+  element_started= false;
+}
+
+
+void Json_writer::add_str(const char *str)
+{
+  if (!element_started)
+    start_element();
+
+  output.append('"');
+  output.append(str);
+  output.append('"');
+  element_started= false;
+}
+
+void Json_writer::add_bool(bool val)
+{
+  add_str(val? "true" : "false");
+}
+
+void Json_writer::add_str(const String &str)
+{
+  add_str(str.ptr());
+}
sql/my_json_writer.h  (new file, 43 lines)
@@ -0,0 +1,43 @@
+/* Todo: SkySQL copyrights */
+
+class Json_writer
+{
+public:
+  /* Add a member. We must be in an object. */
+  Json_writer& add_member(const char *name);
+
+  /* Add atomic values */
+  void add_ll(longlong val);
+  void add_str(const char* val);
+  void add_str(const String &str);
+  void add_double(double val);
+  void add_bool(bool val);
+
+  /* Start a child object */
+  void start_object();
+  void start_array();
+
+  void end_object();
+  void end_array();
+
+  Json_writer() :
+    indent_level(0), document_start(true), element_started(false),
+    first_child(true)
+  {}
+private:
+  // stack of (name, bool is_object_or_array) elements.
+  int indent_level;
+  enum { INDENT_SIZE = 2 };
+
+  bool document_start;
+  bool element_started;
+  bool first_child;
+
+  void append_indent();
+  void start_element();
+
+  //const char *new_member_name;
+public:
+  String output;
+};
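For orientation, here is a minimal usage sketch for the writer declared above. It is not part of the commit: it assumes the surrounding sql/ build environment, and the member names ("query_block", "table_name", "t1") are invented for illustration. The expected text in the trailing comment follows from the append_indent()/start_element() logic in sql/my_json_writer.cc.

  // Hypothetical usage sketch (not from this commit); assumes the server
  // build environment where String, DBUG_ASSERT and my_snprintf exist.
  #include "my_json_writer.h"

  void json_writer_usage_example()
  {
    Json_writer writer;

    writer.start_object();                           // opens the document: '{'
    writer.add_member("query_block").start_object();
    writer.add_member("select_id").add_ll(1);
    writer.add_member("table").start_object();
    writer.add_member("table_name").add_str("t1");
    writer.end_object();                             // close "table"
    writer.end_object();                             // close "query_block"
    writer.end_object();                             // close the document

    // writer.output (a String) now holds:
    // {
    //   "query_block": {
    //     "select_id": 1,
    //     "table": {
    //       "table_name": "t1"
    //     }
    //   }
    // }
  }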
@@ -2271,7 +2271,10 @@ CHANGED_TABLE_LIST* THD::changed_table_dup(const char *key, long key_length)
 int THD::send_explain_fields(select_result *result)
 {
   List<Item> field_list;
-  make_explain_field_list(field_list);
+  if (lex->explain_json)
+    make_explain_json_field_list(field_list);
+  else
+    make_explain_field_list(field_list);
   result->prepare(field_list, NULL);
   return (result->send_result_set_metadata(field_list,
                                            Protocol::SEND_NUM_ROWS |
@@ -2279,6 +2282,13 @@ int THD::send_explain_fields(select_result *result)
 }
 
 
+void THD::make_explain_json_field_list(List<Item> &field_list)
+{
+  Item *item= new Item_empty_string("EXPLAIN", 78, system_charset_info);
+  field_list.push_back(item);
+}
+
+
 /*
   Populate the provided field_list with EXPLAIN output columns.
   this->lex->describe has the EXPLAIN flags
@@ -3059,6 +3059,7 @@ public:
   CHANGED_TABLE_LIST * changed_table_dup(const char *key, long key_length);
   int send_explain_fields(select_result *result);
   void make_explain_field_list(List<Item> &field_list);
+  void make_explain_json_field_list(List<Item> &field_list);
   /**
     Clear the current error, if any.
     We do not clear is_fatal_error or is_fatal_sub_stmt_error since we
@@ -20,7 +20,7 @@
 
 #include "sql_priv.h"
 #include "sql_select.h"
-
+#include "my_json_writer.h"
 
 Explain_query::Explain_query(THD *thd_arg) :
   upd_del_plan(NULL), insert_plan(NULL), thd(thd_arg), apc_enabled(false)
@@ -139,8 +139,13 @@ int Explain_query::send_explain(THD *thd)
       thd->send_explain_fields(result))
     return 1;
 
-  int res;
-  if ((res= print_explain(result, lex->describe, lex->analyze_stmt)))
+  int res= 0;
+  if (thd->lex->explain_json)
+    print_explain_json(result, thd->lex->analyze_stmt);
+  else
+    res= print_explain(result, lex->describe, thd->lex->analyze_stmt);
+
+  if (res)
     result->abort_result_set();
   else
     result->send_eof();
@@ -177,6 +182,40 @@ int Explain_query::print_explain(select_result_sink *output,
 }
 
 
+void Explain_query::print_explain_json(select_result_sink *output, bool is_analyze)
+{
+  Json_writer writer;
+  writer.start_object();
+
+  if (upd_del_plan)
+  {
+    //upd_del_plan->print_explain(this, output, explain_flags, is_analyze);
+    DBUG_ASSERT(0);
+  }
+  else if (insert_plan)
+  {
+    //insert_plan->print_explain(this, output, explain_flags, is_analyze);
+    DBUG_ASSERT(0);
+  }
+  else
+  {
+    /* Start printing from node with id=1 */
+    Explain_node *node= get_node(1);
+    if (!node)
+      return; /* No query plan */
+    node->print_explain_json(this, &writer, is_analyze);
+  }
+
+  writer.end_object();
+
+  const CHARSET_INFO *cs= system_charset_info;
+  List<Item> item_list;
+  String *buf= &writer.output;
+  item_list.push_back(new Item_string(buf->ptr(), buf->length(), cs));
+  output->send_data(item_list);
+}
+
+
 bool print_explain_query(LEX *lex, THD *thd, String *str)
 {
   return lex->explain->print_explain_str(thd, str, false);
@@ -214,12 +253,59 @@ static void push_string(List<Item> *item_list, String *str)
                                      system_charset_info));
 }
 
+static void push_string_list(List<Item> *item_list, List<char> &lines,
+                             String *buf)
+{
+  List_iterator_fast<char> it(lines);
+  char *line;
+  bool first= true;
+  while ((line= it++))
+  {
+    if (first)
+      first= false;
+    else
+      buf->append(',');
+
+    buf->append(line);
+  }
+  push_string(item_list, buf);
+}
+
+
+uint Explain_union::make_union_table_name(char *buf)
+{
+  uint childno= 0;
+  uint len= 6, lastop= 0;
+  memcpy(buf, STRING_WITH_LEN("<union"));
+
+  for (; childno < union_members.elements() && len + lastop + 5 < NAME_LEN;
+       childno++)
+  {
+    len+= lastop;
+    lastop= my_snprintf(buf + len, NAME_LEN - len,
+                        "%u,", union_members.at(childno));
+  }
+
+  if (childno < union_members.elements() || len + lastop >= NAME_LEN)
+  {
+    memcpy(buf + len, STRING_WITH_LEN("...>") + 1);
+    len+= 4;
+  }
+  else
+  {
+    len+= lastop;
+    buf[len - 1]= '>';  // change ',' to '>'
+  }
+  return len;
+}
+
+
 int Explain_union::print_explain(Explain_query *query,
                                  select_result_sink *output,
                                  uint8 explain_flags,
                                  bool is_analyze)
 {
   const CHARSET_INFO *cs= system_charset_info;
   char table_name_buffer[SAFE_NAME_LEN];
 
   /* print all UNION children, in order */
@@ -240,32 +326,8 @@ int Explain_union::print_explain(Explain_query *query,
   push_str(&item_list, fake_select_type);
 
   /* `table` column: something like "<union1,2>" */
-  {
-    uint childno= 0;
-    uint len= 6, lastop= 0;
-    memcpy(table_name_buffer, STRING_WITH_LEN("<union"));
-
-    for (; childno < union_members.elements() && len + lastop + 5 < NAME_LEN;
-         childno++)
-    {
-      len+= lastop;
-      lastop= my_snprintf(table_name_buffer + len, NAME_LEN - len,
-                          "%u,", union_members.at(childno));
-    }
-
-    if (childno < union_members.elements() || len + lastop >= NAME_LEN)
-    {
-      memcpy(table_name_buffer + len, STRING_WITH_LEN("...>") + 1);
-      len+= 4;
-    }
-    else
-    {
-      len+= lastop;
-      table_name_buffer[len - 1]= '>';  // change ',' to '>'
-    }
-    const CHARSET_INFO *cs= system_charset_info;
-    item_list.push_back(new Item_string(table_name_buffer, len, cs));
-  }
+  uint len= make_union_table_name(table_name_buffer);
+  item_list.push_back(new Item_string(table_name_buffer, len, cs));
 
   /* `partitions` column */
   if (explain_flags & DESCRIBE_PARTITIONS)
@@ -307,7 +369,6 @@ int Explain_union::print_explain(Explain_query *query,
   {
     extra_buf.append(STRING_WITH_LEN("Using filesort"));
   }
-  const CHARSET_INFO *cs= system_charset_info;
   item_list.push_back(new Item_string(extra_buf.ptr(), extra_buf.length(), cs));
 
   //output->unit.offset_limit_cnt= 0;
@@ -322,6 +383,36 @@ int Explain_union::print_explain(Explain_query *query,
 }
 
 
+void Explain_union::print_explain_json(Explain_query *query,
+                                       Json_writer *writer, bool is_analyze)
+{
+  char table_name_buffer[SAFE_NAME_LEN];
+
+  writer->add_member("query_block").start_object();
+  writer->add_member("union_result").start_object();
+  // using_temporary_table
+  make_union_table_name(table_name_buffer);
+  writer->add_member("table_name").add_str(table_name_buffer);
+  writer->add_member("access_type").add_str("ALL"); // not very useful
+  writer->add_member("query_specifications").start_array();
+
+  for (int i= 0; i < (int) union_members.elements(); i++)
+  {
+    writer->start_object();
+    writer->add_member("dependent").add_str("TODO");
+    writer->add_member("cacheable").add_str("TODO");
+    Explain_select *sel= query->get_select(union_members.at(i));
+    sel->print_explain_json(query, writer, is_analyze);
+    writer->end_object();
+  }
+  writer->end_array();
+
+  //TODO: print_explain_for_children
+
+  writer->end_object();
+}
+
+
 /*
   Print EXPLAINs for all children nodes (i.e. for subqueries)
 */
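Read together with the Json_writer implementation above, the members written here aim at a union node of roughly the following shape (an illustrative sketch, not captured server output; "dependent" and "cacheable" really are the literal "TODO" strings at this stage, and this first cut does not yet close every object it opens, in line with the commit message's "basic queries only" caveat):

  "query_block": {
    "union_result": {
      "table_name": "<union1,2>",
      "access_type": "ALL",
      "query_specifications": [
        { "dependent": "TODO", "cacheable": "TODO", "query_block": { ... } },
        { "dependent": "TODO", "cacheable": "TODO", "query_block": { ... } }
      ]
    }
  }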
@@ -410,21 +501,112 @@ int Explain_select::print_explain(Explain_query *query,
 }
 
 
+void Explain_select::print_explain_json(Explain_query *query,
+                                        Json_writer *writer, bool is_analyze)
+{
+  writer->add_member("query_block").start_object();
+  writer->add_member("select_id").add_ll(1);
+  if (message)
+  {
+    writer->add_member("table").start_object();
+    writer->add_member("message").add_str(message);
+    writer->end_object();
+  }
+  else
+  {
+    for (uint i=0; i< n_join_tabs; i++)
+    {
+      // psergey-todo: Need to honor SJM nests...
+      join_tabs[i]->print_explain_json(writer, is_analyze);
+    }
+  }
+  writer->end_object();
+}
+
+
 void Explain_table_access::push_extra(enum explain_extra_tag extra_tag)
 {
   extra_tags.append(extra_tag);
 }
 
 
+void Explain_table_access::fill_key_str(String *key_str)
+{
+  const CHARSET_INFO *cs= system_charset_info;
+  bool is_hj= (type == JT_HASH || type == JT_HASH_NEXT ||
+               type == JT_HASH_RANGE || type == JT_HASH_INDEX_MERGE);
+  const char *hash_key_prefix= "#hash#";
+
+  if (key.get_key_name())
+  {
+    if (is_hj)
+      key_str->append(hash_key_prefix, strlen(hash_key_prefix), cs);
+
+    key_str->append(key.get_key_name());
+
+    if (is_hj && type != JT_HASH)
+      key_str->append(':');
+  }
+
+  if (quick_info)
+  {
+    StringBuffer<64> buf2;
+    quick_info->print_key(&buf2);
+    key_str->append(buf2);
+  }
+  if (type == JT_HASH_NEXT)
+    key_str->append(hash_next_key.get_key_name());
+}
+
+
+void Explain_table_access::fill_key_len_str(String *key_len_str)
+{
+  bool is_hj= (type == JT_HASH || type == JT_HASH_NEXT ||
+               type == JT_HASH_RANGE || type == JT_HASH_INDEX_MERGE);
+  if (key.get_key_len() != (uint)-1)
+  {
+    char buf[64];
+    size_t length;
+    length= longlong10_to_str(key.get_key_len(), buf, 10) - buf;
+    key_len_str->append(buf, length);
+    if (is_hj && type != JT_HASH)
+      key_len_str->append(':');
+  }
+
+  if (quick_info)
+  {
+    StringBuffer<64> buf2;
+    quick_info->print_key_len(&buf2);
+    key_len_str->append(buf2);
+  }
+
+  if (type == JT_HASH_NEXT)
+  {
+    char buf[64];
+    size_t length;
+    length= longlong10_to_str(hash_next_key.get_key_len(), buf, 10) - buf;
+    key_len_str->append(buf, length);
+  }
+}
+
+
+double Explain_table_access::get_r_filtered()
+{
+  double r_filtered;
+  if (r_rows > 0)
+    r_filtered= 100.0 * (double)r_rows_after_table_cond / r_rows;
+  else
+    r_filtered= 100.0;
+  return r_filtered;
+}
+
+
 int Explain_table_access::print_explain(select_result_sink *output, uint8 explain_flags,
                                         bool is_analyze,
                                         uint select_id, const char *select_type,
                                         bool using_temporary, bool using_filesort)
 {
   const CHARSET_INFO *cs= system_charset_info;
   const char *hash_key_prefix= "#hash#";
   bool is_hj= (type == JT_HASH || type == JT_HASH_NEXT ||
               type == JT_HASH_RANGE || type == JT_HASH_INDEX_MERGE);
 
   List<Item> item_list;
   Item *item_null= new Item_null();
@@ -459,32 +641,15 @@ int Explain_table_access::print_explain(select_result_sink *output, uint8 explai
   push_str(&item_list, join_type_str[type]);
 
   /* `possible_keys` column */
-  if (possible_keys_str.length() > 0)
-    push_string(&item_list, &possible_keys_str);
-  else
+  StringBuffer<64> possible_keys_buf;
+  if (possible_keys.is_empty())
     item_list.push_back(item_null);
+  else
+    push_string_list(&item_list, possible_keys, &possible_keys_buf);
 
   /* `key` */
   StringBuffer<64> key_str;
-  if (key.get_key_name())
-  {
-    if (is_hj)
-      key_str.append(hash_key_prefix, strlen(hash_key_prefix), cs);
-
-    key_str.append(key.get_key_name());
-
-    if (is_hj && type != JT_HASH)
-      key_str.append(':');
-  }
-
-  if (quick_info)
-  {
-    StringBuffer<64> buf2;
-    quick_info->print_key(&buf2);
-    key_str.append(buf2);
-  }
-  if (type == JT_HASH_NEXT)
-    key_str.append(hash_next_key.get_key_name());
+  fill_key_str(&key_str);
 
   if (key_str.length() > 0)
     push_string(&item_list, &key_str);
@@ -493,31 +658,7 @@ int Explain_table_access::print_explain(select_result_sink *output, uint8 explai
 
   /* `key_len` */
   StringBuffer<64> key_len_str;
-
-  if (key.get_key_len() != (uint)-1)
-  {
-    char buf[64];
-    size_t length;
-    length= longlong10_to_str(key.get_key_len(), buf, 10) - buf;
-    key_len_str.append(buf, length);
-    if (is_hj && type != JT_HASH)
-      key_len_str.append(':');
-  }
-
-  if (quick_info)
-  {
-    StringBuffer<64> buf2;
-    quick_info->print_key_len(&buf2);
-    key_len_str.append(buf2);
-  }
-
-  if (type == JT_HASH_NEXT)
-  {
-    char buf[64];
-    size_t length;
-    length= longlong10_to_str(hash_next_key.get_key_len(), buf, 10) - buf;
-    key_len_str.append(buf, length);
-  }
+  fill_key_len_str(&key_len_str);
 
   if (key_len_str.length() > 0)
     push_string(&item_list, &key_len_str);
@@ -561,12 +702,7 @@ int Explain_table_access::print_explain(select_result_sink *output, uint8 explai
   /* `r_filtered` */
   if (is_analyze)
   {
-    double r_filtered;
-    if (r_rows > 0)
-      r_filtered= 100.0 * (double)r_rows_after_table_cond / r_rows;
-    else
-      r_filtered= 100.0;
-    item_list.push_back(new Item_float(r_filtered, 2));
+    item_list.push_back(new Item_float(get_r_filtered(), 2));
   }
 
   /* `Extra` */
@@ -608,6 +744,115 @@ int Explain_table_access::print_explain(select_result_sink *output, uint8 explai
 }
 
 
+static void write_item(Json_writer *writer, Item *item)
+{
+  char item_buf[256];
+  String str(item_buf, sizeof(item_buf), &my_charset_bin);
+  str.length(0);
+  item->print(&str ,QT_ORDINARY);
+  writer->add_str(str.c_ptr_safe());
+}
+
+
+void Explain_table_access::tag_to_json(Json_writer *writer, enum explain_extra_tag tag)
+{
+  switch (tag)
+  {
+    case ET_OPEN_FULL_TABLE:
+      writer->add_member("open_full_table").add_bool(true);
+      break;
+    case ET_SCANNED_0_DATABASES:
+      writer->add_member("scanned_databases").add_ll(0);
+      break;
+    case ET_SCANNED_1_DATABASE:
+      writer->add_member("scanned_databases").add_ll(1);
+      break;
+    case ET_SCANNED_ALL_DATABASES:
+      writer->add_member("scanned_databases").add_str("all");
+      break;
+    case ET_SKIP_OPEN_TABLE:
+      writer->add_member("skip_open_table").add_bool(true);
+      break;
+    case ET_OPEN_FRM_ONLY:
+      writer->add_member("open_frm_only").add_bool(true);
+      break;
+    case ET_USING_INDEX_CONDITION:
+      writer->add_member("index_condition");
+      write_item(writer, pushed_index_cond);
+      break;
+    case ET_USING_WHERE:
+      writer->add_member("attached_condition");
+      write_item(writer, where_cond);
+      break;
+    case ET_USING_INDEX:
+      writer->add_member("using_index").add_bool(true);
+      break;
+    case ET_USING:
+      // index merge: case ET_USING
+      break;
+    default:
+      DBUG_ASSERT(0);
+  }
+}
+
+
+void Explain_table_access::print_explain_json(Json_writer *writer,
+                                              bool is_analyze)
+{
+  writer->add_member("table").start_object();
+
+  writer->add_member("table_name").add_str(table_name);
+  // partitions
+  writer->add_member("access_type").add_str(join_type_str[type]);
+  if (!possible_keys.is_empty())
+  {
+    List_iterator_fast<char> it(possible_keys);
+    const char *name;
+    writer->add_member("possible_keys").start_array();
+    while ((name= it++))
+      writer->add_str(name);
+    writer->end_array();
+  }
+  /* `key` */
+  StringBuffer<64> key_str;
+  fill_key_str(&key_str);
+  if (key_str.length())
+    writer->add_member("key").add_str(key_str);
+
+  /* `used_key_parts` */
+  writer->add_member("used_key_parts").add_str("TODO");
+
+  StringBuffer<64> key_len_str;
+  fill_key_len_str(&key_len_str);
+  if (key_len_str.length())
+    writer->add_member("key_length").add_str(key_len_str);
+
+  if (rows_set)
+    writer->add_member("rows").add_ll(rows);
+
+  /* `r_rows` */
+  if (is_analyze)
+  {
+    ha_rows avg_rows= r_scans ? round((double) r_rows / r_scans): 0;
+    writer->add_member("r_rows").add_ll(avg_rows);
+  }
+
+  if (filtered_set)
+    writer->add_member("filtered").add_double(filtered);
+
+  /* `r_filtered` */
+  if (is_analyze)
+    writer->add_member("r_filtered").add_double(get_r_filtered());
+
+  for (int i=0; i < (int)extra_tags.elements(); i++)
+  {
+    tag_to_json(writer, extra_tags.at(i));
+  }
+
+  writer->end_object();
+}
+
+
 /*
   Elements in this array match members of enum Extra_tag, defined in
   sql_explain.h
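For a basic single-table SELECT, the members above translate into output along these lines (an illustrative sketch, not captured server output: table, key, row and condition values are invented, "used_key_parts" really is the literal "TODO" string, and the r_rows/r_filtered members only appear for ANALYZE FORMAT=JSON):

  {
    "query_block": {
      "select_id": 1,
      "table": {
        "table_name": "t1",
        "access_type": "ref",
        "possible_keys": ["a"],
        "key": "a",
        "used_key_parts": "TODO",
        "key_length": "5",
        "rows": 10,
        "filtered": 100.000000,
        "attached_condition": "t1.b > 3"
      }
    }
  }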
@@ -31,6 +31,7 @@ const int FAKE_SELECT_LEX_ID= (int)UINT_MAX;
 
 class Explain_query;
 
+class Json_writer;
 /*
   A node can be either a SELECT, or a UNION.
 */
@@ -61,7 +62,9 @@ public:
 
   virtual int print_explain(Explain_query *query, select_result_sink *output,
                             uint8 explain_flags, bool is_analyze)=0;
+  virtual void print_explain_json(Explain_query *query, Json_writer *writer,
+                                  bool is_analyze)= 0;
 
   int print_explain_for_children(Explain_query *query, select_result_sink *output,
                                  uint8 explain_flags, bool is_analyze);
   virtual ~Explain_node(){}
@@ -135,6 +138,8 @@ public:
 
   int print_explain(Explain_query *query, select_result_sink *output,
                     uint8 explain_flags, bool is_analyze);
+  void print_explain_json(Explain_query *query, Json_writer *writer,
+                          bool is_analyze);
 };
 
 
@@ -173,9 +178,13 @@ public:
   }
   int print_explain(Explain_query *query, select_result_sink *output,
                     uint8 explain_flags, bool is_analyze);
+  void print_explain_json(Explain_query *query, Json_writer *writer,
+                          bool is_analyze);
 
   const char *fake_select_type;
   bool using_filesort;
+private:
+  uint make_union_table_name(char *buf);
 };
 
 
@@ -247,6 +256,8 @@ public:
   /* Return tabular EXPLAIN output as a text string */
   bool print_explain_str(THD *thd, String *out_str, bool is_analyze);
 
+  void print_explain_json(select_result_sink *output, bool is_analyze);
+
   /* If true, at least part of EXPLAIN can be printed */
   bool have_query_plan() { return insert_plan || upd_del_plan|| get_node(1) != NULL; }
 
@@ -411,7 +422,8 @@ public:
   bool used_partitions_set;
 
   /* Empty string means "NULL" will be printed */
-  StringBuffer<32> possible_keys_str;
+  List<char> possible_keys;
+  //StringBuffer<32> possible_keys_str;
 
   /*
     Index use: key name and length.
@@ -460,11 +472,19 @@ public:
   EXPLAIN_BKA_TYPE bka_type;
 
   StringBuffer<32> firstmatch_table_name;
 
+  /*
+    Note: lifespan of WHERE condition is less than lifespan of this object.
+    THe below is valid if tags include "ET_USING_WHERE".
+  */
+  Item *where_cond;
+  Item *pushed_index_cond;
+
   int print_explain(select_result_sink *output, uint8 explain_flags,
                     bool is_analyze,
                     uint select_id, const char *select_type,
                     bool using_temporary, bool using_filesort);
+  void print_explain_json(Json_writer *writer, bool is_analyze);
 
   /* ANALYZE members*/
   ha_rows r_scans; /* How many scans were ran on this join_tab */
@@ -479,6 +499,10 @@ public:
 
 private:
   void append_tag_name(String *str, enum explain_extra_tag tag);
+  void fill_key_str(String *key_str);
+  void fill_key_len_str(String *key_len_str);
+  double get_r_filtered();
+  void tag_to_json(Json_writer *writer, enum explain_extra_tag tag);
 };
 
 
@@ -529,6 +553,8 @@ public:
   {}
   virtual int print_explain(Explain_query *query, select_result_sink *output,
                             uint8 explain_flags, bool is_analyze);
+  virtual void print_explain_json(Explain_query *query, Json_writer *writer, bool is_analyze)
+  { /* EXPLAIN_JSON_NOT_IMPL */}
 };
 
 
@@ -549,6 +575,9 @@ public:
 
   int print_explain(Explain_query *query, select_result_sink *output,
                     uint8 explain_flags, bool is_analyze);
+  void print_explain_json(Explain_query *query, Json_writer *writer,
+                          bool is_analyze)
+  { /* EXPLAIN_JSON_NOT_IMPL */}
 };
 
 
@@ -570,6 +599,8 @@ public:
 
   virtual int print_explain(Explain_query *query, select_result_sink *output,
                             uint8 explain_flags, bool is_analyze);
+  virtual void print_explain_json(Explain_query *query, Json_writer *writer, bool is_analyze)
+  { /* EXPLAIN_JSON_NOT_IMPL */}
 };
 
 
@@ -484,6 +484,7 @@ void lex_start(THD *thd)
     lex->select_lex.group_list_ptrs->clear();
   lex->describe= 0;
   lex->analyze_stmt= 0;
+  lex->explain_json= false;
   lex->subqueries= FALSE;
   lex->context_analysis_only= 0;
   lex->derived_tables= 0;
@@ -2461,6 +2461,7 @@ struct LEX: public Query_tables_list
   uint table_count;
   uint8 describe;
   bool analyze_stmt; /* TRUE<=> this is "ANALYZE $stmt" */
+  bool explain_json;
   /*
     A flag that indicates what kinds of derived tables are present in the
    query (0 if no derived tables, otherwise a combination of flags
@@ -97,6 +97,8 @@
 #include "log_slow.h"
 #include "sql_bootstrap.h"
 
+#include "my_json_writer.h"
+
 #define FLAGSTR(V,F) ((V)&(F)?#F" ":"")
 
 #ifdef WITH_ARIA_STORAGE_ENGINE
@@ -5233,20 +5235,39 @@ static bool execute_sqlcom_select(THD *thd, TABLE_LIST *all_tables)
         top-level LIMIT
       */
       result->reset_offset_limit();
-      thd->lex->explain->print_explain(result, thd->lex->describe,
-                                       thd->lex->analyze_stmt);
-      if (lex->describe & DESCRIBE_EXTENDED)
-      {
-        char buff[1024];
-        String str(buff,(uint32) sizeof(buff), system_charset_info);
-        str.length(0);
-        /*
-          The warnings system requires input in utf8, @see
-          mysqld_show_warnings().
-        */
-        thd->lex->unit.print(&str, QT_TO_SYSTEM_CHARSET);
-        push_warning(thd, Sql_condition::WARN_LEVEL_NOTE,
-                     ER_YES, str.c_ptr_safe());
-      }
+      if (thd->lex->explain_json)
+      {
+        /*
+          Json_writer writer;
+          writer.start_object();
+          thd->lex->explain->print_explain_json(&writer, thd->lex->analyze_stmt);
+          writer.end_object();
+
+          const CHARSET_INFO *cs= system_charset_info;
+          List<Item> item_list;
+          String *buf= &writer.output;
+          item_list.push_back(new Item_string(buf->ptr(), buf->length(), cs));
+          result->send_data(item_list);
+        */
+        thd->lex->explain->print_explain_json(result, thd->lex->analyze_stmt);
+      }
+      else
+      {
+        thd->lex->explain->print_explain(result, thd->lex->describe,
+                                         thd->lex->analyze_stmt);
+        if (lex->describe & DESCRIBE_EXTENDED)
+        {
+          char buff[1024];
+          String str(buff,(uint32) sizeof(buff), system_charset_info);
+          str.length(0);
+          /*
+            The warnings system requires input in utf8, @see
+            mysqld_show_warnings().
+          */
+          thd->lex->unit.print(&str, QT_TO_SYSTEM_CHARSET);
+          push_warning(thd, Sql_condition::WARN_LEVEL_NOTE,
+                       ER_YES, str.c_ptr_safe());
+        }
+      }
     }
 
@@ -22913,7 +22913,6 @@ void JOIN::clear()
 
 /*
   Print an EXPLAIN line with all NULLs and given message in the 'Extra' column
-  TODO: is_analyze
 */
 
 int print_explain_message_line(select_result_sink *result,
@@ -23201,20 +23200,24 @@ void explain_append_mrr_info(QUICK_RANGE_SELECT *quick, String *res)
 
 ///////////////////////////////////////////////////////////////////////////////
 // TODO: join with make_possible_keys_line ?
-void append_possible_keys(String *str, TABLE *table, key_map possible_keys)
+int append_possible_keys(MEM_ROOT *alloc, List<char> &list, TABLE *table,
+                         key_map possible_keys)
 {
   uint j;
   for (j=0 ; j < table->s->keys ; j++)
   {
     if (possible_keys.is_set(j))
     {
-      if (str->length())
-        str->append(',');
-      str->append(table->key_info[j].name,
-                  strlen(table->key_info[j].name),
-                  system_charset_info);
+      const char *key_name= table->key_info[j].name;
+      size_t len= strlen(key_name);
+      char *cp;
+      if (!(cp = (char*)alloc_root(alloc, len)))
+        return 1;
+      memcpy(cp, key_name, len+1);
+      list.push_back(cp);
     }
   }
+  return 0;
 }
 
 
@@ -23387,7 +23390,9 @@ int JOIN::save_explain_data_intern(Explain_query *output, bool need_tmp_table,
     eta->type= tab_type;
 
     /* Build "possible_keys" value */
-    append_possible_keys(&eta->possible_keys_str, table, tab->keys);
+    if (append_possible_keys(thd->mem_root, eta->possible_keys, table,
+                             tab->keys))
+      DBUG_RETURN(1);
 
     /* Build "key", "key_len", and "ref" */
     if (tab_type == JT_NEXT)
@@ -23544,7 +23549,10 @@ int JOIN::save_explain_data_intern(Explain_query *output, bool need_tmp_table,
 
     if (keyno != MAX_KEY && keyno == table->file->pushed_idx_cond_keyno &&
         table->file->pushed_idx_cond)
+    {
       eta->push_extra(ET_USING_INDEX_CONDITION);
+      eta->pushed_index_cond= table->file->pushed_idx_cond;
+    }
     else if (tab->cache_idx_cond)
       eta->push_extra(ET_USING_INDEX_CONDITION_BKA);
 
@@ -23585,7 +23593,11 @@ int JOIN::save_explain_data_intern(Explain_query *output, bool need_tmp_table,
         */
       }
       else
+      {
+        eta->where_cond= tab->select->cond? tab->select->cond:
+                                            tab->cache_select->cond;
         eta->push_extra(ET_USING_WHERE);
+      }
     }
   }
   if (table_list /* SJM bushes don't have table_list */ &&
@@ -1920,4 +1920,5 @@ ulong check_selectivity(THD *thd,
                         TABLE *table,
                         List<COND_STATISTIC> *conds);
 
+
 #endif /* SQL_SELECT_INCLUDED */
@@ -1178,6 +1178,7 @@ bool my_yyoverflow(short **a, YYSTYPE **b, ulong *yystacksize);
 %token  FORCE_SYM
 %token  FOREIGN                       /* SQL-2003-R */
 %token  FOR_SYM                       /* SQL-2003-R */
+%token  FORMAT_SYM
 %token  FOUND_SYM                     /* SQL-2003-R */
 %token  FROM
 %token  FULL                          /* SQL-2003-R */
@@ -1847,6 +1848,7 @@ bool my_yyoverflow(short **a, YYSTYPE **b, ulong *yystacksize);
         subselect_end select_var_list select_var_list_init help
         field_length opt_field_length
         opt_extended_describe shutdown
+        opt_format_json
         prepare prepare_src execute deallocate
         statement sp_suid
         sp_c_chistics sp_a_chistics sp_chistic sp_c_chistic xa
@@ -9756,6 +9758,18 @@ function_call_conflict:
             if ($$ == NULL)
               MYSQL_YYABORT;
           }
+        | FORMAT_SYM '(' expr ',' expr ')'
+          {
+            $$= new (thd->mem_root) Item_func_format($3, $5);
+            if ($$ == NULL)
+              MYSQL_YYABORT;
+          }
+        | FORMAT_SYM '(' expr ',' expr ',' expr ')'
+          {
+            $$= new (thd->mem_root) Item_func_format($3, $5, $7);
+            if ($$ == NULL)
+              MYSQL_YYABORT;
+          }
        | LAST_VALUE '(' expr_list ')'
          {
            $$= new (thd->mem_root) Item_func_last_value(* $3);
@@ -12768,16 +12782,34 @@ describe_command:
        ;
 
 analyze_stmt_command:
-          ANALYZE_SYM explainable_command
+          ANALYZE_SYM opt_format_json explainable_command
          {
            Lex->analyze_stmt= true;
          }
        ;
 
 opt_extended_describe:
-          /* empty */ {}
-        | EXTENDED_SYM { Lex->describe|= DESCRIBE_EXTENDED; }
+          EXTENDED_SYM { Lex->describe|= DESCRIBE_EXTENDED; }
        | PARTITIONS_SYM { Lex->describe|= DESCRIBE_PARTITIONS; }
+        | opt_format_json {}
        ;
 
+opt_format_json:
+          /* empty */ {}
+        | FORMAT_SYM EQ ident_or_text
+          {
+            if (!my_strcasecmp(system_charset_info, $3.str, "JSON"))
+              Lex->explain_json= true;
+            else if (!my_strcasecmp(system_charset_info, $3.str, "TRADITIONAL"))
+            {
+              DBUG_ASSERT(Lex->explain_json==false);
+            }
+            else
+            {
+              my_error(ER_UNKNOWN_EXPLAIN_FORMAT, MYF(0), $3.str);
+              MYSQL_YYABORT;
+            }
+          }
+        ;
+
 opt_describe_column:
@@ -14063,6 +14095,7 @@ keyword:
        | EXAMINED_SYM          {}
        | EXECUTE_SYM           {}
        | FLUSH_SYM             {}
+        | FORMAT_SYM            {}
        | GET_SYM               {}
        | HANDLER_SYM           {}
        | HELP_SYM              {}