Mirror of https://github.com/MariaDB/server.git
Merge epotemkin@bk-internal.mysql.com:/home/bk/mysql-4.1
into sunlight.local:/local_work/15560-bug-4.1-mysql

sql/sql_table.cc:
  Auto merged
commit ff4de90501
8 changed files with 57 additions and 14 deletions
@@ -589,3 +589,18 @@ GROUP_CONCAT(a ORDER BY a)
 ,x
 ,z
 DROP TABLE t1;
+create table t1(f1 int);
+insert into t1 values(1),(2),(3);
+select f1, group_concat(f1+1) from t1 group by f1 with rollup;
+f1 group_concat(f1+1)
+1 2
+2 3
+3 4
+NULL 2,3,4
+select count(distinct (f1+1)) from t1 group by f1 with rollup;
+count(distinct (f1+1))
+1
+1
+1
+3
+drop table t1;
@@ -382,4 +382,12 @@ SELECT GROUP_CONCAT(a ORDER BY a) FROM t1 GROUP BY id;
 
 DROP TABLE t1;
 
+#
+# Bug #15560: GROUP_CONCAT wasn't ready for WITH ROLLUP queries
+#
+create table t1(f1 int);
+insert into t1 values(1),(2),(3);
+select f1, group_concat(f1+1) from t1 group by f1 with rollup;
+select count(distinct (f1+1)) from t1 group by f1 with rollup;
+drop table t1;
 # End of 4.1 tests
@@ -1185,6 +1185,7 @@ void Item_sum_count_distinct::make_unique()
   original= 0;
   use_tree= 0; // to prevent delete_tree call on uninitialized tree
   tree= &tree_base;
+  force_copy_fields= 1;
 }
 
 
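
The two make_unique() hunks (this one and the GROUP_CONCAT one further down) follow the same pattern: the ROLLUP copy of the aggregate clears its tree flag so that cleanup never frees a tree the copy did not build, and raises force_copy_fields for the temporary table it will create. Below is a minimal, self-contained C++ sketch of that pattern with invented names (DistinctCounter, setup, cleanup); it is an illustration, not the server code.

// Illustrative sketch only, not MySQL source.
#include <cstdio>

struct DistinctCounter
{
  bool use_tree= false;            // true only after setup() built the tree
  bool force_copy_fields= false;   // tell the tmp table to copy fields verbatim
  int *tree= nullptr;

  void setup() { tree= new int[16](); use_tree= true; }

  // Analogue of make_unique(): the ROLLUP copy starts with no tree of its own
  // and must copy fields instead of re-evaluating them.
  void make_unique() { use_tree= false; tree= nullptr; force_copy_fields= true; }

  void cleanup()
  {
    if (use_tree)                  // guard against deleting an uninitialized tree
      delete[] tree;
    use_tree= false;
    tree= nullptr;
  }
};

int main()
{
  DistinctCounter original;
  original.setup();

  DistinctCounter copy= original;  // shallow copy, like the Item copy constructors
  copy.make_unique();              // without this, copy.cleanup() would double-free

  copy.cleanup();
  original.cleanup();
  std::printf("copy forces field copying: %d\n", (int) copy.force_copy_fields);
  return 0;
}
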
@@ -1219,6 +1220,7 @@ bool Item_sum_count_distinct::setup(THD *thd)
     free_tmp_table(thd, table);
     tmp_table_param->cleanup();
   }
+  tmp_table_param->force_copy_fields= force_copy_fields;
   if (!(table= create_tmp_table(thd, tmp_table_param, list, (ORDER*) 0, 1,
                                 0,
                                 select_lex->options | thd->options,
@@ -1724,6 +1726,7 @@ Item_func_group_concat::Item_func_group_concat(bool is_distinct,
                                                String *is_separator)
   :Item_sum(), tmp_table_param(0), max_elements_in_tree(0), warning(0),
    key_length(0), tree_mode(0), distinct(is_distinct), warning_for_row(0),
+   force_copy_fields(0),
    separator(is_separator), tree(&tree_base), table(0),
    order(0), tables_list(0),
    arg_count_order(0), arg_count_field(0),
@@ -1785,6 +1788,7 @@ Item_func_group_concat::Item_func_group_concat(THD *thd,
    tree_mode(item->tree_mode),
    distinct(item->distinct),
    warning_for_row(item->warning_for_row),
+   force_copy_fields(item->force_copy_fields),
    separator(item->separator),
    tree(item->tree),
    table(item->table),
@@ -2004,6 +2008,7 @@ bool Item_func_group_concat::setup(THD *thd)
     free_tmp_table(thd, table);
     tmp_table_param->cleanup();
   }
+  tmp_table_param->force_copy_fields= force_copy_fields;
   /*
     We have to create a temporary table to get descriptions of fields
     (types, sizes and so on).
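
In both setup() hunks (Item_sum_count_distinct above and Item_func_group_concat here) the item-level flag is copied into the temporary-table parameters just before create_tmp_table() is called. The following is a minimal sketch of that hand-off; AggregateState, TmpTableParams and build_tmp_table are invented stand-ins for the server classes, so treat it as an illustration under those assumptions.

// Illustrative sketch only, not MySQL source.
#include <iostream>

struct TmpTableParams
{
  bool force_copy_fields= false;   // mirrors the new TMP_TABLE_PARAM flag
};

struct TmpTable
{
  bool fields_are_copied;
};

// Stand-in for create_tmp_table(): honours the flag carried in the params.
TmpTable build_tmp_table(const TmpTableParams &params)
{
  return TmpTable{params.force_copy_fields};
}

struct AggregateState
{
  bool force_copy_fields= false;   // raised by make_unique() for ROLLUP copies
  TmpTableParams params;

  TmpTable setup()
  {
    // The line mirrored from the patch:
    //   tmp_table_param->force_copy_fields= force_copy_fields;
    params.force_copy_fields= force_copy_fields;
    return build_tmp_table(params);
  }
};

int main()
{
  AggregateState rollup_copy;
  rollup_copy.force_copy_fields= true;
  TmpTable t= rollup_copy.setup();
  std::cout << "fields copied verbatim: " << std::boolalpha
            << t.fields_are_copied << '\n';
  return 0;
}
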
@@ -2079,6 +2084,7 @@ void Item_func_group_concat::make_unique()
   original= 0;
   tree_mode= 0; // to prevent delete_tree call on uninitialized tree
   tree= &tree_base;
+  force_copy_fields= 1;
 }
 
 
@@ -185,6 +185,7 @@ class Item_sum_count_distinct :public Item_sum_int
   TMP_TABLE_PARAM *tmp_table_param;
   TREE tree_base;
   TREE *tree;
+  bool force_copy_fields;
   /*
     Following is 0 normal object and pointer to original one for copy
     (to correctly free resources)
@@ -226,15 +227,16 @@ class Item_sum_count_distinct :public Item_sum_int
   public:
   Item_sum_count_distinct(List<Item> &list)
     :Item_sum_int(list), table(0), used_table_cache(~(table_map) 0),
-     tmp_table_param(0), tree(&tree_base), original(0), use_tree(0),
-     always_null(0)
+     tmp_table_param(0), tree(&tree_base), force_copy_fields(0), original(0),
+     use_tree(0), always_null(0)
   { quick_group= 0; }
   Item_sum_count_distinct(THD *thd, Item_sum_count_distinct *item)
     :Item_sum_int(thd, item), table(item->table),
      used_table_cache(item->used_table_cache),
      field_lengths(item->field_lengths),
      tmp_table_param(item->tmp_table_param),
-     tree(item->tree), original(item), key_length(item->key_length),
+     tree(item->tree), force_copy_fields(item->force_copy_fields),
+     original(item), key_length(item->key_length),
      max_elements_in_tree(item->max_elements_in_tree),
      rec_offset(item->rec_offset), use_tree(item->use_tree),
      always_null(item->always_null)
@@ -685,6 +687,7 @@ class Item_func_group_concat : public Item_sum
   bool distinct;
   bool warning_for_row;
   bool always_null;
+  bool force_copy_fields;
 
   friend int group_concat_key_cmp_with_distinct(void* arg, byte* key1,
                                                 byte* key2);
@@ -537,8 +537,9 @@ int mysql_union(THD *thd, LEX *lex, select_result *result,
                 SELECT_LEX_UNIT *unit);
 int mysql_handle_derived(LEX *lex);
 Field *create_tmp_field(THD *thd, TABLE *table,Item *item, Item::Type type,
-                        Item ***copy_func, Field **from_field,
-                        bool group, bool modify_item, uint convert_blob_length);
+                        Item ***copy_func, Field **from_field,
+                        bool group, bool modify_item, uint convert_blob_length,
+                        bool make_copy_field);
 int mysql_prepare_table(THD *thd, HA_CREATE_INFO *create_info,
                         List<create_field> &fields,
                         List<Key> &keys, uint &db_options,
@@ -1334,10 +1334,11 @@ public:
   bool using_indirect_summary_function;
   /* If >0 convert all blob fields to varchar(convert_blob_length) */
   uint convert_blob_length;
-
+  bool force_copy_fields;
   TMP_TABLE_PARAM()
     :copy_field(0), group_parts(0),
-     group_length(0), group_null_parts(0), convert_blob_length(0)
+     group_length(0), group_null_parts(0), convert_blob_length(0),
+     force_copy_fields(0)
   {}
   ~TMP_TABLE_PARAM()
   {
@@ -4984,7 +4984,8 @@ static Field* create_tmp_field_from_item(THD *thd, Item *item, TABLE *table,
 
 Field *create_tmp_field(THD *thd, TABLE *table,Item *item, Item::Type type,
                         Item ***copy_func, Field **from_field,
-                        bool group, bool modify_item, uint convert_blob_length)
+                        bool group, bool modify_item, uint convert_blob_length,
+                        bool make_copy_field)
 {
   switch (type) {
   case Item::SUM_FUNC_ITEM:
@@ -5071,7 +5072,13 @@ Field *create_tmp_field(THD *thd, TABLE *table,Item *item, Item::Type type,
   case Item::REF_ITEM:
   case Item::NULL_ITEM:
   case Item::VARBIN_ITEM:
-    return create_tmp_field_from_item(thd, item, table, copy_func, modify_item,
+    if (make_copy_field)
+    {
+      DBUG_ASSERT(((Item_result_field*)item)->result_field);
+      *from_field= ((Item_result_field*)item)->result_field;
+    }
+    return create_tmp_field_from_item(thd, item, table, (make_copy_field ? 0 :
+                                                         copy_func), modify_item,
                                       convert_blob_length);
   case Item::TYPE_HOLDER:
     return ((Item_type_holder *)item)->make_field_by_type(table);
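
The REF_ITEM branch above is the heart of the fix: when make_copy_field is set, the caller gets the item's already-materialized result_field back through *from_field, and no copy function is registered (copy_func is passed as 0), so the value is copied into the temporary table rather than re-evaluated for the ROLLUP row. A small stand-alone C++ sketch of that branch, with invented types (Field, Item, make_tmp_field) rather than the real server ones:

// Illustrative sketch only, not MySQL source.
#include <functional>
#include <iostream>
#include <string>
#include <vector>

struct Field { std::string name; };

struct Item
{
  Field result_field;                       // already materialized by an earlier pass
  std::string value() const { return "evaluated(" + result_field.name + ")"; }
};

Field make_tmp_field(const Item &item,
                     std::vector<std::function<std::string()> > *copy_func,
                     const Field **from_field,
                     bool make_copy_field)
{
  if (make_copy_field)
  {
    // Hand back the existing result field; the tmp table copies it byte-for-byte.
    *from_field= &item.result_field;
  }
  // With make_copy_field set, no evaluation callback is registered
  // (the patch passes 0 instead of copy_func).
  if (!make_copy_field && copy_func)
    copy_func->push_back([&item] { return item.value(); });
  return Field{"tmp_" + item.result_field.name};
}

int main()
{
  Item item{{"group_concat_result"}};
  std::vector<std::function<std::string()> > copy_funcs;
  const Field *from= nullptr;

  make_tmp_field(item, &copy_funcs, &from, /*make_copy_field=*/ true);
  std::cout << "registered copy funcs: " << copy_funcs.size()
            << ", reused field: " << (from ? from->name : "none") << '\n';
  return 0;
}
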
@@ -5110,7 +5117,7 @@ create_tmp_table(THD *thd,TMP_TABLE_PARAM *param,List<Item> &fields,
   Item **copy_func;
   MI_COLUMNDEF *recinfo;
   uint temp_pool_slot=MY_BIT_NONE;
-
+  bool force_copy_fields= param->force_copy_fields;
   DBUG_ENTER("create_tmp_table");
   DBUG_PRINT("enter",("distinct: %d save_sum_fields: %d rows_limit: %lu group: %d",
              (int) distinct, (int) save_sum_fields,
@@ -5241,7 +5248,7 @@ create_tmp_table(THD *thd,TMP_TABLE_PARAM *param,List<Item> &fields,
       Field *new_field=
         create_tmp_field(thd, table, arg, arg->type(), &copy_func,
                          tmp_from_field, group != 0,not_all_columns,
-                         param->convert_blob_length);
+                         param->convert_blob_length, 0);
       if (!new_field)
         goto err;                               // Should be OOM
       tmp_from_field++;
@@ -5279,8 +5286,10 @@ create_tmp_table(THD *thd,TMP_TABLE_PARAM *param,List<Item> &fields,
       */
       Field *new_field= create_tmp_field(thd, table, item, type, &copy_func,
                                          tmp_from_field, group != 0,
-                                         not_all_columns || group !=0,
-                                         param->convert_blob_length);
+                                         !force_copy_fields &&
+                                         (not_all_columns || group !=0),
+                                         param->convert_blob_length,
+                                         force_copy_fields);
       if (!new_field)
       {
         if (thd->is_fatal_error)
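
At this call site force_copy_fields does double duty: it is passed through as make_copy_field, and it also switches off modify_item via the !force_copy_fields && (...) expression, so the original item keeps pointing at its own result field while the new temporary table merely receives a copy. A tiny sketch of that boolean plumbing, using invented names (FieldRequest, plan_field):

// Illustrative sketch only, not MySQL source.
#include <cassert>

struct FieldRequest
{
  bool modify_item;      // let the factory repoint the item at the new field
  bool make_copy_field;  // just copy the item's existing result field
};

// Mirrors the changed call above:
//   modify_item     = !force_copy_fields && (not_all_columns || group)
//   make_copy_field = force_copy_fields
FieldRequest plan_field(bool not_all_columns, bool group, bool force_copy_fields)
{
  return FieldRequest{ !force_copy_fields && (not_all_columns || group),
                       force_copy_fields };
}

int main()
{
  // A ROLLUP copy of GROUP_CONCAT: fields are copied, items are left untouched.
  FieldRequest r= plan_field(true, true, /*force_copy_fields=*/ true);
  assert(!r.modify_item && r.make_copy_field);
  return 0;
}
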
@@ -1585,7 +1585,7 @@ TABLE *create_table_from_items(THD *thd, HA_CREATE_INFO *create_info,
       field=item->tmp_table_field(&tmp_table);
     else
       field=create_tmp_field(thd, &tmp_table, item, item->type(),
-                             (Item ***) 0, &tmp_field, 0, 0, 0);
+                             (Item ***) 0, &tmp_field, 0, 0, 0, 0);
     if (!field ||
         !(cr_field=new create_field(field,(item->type() == Item::FIELD_ITEM ?
                                     ((Item_field *)item)->field :