diff --git a/mysql-test/main/long_unique_bugs.result b/mysql-test/main/long_unique_bugs.result
index eb60f79ac67..ebc7caa839d 100644
--- a/mysql-test/main/long_unique_bugs.result
+++ b/mysql-test/main/long_unique_bugs.result
@@ -454,5 +454,29 @@ create table t1 (f text not null, unique (f));
 insert into t1 (f) select 'f';
 drop table t1;
 #
+# MDEV-32012 hash unique corrupts index on virtual blobs
+#
+create table t1 (
+f1 varchar(25),
+v1 mediumtext generated always as (concat('f1:', f1)) virtual,
+unique key (f1) using hash,
+key (v1(1000))
+);
+flush status;
+insert ignore t1 (f1) values (9599),(94410);
+show status like 'handler_read_next';
+Variable_name	Value
+Handler_read_next	1
+# the above MUST BE =1
+check table t1 extended;
+Table	Op	Msg_type	Msg_text
+test.t1	check	status	OK
+update t1 set f1=100 where f1=9599;
+update t1 set f1=9599 where f1=100;
+check table t1 extended;
+Table	Op	Msg_type	Msg_text
+test.t1	check	status	OK
+drop table t1;
+#
 # End of 10.4 tests
 #
diff --git a/mysql-test/main/long_unique_bugs.test b/mysql-test/main/long_unique_bugs.test
index 0359ac7e7ed..d9ab36d588e 100644
--- a/mysql-test/main/long_unique_bugs.test
+++ b/mysql-test/main/long_unique_bugs.test
@@ -455,6 +455,30 @@ create table t1 (f text not null, unique (f));
 insert into t1 (f) select 'f';
 drop table t1;
 
+--echo #
+--echo # MDEV-32012 hash unique corrupts index on virtual blobs
+--echo #
+create table t1 (
+  f1 varchar(25),
+  v1 mediumtext generated always as (concat('f1:', f1)) virtual,
+  unique key (f1) using hash,
+  key (v1(1000))
+);
+flush status;
+insert ignore t1 (f1) values (9599),(94410);
+# handler_read_next must be 1 below, meaning there was a hash collision above.
+# if a change in the hash function causes these values not to collide anymore,
+# the test must be adjusted to use some other values that collide.
+# to find a collision add an assert into check_duplicate_long_entry_key()
+# and run, like, insert...select * seq_from_1_to_1000000000
+show status like 'handler_read_next';
+--echo # the above MUST BE =1
+check table t1 extended;
+update t1 set f1=100 where f1=9599;
+update t1 set f1=9599 where f1=100;
+check table t1 extended;
+drop table t1;
+
 --echo #
 --echo # End of 10.4 tests
 --echo #
diff --git a/sql/handler.cc b/sql/handler.cc
index 2b081df8326..ca5a3e28c28 100644
--- a/sql/handler.cc
+++ b/sql/handler.cc
@@ -6658,6 +6658,7 @@ static int check_duplicate_long_entry_key(TABLE *table, handler *h,
   KEY *key_info= table->key_info + key_no;
   hash_field= key_info->key_part->field;
   uchar ptr[HA_HASH_KEY_LENGTH_WITH_NULL];
+  String *blob_storage;
 
   DBUG_ASSERT((key_info->flags & HA_NULL_PART_KEY &&
                key_info->key_length == HA_HASH_KEY_LENGTH_WITH_NULL)
@@ -6675,6 +6676,8 @@ static int check_duplicate_long_entry_key(TABLE *table, handler *h,
   result= h->ha_index_init(key_no, 0);
   if (result)
     return result;
+  blob_storage= (String*)alloca(sizeof(String)*table->s->virtual_not_stored_blob_fields);
+  table->remember_blob_values(blob_storage);
   store_record(table, check_unique_buf);
   result= h->ha_index_read_map(table->record[0],
                                ptr, HA_WHOLE_KEY, HA_READ_KEY_EXACT);
@@ -6685,6 +6688,13 @@
     Item_func_hash * temp= (Item_func_hash *)hash_field->vcol_info->expr;
     Item ** arguments= temp->arguments();
     uint arg_count= temp->argument_count();
+    // restore pointers after swap_values in TABLE::update_virtual_fields()
+    for (Field **vf= table->vfield; *vf; vf++)
+    {
+      if (!(*vf)->stored_in_db() && (*vf)->flags & BLOB_FLAG &&
+          bitmap_is_set(table->read_set, (*vf)->field_index))
+        ((Field_blob*)*vf)->swap_value_and_read_value();
+    }
     do
     {
       my_ptrdiff_t diff= table->check_unique_buf - new_rec;
@@ -6731,6 +6741,7 @@ exit:
     }
   }
   restore_record(table, check_unique_buf);
+  table->restore_blob_values(blob_storage);
  h->ha_index_end();
  return error;
 }
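Not part of the patch itself: judging from the added calls and the MDEV title, the collision probe via ha_index_read_map() reads a candidate row into record[0] and re-evaluates the virtual columns, and the buffer swap done for virtual Field_blob values leaves the row being inserted pointing at stale data, which is what corrupts the key (v1(1000)) index. The fix saves the blob buffers before the probe (remember_blob_values), undoes the swap once a candidate row has been read (swap_value_and_read_value), and restores the buffers before returning (restore_blob_values). The self-contained C++ sketch below illustrates only that save/swap/restore pattern; BlobField, remember_blobs() and restore_blobs() are hypothetical stand-ins for the server classes, and owned std::string values stand in for the String buffers the real fields merely point into.

#include <cassert>
#include <cstddef>
#include <string>
#include <vector>

// Hypothetical stand-in for Field_blob: "value" models the buffer holding the
// row being inserted, "read_value" the buffer used while reading another row.
struct BlobField
{
  std::string value;
  std::string read_value;
  void swap_value_and_read_value() { value.swap(read_value); }
};

// cf. TABLE::remember_blob_values(): capture every virtual blob buffer.
static std::vector<std::string> remember_blobs(const std::vector<BlobField> &fields)
{
  std::vector<std::string> saved;
  for (const BlobField &f : fields)
    saved.push_back(f.value);
  return saved;
}

// cf. TABLE::restore_blob_values(): put the captured buffers back.
static void restore_blobs(std::vector<BlobField> &fields,
                          const std::vector<std::string> &saved)
{
  for (std::size_t i= 0; i < fields.size(); i++)
    fields[i].value= saved[i];
}

// Models the collision probe: reading the colliding row swaps the buffers and
// re-computes the virtual column, clobbering what the inserted row points to.
static void lookup_colliding_row(std::vector<BlobField> &fields)
{
  for (BlobField &f : fields)
  {
    f.swap_value_and_read_value();   // what the virtual-column update does
    f.value= "f1:94410";             // value computed for the colliding row
  }
}

int main()
{
  std::vector<BlobField> vblobs{ BlobField{"f1:9599", ""} };

  std::vector<std::string> saved= remember_blobs(vblobs);  // before the probe
  lookup_colliding_row(vblobs);
  for (BlobField &f : vblobs)
    f.swap_value_and_read_value();   // undo the swap, as the added loop does
  restore_blobs(vblobs, saved);      // before returning to the caller

  assert(vblobs[0].value == "f1:9599");  // the inserted row's blob is intact
  return 0;
}

In the real server the fields effectively hold pointers into String buffers rather than owned strings, so skipping the restore would mean the later maintenance of the key (v1(1000)) index reads memory that no longer holds the inserted row's value; the sketch uses copies only to keep the example compilable on its own.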