revision-id: 8e804c24b79d2e31138dcc6d9dc4ae3ea98bd948 (mariadb-10.6.1-130-g8e804c24b79)
parent(s): 0fe2cfd45e616e93d47aa9e224db468f32dd391a
author: Sergei Petrunia
committer: Sergei Petrunia
timestamp: 2021-09-13 14:55:10 +0300
message:

MDEV-26590: Stack smashing/buffer overflow in Histogram_json_hb::parse

Provide buffer of sufficient size.

---
 mysql-test/main/statistics_json.test | 13 ++++++++++++-
 sql/opt_histogram_json.cc            | 13 ++++++++-----
 2 files changed, 20 insertions(+), 6 deletions(-)

diff --git a/mysql-test/main/statistics_json.test b/mysql-test/main/statistics_json.test
index f279296b7d5..ecaeddbb3e6 100644
--- a/mysql-test/main/statistics_json.test
+++ b/mysql-test/main/statistics_json.test
@@ -170,5 +170,16 @@ set histogram_size=10, histogram_type='json_hb';
 analyze table t10 persistent for all;
 select histogram from mysql.column_stats where table_name='t10' and db_name=database();
-
 drop table t10;
+
+--echo #
+--echo # MDEV-26590: Stack smashing/buffer overflow in Histogram_json_hb::parse upon UPDATE on table with long VARCHAR
+--echo #
+
+CREATE TABLE t1 (b INT, a VARCHAR(3176));
+INSERT INTO t1 VALUES (1,'foo'),(2,'bar');
+SET histogram_type= JSON_HB;
+ANALYZE TABLE t1 PERSISTENT FOR ALL;
+SELECT * FROM t1;
+drop table t1;
+

diff --git a/sql/opt_histogram_json.cc b/sql/opt_histogram_json.cc
index 979fca22a40..d4db1b95e53 100644
--- a/sql/opt_histogram_json.cc
+++ b/sql/opt_histogram_json.cc
@@ -272,6 +272,7 @@ bool Histogram_json_hb::parse(MEM_ROOT *mem_root, Field *field,
   int obj1_len;
   double cumulative_size= 0.0;
   size_t end_member_index= (size_t)-1;
+  StringBuffer<128> value_buf;
 
   if (JSV_OBJECT != json_type(hist_data, hist_data + hist_data_len,
                               &obj1, &obj1_len))
@@ -371,12 +372,13 @@ bool Histogram_json_hb::parse(MEM_ROOT *mem_root, Field *field,
 
     }
 
-    uchar buf[MAX_KEY_LENGTH];
+    //uchar buf[MAX_KEY_LENGTH];
     uint len_to_copy= field->key_length();
     field->store_text(val, val_len, &my_charset_bin);
-    uint bytes= field->get_key_image(buf, len_to_copy, Field::itRAW);
+    value_buf.alloc(field->pack_length());
+    uint bytes= field->get_key_image((uchar*)value_buf.ptr(), len_to_copy, Field::itRAW);
 
-    buckets.push_back({std::string((char*)buf, bytes), cumulative_size,
+    buckets.push_back({std::string(value_buf.ptr(), bytes), cumulative_size,
                        ndv_ll});
 
     // Read the "end" field
@@ -393,8 +395,9 @@ bool Histogram_json_hb::parse(MEM_ROOT *mem_root, Field *field,
     if (ret != JSV_NOTHING)
     {
       field->store_text(end_val, end_val_len, &my_charset_bin);
-      uint bytes= field->get_key_image(buf, len_to_copy, Field::itRAW);
-      last_bucket_end_endp.assign((char*)buf, bytes);
+      value_buf.alloc(field->pack_length());
+      uint bytes= field->get_key_image((uchar*)value_buf.ptr(), len_to_copy, Field::itRAW);
+      last_bucket_end_endp.assign(value_buf.ptr(), bytes);
       if (end_member_index == (size_t)-1)
         end_member_index= buckets.size();
     }
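
For readers outside the server tree, the following is a minimal standalone C++ sketch of the buffer-sizing pattern the patch switches to. FakeField and all of its members are invented stand-ins, not the real Field API; only the idea comes from the patch itself: size the destination buffer from the field (pack_length()) instead of writing into a fixed MAX_KEY_LENGTH stack array, which the key image of a VARCHAR(3176) column overflows.

// Illustrative sketch only: FakeField, its value member, and the 2-byte
// length prefix are invented stand-ins for the real Field API.
#include <cstring>
#include <string>
#include <vector>

struct FakeField
{
  std::string value;        // what store_text() would have put into the field

  // Upper bound on the packed size of this field's value
  // (loosely analogous to Field::pack_length()).
  size_t pack_length() const { return 2 + value.size(); }

  // Writes the packed image (2-byte length prefix + data) into buf and
  // returns the number of bytes written; the caller must supply at least
  // pack_length() bytes.
  size_t get_key_image(unsigned char *buf) const
  {
    buf[0]= (unsigned char) (value.size() & 0xFF);
    buf[1]= (unsigned char) ((value.size() >> 8) & 0xFF);
    std::memcpy(buf + 2, value.data(), value.size());
    return 2 + value.size();
  }
};

int main()
{
  FakeField field{std::string(3176, 'x')};  // like the VARCHAR(3176) column in the test

  // A fixed stack array of MAX_KEY_LENGTH bytes is smaller than this
  // field's ~3178-byte image and would be overrun (the reported stack
  // smashing). Instead, allocate exactly what the field may write:
  std::vector<unsigned char> value_buf(field.pack_length());
  size_t bytes= field.get_key_image(value_buf.data());

  std::string key(reinterpret_cast<char*>(value_buf.data()), bytes);
  return key.size() == field.pack_length() ? 0 : 1;
}

In the patch itself, StringBuffer<128> plays the role of the vector: its 128-byte inline buffer keeps short values off the heap, and alloc(field->pack_length()) only grows it when a wide column actually needs more space.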