Commit d6cf0934 authored by mhansson@dl145s.mysql.com

Merge dl145s.mysql.com:/users/mhansson/mysql/push/bug23856/my50-bug23856

into  dl145s.mysql.com:/users/mhansson/mysql/push/bug23856/mysql-5.0o-pushee
parents b50d17a9 6530f680
......@@ -763,4 +763,51 @@ Warnings:
Warning 1260 1 line(s) were cut by GROUP_CONCAT()
SET group_concat_max_len = DEFAULT;
DROP TABLE t1;
SET group_concat_max_len= 65535;
CREATE TABLE t1( a TEXT, b INTEGER );
INSERT INTO t1 VALUES ( 'a', 0 ), ( 'b', 1 );
SELECT GROUP_CONCAT( a ORDER BY b ) FROM t1;
GROUP_CONCAT( a ORDER BY b )
a,b
SELECT GROUP_CONCAT(DISTINCT a ORDER BY b) FROM t1;
GROUP_CONCAT(DISTINCT a ORDER BY b)
a,b
SELECT GROUP_CONCAT(DISTINCT a) FROM t1;
GROUP_CONCAT(DISTINCT a)
a,b
SET group_concat_max_len= 10;
SELECT GROUP_CONCAT(a ORDER BY b) FROM t1;
GROUP_CONCAT(a ORDER BY b)
a,b
SELECT GROUP_CONCAT(DISTINCT a ORDER BY b) FROM t1;
GROUP_CONCAT(DISTINCT a ORDER BY b)
a,b
SELECT GROUP_CONCAT(DISTINCT a) FROM t1;
GROUP_CONCAT(DISTINCT a)
a,b
SET group_concat_max_len= 65535;
CREATE TABLE t2( a TEXT );
INSERT INTO t2 VALUES( REPEAT( 'a', 5000 ) );
INSERT INTO t2 VALUES( REPEAT( 'b', 5000 ) );
INSERT INTO t2 VALUES( REPEAT( 'a', 5000 ) );
SELECT LENGTH( GROUP_CONCAT( DISTINCT a ) ) FROM t2;
LENGTH( GROUP_CONCAT( DISTINCT a ) )
10001
CREATE TABLE t3( a TEXT, b INT );
INSERT INTO t3 VALUES( REPEAT( 'a', 65534 ), 1 );
INSERT INTO t3 VALUES( REPEAT( 'a', 65535 ), 2 );
INSERT INTO t3 VALUES( REPEAT( 'a', 65536 ), 3 );
Warnings:
Warning 1265 Data truncated for column 'a' at row 1
SELECT LENGTH( GROUP_CONCAT( a ) ) FROM t3 WHERE b = 1;
LENGTH( GROUP_CONCAT( a ) )
65534
SELECT LENGTH( GROUP_CONCAT( a ) ) FROM t3 WHERE b = 2;
LENGTH( GROUP_CONCAT( a ) )
65535
SELECT LENGTH( GROUP_CONCAT( a ) ) FROM t3 WHERE b = 3;
LENGTH( GROUP_CONCAT( a ) )
65535
SET group_concat_max_len= DEFAULT;
DROP TABLE t1, t2, t3;
End of 5.0 tests
......@@ -520,5 +520,35 @@ SELECT GROUP_CONCAT( a ORDER BY b ) FROM t1;
SELECT GROUP_CONCAT( DISTINCT a ORDER BY b ) FROM t1;
SET group_concat_max_len = DEFAULT;
DROP TABLE t1;
# Bug #23856: GROUP_CONCAT and ORDER BY: junk from previous rows for query on I_S
#
SET group_concat_max_len= 65535;
CREATE TABLE t1( a TEXT, b INTEGER );
INSERT INTO t1 VALUES ( 'a', 0 ), ( 'b', 1 );
SELECT GROUP_CONCAT( a ORDER BY b ) FROM t1;
SELECT GROUP_CONCAT(DISTINCT a ORDER BY b) FROM t1;
SELECT GROUP_CONCAT(DISTINCT a) FROM t1;
SET group_concat_max_len= 10;
SELECT GROUP_CONCAT(a ORDER BY b) FROM t1;
SELECT GROUP_CONCAT(DISTINCT a ORDER BY b) FROM t1;
SELECT GROUP_CONCAT(DISTINCT a) FROM t1;
SET group_concat_max_len= 65535;
CREATE TABLE t2( a TEXT );
INSERT INTO t2 VALUES( REPEAT( 'a', 5000 ) );
INSERT INTO t2 VALUES( REPEAT( 'b', 5000 ) );
INSERT INTO t2 VALUES( REPEAT( 'a', 5000 ) );
SELECT LENGTH( GROUP_CONCAT( DISTINCT a ) ) FROM t2;
CREATE TABLE t3( a TEXT, b INT );
INSERT INTO t3 VALUES( REPEAT( 'a', 65534 ), 1 );
INSERT INTO t3 VALUES( REPEAT( 'a', 65535 ), 2 );
INSERT INTO t3 VALUES( REPEAT( 'a', 65536 ), 3 );
SELECT LENGTH( GROUP_CONCAT( a ) ) FROM t3 WHERE b = 1;
SELECT LENGTH( GROUP_CONCAT( a ) ) FROM t3 WHERE b = 2;
SELECT LENGTH( GROUP_CONCAT( a ) ) FROM t3 WHERE b = 3;
SET group_concat_max_len= DEFAULT;
DROP TABLE t1, t2, t3;
--echo End of 5.0 tests
......@@ -1108,6 +1108,11 @@ public:
class Field_varstring :public Field_longstr {
public:
/*
The maximum space available in a Field_varstring, in bytes. See
length_bytes.
*/
static const int MAX_SIZE= UINT_MAX16;
/* Number of bytes used to store the length (1 or 2) */
uint32 length_bytes;
Field_varstring(char *ptr_arg,
......
......@@ -530,7 +530,21 @@ void Copy_field::set(char *to,Field *from)
}
/*
To do:
If 'save' is set to true and the 'from' field is a blob, do_copy is set to
do_save_blob rather than do_conv_blob. The only differences between them
appear to be:
- do_save_blob allocates and uses an intermediate buffer before calling
Field_blob::store. Is this in order to trigger the call to
well_formed_copy_nchars, by changing the pointer copy->tmp.ptr()?
That call will take place anyway in all known cases.
- The above causes a truncation to MAX_FIELD_WIDTH. Is this the intended
effect? Truncation is handled by well_formed_copy_nchars anyway.
*/
void Copy_field::set(Field *to,Field *from,bool save)
{
if (to->type() == FIELD_TYPE_NULL)
......
......@@ -432,7 +432,7 @@ Field *Item_sum::create_tmp_field(bool group, TABLE *table,
2-byte length.
*/
if (max_length/collation.collation->mbmaxlen > 255 &&
- convert_blob_length < UINT_MAX16 && convert_blob_length)
+ convert_blob_length <= Field_varstring::MAX_SIZE && convert_blob_length)
return new Field_varstring(convert_blob_length, maybe_null,
name, table,
collation.collation);
......@@ -3268,15 +3268,21 @@ bool Item_func_group_concat::setup(THD *thd)
count_field_types(tmp_table_param,all_fields,0);
tmp_table_param->force_copy_fields= force_copy_fields;
DBUG_ASSERT(table == 0);
/*
Currently we have to force conversion of BLOB values to VARCHARs
if we are to store them in TREE objects used for ORDER BY and
DISTINCT. This leads to truncation if the BLOB's size exceeds
Field_varstring::MAX_SIZE.
*/
if (arg_count_order > 0 || distinct)
set_if_smaller(tmp_table_param->convert_blob_length,
Field_varstring::MAX_SIZE);
/*
We have to create a temporary table to get descriptions of fields
(types, sizes and so on).
Note that in the table, we first have the ORDER BY fields, then the
field list.
We need to pass set_sum_field as true so that BLOB values are stored in the
record buffer itself rather than as pointers to the values.
*/
if (!(table= create_tmp_table(thd, tmp_table_param, all_fields,
(ORDER*) 0, 0, TRUE,
......
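A minimal standalone sketch of the clamping above, not code from this commit: set_if_smaller() and MAX_SIZE here are local stand-ins assumed to match the server's set_if_smaller() helper and Field_varstring::MAX_SIZE (UINT_MAX16, i.e. 65535). It shows why a 65536-byte value can survive only as 65535 bytes once ORDER BY or DISTINCT forces BLOB-to-VARCHAR conversion, which is what the b = 3 row of the new t3 test expects.

#include <algorithm>
#include <cstdint>
#include <iostream>

// Local stand-in for Field_varstring::MAX_SIZE (UINT_MAX16 in the server).
static const uint32_t MAX_SIZE = 65535;

// Local stand-in for the server's set_if_smaller() helper.
template <typename T>
static void set_if_smaller(T &a, T b) { if (b < a) a = b; }

int main() {
  uint32_t convert_blob_length = 4294967295u;  // hypothetical large request
  bool order_by_or_distinct = true;            // arg_count_order > 0 || distinct

  if (order_by_or_distinct)
    set_if_smaller(convert_blob_length, MAX_SIZE);

  // A 65536-byte BLOB value is cut down to the converted field's capacity.
  uint32_t stored = std::min(uint32_t(65536), convert_blob_length);
  std::cout << stored << "\n";                 // prints 65535
  return 0;
}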
......@@ -8800,7 +8800,7 @@ Field* create_tmp_field_from_field(THD *thd, Field* org_field,
Make sure that the blob fits into a Field_varstring which has
2-byte length.
*/
- if (convert_blob_length && convert_blob_length < UINT_MAX16 &&
+ if (convert_blob_length && convert_blob_length <= Field_varstring::MAX_SIZE &&
(org_field->flags & BLOB_FLAG))
new_field= new Field_varstring(convert_blob_length,
org_field->maybe_null(),
......@@ -8891,7 +8891,8 @@ static Field *create_tmp_field_from_item(THD *thd, Item *item, TABLE *table,
2-byte length.
*/
else if (item->max_length/item->collation.collation->mbmaxlen > 255 &&
- convert_blob_length < UINT_MAX16 && convert_blob_length)
+ convert_blob_length <= Field_varstring::MAX_SIZE &&
+ convert_blob_length)
new_field= new Field_varstring(convert_blob_length, maybe_null,
item->name, table,
item->collation.collation);
......
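A side note on the boundary: replacing the strict comparison against UINT_MAX16 with a non-strict comparison against Field_varstring::MAX_SIZE in the hunks above moves the cut-off by one, so a requested conversion length of exactly 65535 now qualifies. The sketch below is not code from this commit; both constants are local stand-ins assumed to equal 65535 (UINT_MAX16), the length exercised by the b = 2 row of the new t3 test.

#include <cstdint>
#include <initializer_list>
#include <iostream>

// Local stand-ins; both equal 65535 in the server (UINT_MAX16).
static const uint32_t UINT_MAX16 = 65535;
static const uint32_t MAX_SIZE   = 65535;  // Field_varstring::MAX_SIZE

// Old condition: strictly less than UINT_MAX16.
static bool convert_before(uint32_t convert_blob_length) {
  return convert_blob_length && convert_blob_length < UINT_MAX16;
}

// New condition: less than or equal to Field_varstring::MAX_SIZE.
static bool convert_after(uint32_t convert_blob_length) {
  return convert_blob_length && convert_blob_length <= MAX_SIZE;
}

int main() {
  for (uint32_t len : {65534u, 65535u, 65536u})
    std::cout << len << " before=" << convert_before(len)
              << " after=" << convert_after(len) << "\n";
  // 65534 before=1 after=1
  // 65535 before=0 after=1
  // 65536 before=0 after=0
  return 0;
}

Under the old check, 65535 fell just outside the convertible range even though a Field_varstring with a 2-byte length can hold exactly that many bytes.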