Commit 157c1bc5 authored by unknown

Merge work:/home/bk/mysql-4.0 into hundin.mysql.fi:/my/bk/mysql-4.0


mysql-test/mysql-test-run.sh:
  Auto merged
parents bcc51cb0 a96e1b69
@@ -43,7 +43,7 @@
**********************************************************************/
#define MTEST_VERSION "1.9"
#define MTEST_VERSION "1.10"
#include <global.h>
#include <my_sys.h>
@@ -84,7 +84,7 @@
static int record = 0, verbose = 0, silent = 0, opt_sleep=0;
static char *db = 0, *pass=0;
const char* user = 0, *host = 0, *unix_sock = 0;
static int port = 0;
static int port = 0, opt_big_test=0;
static uint start_lineno, *lineno;
static char **default_argv;
@@ -1410,6 +1410,7 @@ struct option long_options[] =
{
{"debug", optional_argument, 0, '#'},
{"database", required_argument, 0, 'D'},
{"big-test", no_argument, 0, 'B'},
{"help", no_argument, 0, '?'},
{"host", required_argument, 0, 'h'},
{"password", optional_argument, 0, 'p'},
@@ -1453,6 +1454,7 @@ void usage()
-u, --user=... User for login.\n\
-p[password], --password[=...]\n\
Password to use when connecting to server.\n\
-B, --big-test Define BIG_TEST to 1\n\
-D, --database=... Database to use.\n\
-P, --port=... Port number to use for connection.\n\
-S, --socket=... Socket file to use for connection.\n\
@@ -1475,7 +1477,7 @@ int parse_args(int argc, char **argv)
load_defaults("my",load_default_groups,&argc,&argv);
default_argv= argv;
while((c = getopt_long(argc, argv, "h:p::u:P:D:S:R:x:t:T:#:?rvVq",
while((c = getopt_long(argc, argv, "h:p::u:BP:D:S:R:x:t:T:#:?rvVq",
long_options, &option_index)) != EOF)
{
switch(c) {
@@ -1508,6 +1510,9 @@ int parse_args(int argc, char **argv)
else
tty_password=1;
break;
case 'B':
opt_big_test=1;
break;
case 'P':
port = atoi(optarg);
break;
@@ -1814,6 +1819,7 @@ static void var_from_env(const char* name, const char* def_val)
hash_insert(&var_hash, (byte*)v);
}
static void init_var_hash()
{
if (hash_init(&var_hash, 1024, 0, 0, get_var_key, var_free, MYF(0)))
@@ -1821,6 +1827,7 @@ static void init_var_hash()
var_from_env("MASTER_MYPORT", "9306");
var_from_env("SLAVE_MYPORT", "9307");
var_from_env("MYSQL_TEST_DIR", "/tmp");
var_from_env("BIG_TEST", opt_big_test ? "1" : "0");
}
int main(int argc, char** argv)
......
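The var_from_env() calls in the hunk above register defaults for variables that test files can reference as $NAME; a rough C sketch of the env-or-default lookup this presumably performs (assumed behaviour, simplified helper name, not the actual mysqltest code):

/*
  Sketch only: an exported BIG_TEST in the environment wins, otherwise the
  default derived from --big-test is used.
*/
#include <stdio.h>
#include <stdlib.h>

static const char *env_or_default(const char *name, const char *def_val)
{
  const char *val = getenv(name);           /* prefer the environment value */
  return val ? val : def_val;               /* fall back to the built-in default */
}

int main(void)
{
  int opt_big_test = 1;   /* as if mysqltest had been started with -B / --big-test */
  printf("BIG_TEST=%s\n",
         env_or_default("BIG_TEST", opt_big_test ? "1" : "0"));
  return 0;
}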
@@ -74,6 +74,9 @@ bool lib_dispatch_command(enum enum_server_command command, NET *net,
{
THD *thd=(THD *) net->vio->dest_thd;
thd->store_globals(); // Fix if more than one connect
thd->net.last_error[0]=0; // Clear error message
thd->net.last_errno=0;
net_new_transaction(&thd->net);
return dispatch_command(command, thd, (char *) arg, length + 1);
}
@@ -83,17 +86,17 @@ bool lib_dispatch_command(enum enum_server_command command, NET *net,
void
lib_connection_phase(NET * net, int phase)
{
THD * thd;
thd = (THD *)(net->vio->dest_thd);
if (thd)
{
switch (phase)
{
case 2:
check_connections2(thd);
break;
}
}
THD * thd;
thd = (THD *)(net->vio->dest_thd);
if (thd)
{
switch (phase)
{
case 2:
check_connections2(thd);
break;
}
}
}
}
void start_embedded_conn1(NET * net)
......
@@ -139,6 +139,8 @@ int vio_read(Vio * vio, gptr buf, int size)
uint4korr(vio->packets + sizeof(char *));
vio->packets = *(char **)vio->packets;
}
if (vio->where_in_packet + size > vio->end_of_packet)
size = vio->end_of_packet - vio->where_in_packet;
memcpy(buf, vio->where_in_packet, size);
vio->where_in_packet += size;
return (size);
......
@@ -152,6 +152,8 @@ while test $# -gt 0; do
DO_BENCH=1
NO_SLAVE=1
;;
--big*) # Actually --big-test
EXTRA_MYSQL_TEST_OPT="$EXTRA_MYSQL_TEST_OPT $1" ;;
--sleep=*)
EXTRA_MYSQL_TEST_OPT="$EXTRA_MYSQL_TEST_OPT $1"
SLEEP_TIME=`$ECHO "$1" | $SED -e "s;--sleep=;;"`
......
id1 t
1 3
2 2
id2 t
1 3
1 2
1 1
id3 t
2 3
2 2
2 1
1 3
1 2
1 1
Table Op Msg_type Msg_text
test.t1 check status OK
test.t2 check status OK
test.t3 check status OK
count(*)
0
count(*)
0
count(*)
0
count(*)
0
count(*)
0
count(*)
0
count(*)
0
count(*)
0
count(*)
0
-- source include/have_bdb.inc
# test for bug reported by Mark Steele
drop table if exists tblChange;
......
#
# Only run the test if we are using --big-test, because this test takes a
# long time
#
-- require r/big_test.require
eval select $BIG_TEST as using_big_test;
drop table if exists t1,t2,t3;
create table t1(id1 int not null auto_increment primary key, t char(12));
create table t2(id2 int not null, t char(12), index(id2));
@@ -25,11 +32,20 @@ delete t1.*, t2.*, t3.* from t1,t2,t3 where t1.id1 = t2.id2 and t2.id2 = t3.id3
check table t1, t2, t3;
select * from t1 where id1 > 9500;
select * from t2 where id2 > 9500;
select * from t3 where id3 > 9500;
select count(*) from t1 where id1 > 9500;
select count(*) from t2 where id2 > 9500;
select count(*) from t3 where id3 > 9500;
delete t1, t2, t3 from t1,t2,t3 where t1.id1 = t2.id2 and t2.id2 = t3.id3 and t1.id1 > 500;
select * from t1 where id1 > 500;
select * from t2 where id2 > 500;
select * from t3 where id3 > 500;
select count(*) from t1 where id1 > 500;
select count(*) from t2 where id2 > 500;
select count(*) from t3 where id3 > 500;
delete t1, t2, t3 from t1,t2,t3 where t1.id1 = t2.id2 and t2.id2 = t3.id3 and t1.id1 > 0;
# These queries will force a scan of the table
select count(*) from t1 where id1;
select count(*) from t2 where id2;
select count(*) from t3 where id3;
drop table t1,t2,t3;
@@ -674,7 +674,7 @@ int merge_buffers(SORTPARAM *param, IO_CACHE *from_file,
int error;
uint sort_length,offset;
ulong maxcount;
ha_rows count,max_rows;
ha_rows max_rows,org_max_rows;
my_off_t to_start_filepos;
uchar *strpos;
BUFFPEK *buffpek,**refpek;
@@ -685,12 +685,12 @@ int merge_buffers(SORTPARAM *param, IO_CACHE *from_file,
statistic_increment(filesort_merge_passes, &LOCK_status);
count=error=0;
error=0;
offset=(sort_length=param->sort_length)-param->ref_length;
maxcount=(ulong) (param->keys/((uint) (Tb-Fb) +1));
to_start_filepos=my_b_tell(to_file);
strpos=(uchar*) sort_buffer;
max_rows=param->max_rows;
org_max_rows=max_rows=param->max_rows;
if (init_queue(&queue,(uint) (Tb-Fb)+1,offsetof(BUFFPEK,key),0,
(int (*) (void *, byte *,byte*))
@@ -698,7 +698,6 @@ int merge_buffers(SORTPARAM *param, IO_CACHE *from_file,
DBUG_RETURN(1); /* purecov: inspected */
for (buffpek= Fb ; buffpek <= Tb ; buffpek++)
{
count+= buffpek->count;
buffpek->base= strpos;
buffpek->max_keys=maxcount;
strpos+= (uint) (error=(int) read_to_buffer(from_file,buffpek,
@@ -725,6 +724,8 @@ int merge_buffers(SORTPARAM *param, IO_CACHE *from_file,
error=1; goto err; /* purecov: inspected */
}
buffpek->key+=sort_length;
buffpek->mem_count--;
max_rows--;
queue_replaced(&queue); // Top element has been used
}
else
@@ -741,7 +742,8 @@ int merge_buffers(SORTPARAM *param, IO_CACHE *from_file,
buffpek=(BUFFPEK*) queue_top(&queue);
if (cmp) // Remove duplicates
{
if (!cmp(&sort_length, &(param->unique_buff), (uchar**) &buffpek->key))
if (!(*cmp)(&sort_length, &(param->unique_buff),
(uchar**) &buffpek->key))
goto skip_duplicate;
memcpy(param->unique_buff, (uchar*) buffpek->key,sort_length);
}
@@ -795,7 +797,7 @@ int merge_buffers(SORTPARAM *param, IO_CACHE *from_file,
break; /* One buffer have been removed */
}
else if (error == -1)
goto err; /* purecov: inspected */
goto err; /* purecov: inspected */
}
queue_replaced(&queue); /* Top element has been replaced */
}
@@ -803,6 +805,20 @@ int merge_buffers(SORTPARAM *param, IO_CACHE *from_file,
buffpek=(BUFFPEK*) queue_top(&queue);
buffpek->base= sort_buffer;
buffpek->max_keys=param->keys;
/*
As we know all entries in the buffer are unique, we only have to
check if the first one is the same as the last one we wrote
*/
if (cmp)
{
if (!(*cmp)(&sort_length, &(param->unique_buff), (uchar**) &buffpek->key))
{
buffpek->key+=sort_length; // Remove duplicate
--buffpek->mem_count;
}
}
do
{
if ((ha_rows) buffpek->mem_count > max_rows)
@@ -810,6 +826,7 @@ int merge_buffers(SORTPARAM *param, IO_CACHE *from_file,
buffpek->mem_count=(uint) max_rows;
buffpek->count=0; /* Don't read more */
}
max_rows-=buffpek->mem_count;
if (flag == 0)
{
if (my_b_write(to_file,(byte*) buffpek->key,
@@ -834,7 +851,7 @@ int merge_buffers(SORTPARAM *param, IO_CACHE *from_file,
!= -1 && error != 0);
end:
lastbuff->count=min(count,param->max_rows);
lastbuff->count=min(org_max_rows-max_rows,param->max_rows);
lastbuff->file_pos=to_start_filepos;
err:
delete_queue(&queue);
......
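The comment added to merge_buffers() above relies on every buffer holding unique keys, so when the last buffer is flushed only its first key can repeat the key most recently written. A minimal standalone C sketch of that final-flush duplicate check (hypothetical flush_unique()/KEY_LEN names, not MySQL's SORTPARAM/BUFFPEK structures):

#include <stdio.h>
#include <string.h>

#define KEY_LEN 4

/* Flush 'count' fixed-size keys, skipping the first one if it repeats
   the last key already written to the output. */
static size_t flush_unique(const char *keys, size_t count,
                           const char *last_written, FILE *out)
{
  size_t written = 0;
  if (count && last_written && memcmp(keys, last_written, KEY_LEN) == 0)
  {
    keys += KEY_LEN;                       /* drop the leading duplicate */
    count--;
  }
  for (; count; count--, keys += KEY_LEN)
  {
    fwrite(keys, 1, KEY_LEN, out);
    written++;
  }
  return written;
}

int main(void)
{
  const char last[KEY_LEN] = "ccc";                /* last key the merge wrote */
  const char buf[3 * KEY_LEN] = "ccc\0ddd\0eee";   /* remaining sorted, unique keys */
  size_t n = flush_unique(buf, 3, last, stdout);
  fprintf(stderr, "\nwrote %zu keys\n", n);        /* expect 2: leading "ccc" skipped */
  return 0;
}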