File: mysql-bug45236.test

#
# Bug #45236: large blob inserts from mysqldump fail, possible memory issue ?
#
# This test consumes a significant amount of resources.
# Therefore it should be kept separate from other tests.
# Otherwise we might suffer from problems like
# Bug#43801 mysql.test takes too long, fails due to expired timeout
#           on debx86-b in PB
#

-- source include/not_embedded.inc

--disable_warnings
DROP TABLE IF EXISTS t1;
--enable_warnings

# Have to change the global variable as the session variable is
# read-only.
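# (A session-level "SET SESSION max_allowed_packet= ..." would presumably be
# rejected here with a read-only-variable error, hence the global change.)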
SET @old_max_allowed_packet= @@global.max_allowed_packet;
# ~1 MB blob length + some space for the rest of the INSERT query
SET @@global.max_allowed_packet = 1024 * 1024 + 1024;
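# (That is 1024 * 1024 + 1024 = 1,049,600 bytes: 1 MB for the blob plus
# 1 KB of headroom for the statement text around it.)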

# Create a new connection since the global max_allowed_packet
# has no effect on the current one
connect (con1, localhost, root,,);
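# Note: connect makes con1 the current connection, so the statements below
# run with the new packet limit picked up at connect time.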

CREATE TABLE t1(data LONGBLOB);
INSERT INTO t1 SELECT CONCAT(REPEAT('1', 1024*1024 - 27), 
                             "\'\r dummydb dummyhost");

let $outfile= $MYSQLTEST_VARDIR/tmp/bug41486.sql;
--error 0,1
remove_file $outfile;
--exec $MYSQL_DUMP --compact -t test t1 > $outfile
# Check that the mysql client does not interpret the "\r" sequence as a command
--exec $MYSQL --max_allowed_packet=1M test < $outfile 2>&1
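# (If the "\r" were misinterpreted, the reload above would fail while trying
# to reconnect to "dummyhost" and the captured output would differ from the
# recorded result.)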

DROP TABLE t1;

# Cleanup
disconnect con1;
--source include/wait_until_disconnected.inc
remove_file $outfile;
connection default;
SET @@global.max_allowed_packet = @old_max_allowed_packet;