File: log_slow_admin_statements_func.test

Package: percona-xtrabackup 2.2.3-2.1
# check that CSV engine was compiled in
--source include/have_csv.inc
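# (the slow log table used below, mysql.slow_log, is backed by the CSV storage engine)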

SET @old_log_output=                @@global.log_output;
SET @old_slow_query_log=            @@global.slow_query_log;
SET @old_long_query_time=           @@session.long_query_time;
SET @old_log_slow_admin_statements= @@global.log_slow_admin_statements;

USE test;
CREATE TABLE log_slow_admin_statements (
	i INT PRIMARY KEY,
	j INT
);

# enable slow query logging to both the log file and the mysql.slow_log table
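# long_query_time = 0 makes every statement qualify as "slow";
# log_slow_admin_statements additionally allows administrative statements
# (ALTER TABLE, OPTIMIZE TABLE, ANALYZE TABLE, ...) into the slow log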
SET GLOBAL log_output = 'file,table';
SET GLOBAL slow_query_log = on;
SET SESSION long_query_time = 0;
SET GLOBAL log_slow_admin_statements = on;

# check that ALTER, OPTIMIZE and ANALYZE statements against the table show up in the slow log
ALTER TABLE log_slow_admin_statements ADD COLUMN k INT;

# add some rows so OPTIMIZE runs
INSERT INTO log_slow_admin_statements VALUES (1,2,3), (4,5,6);
OPTIMIZE TABLE log_slow_admin_statements;

# and again so ANALYZE runs
INSERT INTO log_slow_admin_statements VALUES (7,8,9), (10,11,12);
ANALYZE TABLE log_slow_admin_statements;

DROP TABLE log_slow_admin_statements;

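# verify that the ALTER, OPTIMIZE and ANALYZE statements were captured in the slow log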
SELECT sql_text FROM mysql.slow_log WHERE sql_text LIKE '%TABLE log_slow_admin_statements%';

SET @@global.log_output=                @old_log_output;
SET @@global.slow_query_log=            @old_slow_query_log;
SET @@session.long_query_time=          @old_long_query_time;
SET @@global.log_slow_admin_statements= @old_log_slow_admin_statements;

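# clear the slow log entries generated by this test so they do not affect later tests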
TRUNCATE TABLE mysql.slow_log;