File: zlob_update_purge.test

--source include/big_test.inc
--source include/have_debug.inc
--source include/have_innodb_16k.inc
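
# Update a large JSON document that is stored externally as a compressed
# (zlob) LOB: with purge stopped, apply many small updates so that old LOB
# versions accumulate, then let purge run and wait until they are all
# cleaned up before dropping the table.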

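# Compression level 0 makes zlib store the compressed-table data without
# actually compressing it, presumably so that the on-disk size of the
# repeated strings below stays predictable.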
SET GLOBAL innodb_compression_level = 0;
CREATE TABLE t1 (j1 JSON) ENGINE=InnoDB ROW_FORMAT=compressed;
SHOW CREATE TABLE t1;
# Must be long enough to force external storage of the document.
# The length is also chosen so that the last "page" of json_doc's last
# stream is short enough to fit on a fragment page.
SET @long_str = REPEAT('abcdefghijklmnopqrstuvwxyz1234', 60000);
# Must be long enough to force a new LOB version (as opposed to storing a
# diff in the undo log), but short enough not to cause a complete rewrite
# of the blob.
SET @medium_str_1 = REPEAT('a', 200);
SET @medium_str_2 = REPEAT('b', 200);
SET @json_doc = CONCAT('["', @long_str, '","', @medium_str_1, '" ]');

INSERT INTO t1 (j1) VALUES (@json_doc);

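# Sanity check: read the small second array element back through the
# externally stored document.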
SELECT JSON_EXTRACT(j1, '$[1]') FROM t1;

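# Debug-only switch: stop the purge threads so that the old LOB versions
# created by the updates below accumulate instead of being removed right away.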
SET GLOBAL innodb_purge_stop_now = ON;

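# 100 small updates (50 iterations of the loop, 2 updates each): every update
# flips the second array element between the two medium strings and leaves
# behind another old version of the modified LOB data.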
--disable_query_log
--let i=0
while ($i < 50)
{
  UPDATE t1 SET j1 = JSON_REPLACE(j1, '$[1]', @medium_str_2);
  UPDATE t1 SET j1 = JSON_REPLACE(j1, '$[1]', @medium_str_1);
  --inc $i
}
--enable_query_log

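# Debug-only switch: resume purge so that it can start removing the
# accumulated old LOB versions.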
SET GLOBAL innodb_purge_run_now = ON;

# Give it plenty of time, so that the internal time limit of
# wait_innodb_all_purged.inc is much larger than the one provided by
# ./mtr --testcase-timeout=5.
--let wait_timeout=3600
--source include/wait_innodb_all_purged.inc
DROP TABLE t1;
SET GLOBAL innodb_compression_level = default;