File: file_size_bytes_large.test_slow

package info (click to toggle)
duckdb 1.5.1-2
  • links: PTS, VCS
  • area: main
  • in suites:
  • size: 299,196 kB
  • sloc: cpp: 865,414; ansic: 57,292; python: 18,871; sql: 12,663; lisp: 11,751; yacc: 7,412; lex: 1,682; sh: 747; makefile: 558
file content (45 lines) | stat: -rw-r--r-- 1,167 bytes parent folder | download | duplicates (3)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
# name: test/sql/copy/file_size_bytes_large.test_slow
# description: test FILE_SIZE_BYTES parameter for COPY (slow test)
# group: [copy]

# Pin the thread count so the number of writer threads (and therefore the
# file-count assertions below, especially with PER_THREAD_OUTPUT) is stable
# across machines.
statement ok
set threads=4

# 10M two-column rows of the integers 0..9999999 — large enough that a 1MB
# per-file cap forces many output files.
statement ok
CREATE TABLE bigdata AS SELECT i AS col_a, i AS col_b FROM range(0, 10000000) tbl(i);

# Case 1: plain COPY with a soft 1MB cap per file. The target path is treated
# as a directory; the writer rotates to a new CSV file once the cap is hit.
statement ok
COPY (FROM bigdata) TO '__TEST_DIR__/file_size_bytes_csv42' (FORMAT CSV, FILE_SIZE_BYTES '1mb');

# No rows may be lost or duplicated across the rotated files.
query I
SELECT COUNT(*) FROM read_csv('__TEST_DIR__/file_size_bytes_csv42/*.csv')
----
10000000

# Content sanity check: the average of 0..9999999 is 4999999.5 for both
# columns, so corrupted or truncated files would shift the result.
query II
SELECT AVG(col_a), AVG(col_b) FROM read_csv('__TEST_DIR__/file_size_bytes_csv42/*.csv')
----
4999999.5	4999999.5

# The 1MB cap should split the output into roughly 100-200 files; the wide
# range keeps the assertion robust to buffer/flush boundary effects, since
# FILE_SIZE_BYTES is a soft (approximate) limit.
query I
SELECT COUNT(*) BETWEEN 100 AND 200 FROM glob('__TEST_DIR__/file_size_bytes_csv42/*.csv')
----
1

# Case 2: the same 1MB cap combined with PER_THREAD_OUTPUT — each of the 4
# threads writes and rotates its own sequence of files independently.
statement ok
COPY (FROM bigdata) TO '__TEST_DIR__/file_size_bytes_csv43' (FORMAT CSV, FILE_SIZE_BYTES '1mb', PER_THREAD_OUTPUT TRUE);

# All rows must still be present when reading back the per-thread files.
query I
SELECT COUNT(*) FROM read_csv_auto('__TEST_DIR__/file_size_bytes_csv43/*.csv')
----
10000000

# Same content sanity check as case 1.
query II
SELECT AVG(col_a), AVG(col_b) FROM read_csv('__TEST_DIR__/file_size_bytes_csv43/*.csv')
----
4999999.5	4999999.5

# Per-thread rotation should still land in roughly the same total file-count
# range as the single-writer case.
query I
SELECT COUNT(*) BETWEEN 100 AND 200 FROM glob('__TEST_DIR__/file_size_bytes_csv43/*.csv')
----
1