--
-- COPY
--
-- directory paths are passed to us in environment variables
\getenv abs_srcdir PG_ABS_SRCDIR
\getenv abs_builddir PG_ABS_BUILDDIR
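-- When this file is run by pg_regress, PG_ABS_SRCDIR and PG_ABS_BUILDDIR are
-- exported by the test driver.  A standalone run would have to provide the
-- psql variables itself; as a sketch only, with placeholder paths that are
-- not part of the suite:
--   \set abs_srcdir '/path/to/src/test/regress'
--   \set abs_builddir '/path/to/build/src/test/regress'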
--- test copying in CSV mode with various styles
--- of embedded line ending characters
create temp table copytest (
    style text,
    test text,
    filler int);
insert into copytest values('DOS',E'abc\r\ndef',1);
insert into copytest values('Unix',E'abc\ndef',2);
insert into copytest values('Mac',E'abc\rdef',3);
insert into copytest values(E'esc\\ape',E'a\\r\\\r\\\n\\nb',4);
\set filename :abs_builddir '/results/copytest.csv'
copy copytest to :'filename' csv;
create temp table copytest2 (like copytest);
copy copytest2 from :'filename' csv;
select * from copytest except select * from copytest2;
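-- An empty result above means every row survived the CSV round trip,
-- including the embedded CR/LF characters.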
truncate copytest2;
--- same test but with an escape char different from quote char
copy copytest to :'filename' csv quote '''' escape E'\\';
copy copytest2 from :'filename' csv quote '''' escape E'\\';
select * from copytest except select * from copytest2;
-- test header line feature
create temp table copytest3 (
    c1 int,
    "col with , comma" text,
    "col with "" quote" int);
copy copytest3 from stdin csv header;
this is just a line full of junk that would error out if parsed
1,a,1
2,b,2
\.
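-- Column names containing a comma or a quote should come back quoted in the
-- CSV header line, with the embedded quote doubled.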
copy copytest3 to stdout csv header;
create temp table copytest4 (
    c1 int,
    "colname with tab: " text);
copy copytest4 from stdin (header);
this is just a line full of junk that would error out if parsed
1 a
2 b
\.
copy copytest4 to stdout (header);
-- test copy from with a partitioned table
create table parted_copytest (
    a int,
    b int,
    c text
) partition by list (b);
create table parted_copytest_a1 (c text, b int, a int);
create table parted_copytest_a2 (a int, c text, b int);
alter table parted_copytest attach partition parted_copytest_a1 for values in(1);
alter table parted_copytest attach partition parted_copytest_a2 for values in(2);
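-- The partitions deliberately declare their columns in a different order
-- from the parent, so routing tuples into them requires attribute remapping.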
-- We must insert enough rows to trigger multi-inserts. These are only
-- enabled adaptively when there are few enough partition changes.
insert into parted_copytest select x,1,'One' from generate_series(1,1000) x;
insert into parted_copytest select x,2,'Two' from generate_series(1001,1010) x;
insert into parted_copytest select x,1,'One' from generate_series(1011,1020) x;
\set filename :abs_builddir '/results/parted_copytest.csv'
copy (select * from parted_copytest order by a) to :'filename';
truncate parted_copytest;
copy parted_copytest from :'filename';
-- Ensure COPY FREEZE errors for partitioned tables.
begin;
truncate parted_copytest;
copy parted_copytest from :'filename' (freeze);
rollback;
select tableoid::regclass,count(*),sum(a) from parted_copytest
group by tableoid order by tableoid::regclass::text;
truncate parted_copytest;
-- create before insert row trigger on parted_copytest_a2
create function part_ins_func() returns trigger language plpgsql as $$
  begin
    return new;
  end;
$$;
create trigger part_ins_trig
    before insert on parted_copytest_a2
    for each row
    execute procedure part_ins_func();
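-- A BEFORE ... FOR EACH ROW trigger disables multi-inserts for this
-- partition, so the COPY below exercises both the single-insert and
-- multi-insert paths across partitions.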
copy parted_copytest from :'filename';
select tableoid::regclass,count(*),sum(a) from parted_copytest
group by tableoid order by tableoid::regclass::text;
truncate table parted_copytest;
create index on parted_copytest (b);
drop trigger part_ins_trig on parted_copytest_a2;
copy parted_copytest from stdin;
1 1 str1
2 2 str2
\.
-- Ensure index entries were properly added during the copy.
select * from parted_copytest where b = 1;
select * from parted_copytest where b = 2;
drop table parted_copytest;
--
-- Progress reporting for COPY
--
create table tab_progress_reporting (
    name text,
    age int4,
    location point,
    salary int4,
    manager name
);
-- Add a trigger to catch and print the contents of the catalog view
-- pg_stat_progress_copy during data insertion. This makes it possible to
-- validate some of the progress reporting for COPY FROM at the point where
-- the trigger fires.
create function notice_after_tab_progress_reporting() returns trigger AS
$$
  declare report record;
  begin
    -- The fields ignored here are the ones that may not remain
    -- consistent across multiple runs. The sizes reported may differ
    -- across platforms, so just check if these are strictly positive.
    with progress_data as (
      select
        relid::regclass::text as relname,
        command,
        type,
        bytes_processed > 0 as has_bytes_processed,
        bytes_total > 0 as has_bytes_total,
        tuples_processed,
        tuples_excluded
      from pg_stat_progress_copy
      where pid = pg_backend_pid())
    select into report (to_jsonb(r)) as value
      from progress_data r;
    raise info 'progress: %', report.value::text;
    return new;
  end;
$$ language plpgsql;
create trigger check_after_tab_progress_reporting
    after insert on tab_progress_reporting
    for each statement
    execute function notice_after_tab_progress_reporting();
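-- The trigger is statement-level, so it fires once per COPY while the
-- command's row in pg_stat_progress_copy is still visible with its final
-- counters.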
-- Generate COPY FROM report with PIPE.
copy tab_progress_reporting from stdin;
sharon 25 (15,12) 1000 sam
sam 30 (10,5) 2000 bill
bill 20 (11,10) 1000 sharon
\.
-- Generate COPY FROM report with FILE, with some excluded tuples.
truncate tab_progress_reporting;
\set filename :abs_srcdir '/data/emp.data'
copy tab_progress_reporting from :'filename'
where (salary < 2000);
drop trigger check_after_tab_progress_reporting on tab_progress_reporting;
drop function notice_after_tab_progress_reporting();
drop table tab_progress_reporting;
-- Test header matching feature
create table header_copytest (
    a int,
    b int,
    c text
);
-- Make sure it works with dropped columns
alter table header_copytest drop column c;
alter table header_copytest add column c text;
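-- errors: HEADER MATCH is only accepted by COPY FROM, and wrong_choice is
-- not a valid value for the HEADER option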
copy header_copytest to stdout with (header match);
copy header_copytest from stdin with (header wrong_choice);
-- works
copy header_copytest from stdin with (header match);
a b c
1 2 foo
\.
copy header_copytest (c, a, b) from stdin with (header match);
c a b
bar 3 4
\.
copy header_copytest from stdin with (header match, format csv);
a,b,c
5,6,baz
\.
-- errors
copy header_copytest (c, b, a) from stdin with (header match);
a b c
1 2 foo
\.
copy header_copytest from stdin with (header match);
a b \N
1 2 foo
\.
copy header_copytest from stdin with (header match);
a b
1 2
\.
copy header_copytest from stdin with (header match);
a b c d
1 2 foo bar
\.
copy header_copytest from stdin with (header match);
a b d
1 2 foo
\.
SELECT * FROM header_copytest ORDER BY a;
-- Drop an extra column, in the middle of the existing set.
alter table header_copytest drop column b;
-- works
copy header_copytest (c, a) from stdin with (header match);
c a
foo 7
\.
copy header_copytest (a, c) from stdin with (header match);
a c
8 foo
\.
-- errors
copy header_copytest from stdin with (header match);
a ........pg.dropped.2........ c
1 2 foo
\.
copy header_copytest (a, c) from stdin with (header match);
a c b
1 foo 2
\.
SELECT * FROM header_copytest ORDER BY a;
drop table header_copytest;
-- test COPY with overlong column defaults
create temp table oversized_column_default (
    col1 varchar(5) DEFAULT 'more than 5 chars',
    col2 varchar(5));
-- normal COPY should work
copy oversized_column_default from stdin;
\.
-- error if the column is excluded
copy oversized_column_default (col2) from stdin;
\.
-- error if the DEFAULT option is given
copy oversized_column_default from stdin (default '');
\.
drop table oversized_column_default;
--
-- Create a partitioned table that does not allow bulk insertions, to test
-- bugs related to the reuse of BulkInsertState across partitions (only done
-- when not using bulk insert). Frequent switching between partitions makes
-- it more likely to encounter these bugs, so we switch on roughly every
-- insert by partitioning on even/odd id values and inserting evenly
-- distributed data.
--
CREATE TABLE parted_si (
    id int not null,
    data text not null,
    -- prevent use of bulk insert by having a volatile function
    rand float8 not null default random()
)
PARTITION BY LIST((id % 2));
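-- (The extra parentheses around id % 2 are required because the partition
-- key is an expression rather than a plain column.)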
CREATE TABLE parted_si_p_even PARTITION OF parted_si FOR VALUES IN (0);
CREATE TABLE parted_si_p_odd PARTITION OF parted_si FOR VALUES IN (1);
-- Test that bulk relation extension handles reusing a single BulkInsertState
-- across partitions. Without the fix applied, this reliably reproduces
-- #18130 unless shared_buffers is extremely small (preventing any use of bulk
-- relation extension). See
-- https://postgr.es/m/18130-7a86a7356a75209d%40postgresql.org
-- https://postgr.es/m/257696.1695670946%40sss.pgh.pa.us
\set filename :abs_srcdir '/data/desc.data'
COPY parted_si(id, data) FROM :'filename';
-- An earlier bug (see commit b1ecb9b3fcf) could end up using a buffer from
-- the wrong partition. This test is *not* guaranteed to trigger that bug, but
-- does so when shared_buffers is small enough. To test if we encountered the
-- bug, check that the partition condition isn't violated.
SELECT tableoid::regclass, id % 2 = 0 is_even, count(*) from parted_si GROUP BY 1, 2 ORDER BY 1;
DROP TABLE parted_si;