File: test_sql.py (python-geopandas 0.12.2-1)

"""
Tests here include reading/writing to different types of spatial databases.
The spatial database tests may not work without additional system
configuration. PostGIS tests require a test database to have been set up;
see geopandas.tests.util for more information.
"""
import os

import pandas as pd

import geopandas
from geopandas import GeoDataFrame, read_file, read_postgis

from geopandas.io.sql import _get_conn as get_conn, _write_postgis as write_postgis
from geopandas.tests.util import create_postgis, create_spatialite, validate_boro_df
import pytest
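
# A minimal setup sketch, not part of the test suite: the PostGIS fixtures
# below read the standard libpq environment variables (PGUSER, PGPASSWORD,
# PGHOST, PGPORT) and expect an existing database named "test_geopandas"
# with the PostGIS extension enabled. The credentials below are illustrative
# assumptions only:
#
#     import psycopg2
#
#     con = psycopg2.connect(dbname="test_geopandas", user="postgres",
#                            password="postgres", host="localhost", port=5432)
#     with con, con.cursor() as cur:
#         cur.execute("CREATE EXTENSION IF NOT EXISTS postgis;")
#     con.close()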


@pytest.fixture
def df_nybb():
    nybb_path = geopandas.datasets.get_path("nybb")
    df = read_file(nybb_path)
    return df


@pytest.fixture()
def connection_postgis():
    """
    Initiates a connection to a PostGIS database that must already exist.
    See create_postgis for more information.
    """
    psycopg2 = pytest.importorskip("psycopg2")
    from psycopg2 import OperationalError

    dbname = "test_geopandas"
    user = os.environ.get("PGUSER")
    password = os.environ.get("PGPASSWORD")
    host = os.environ.get("PGHOST")
    port = os.environ.get("PGPORT")
    try:
        con = psycopg2.connect(
            dbname=dbname, user=user, password=password, host=host, port=port
        )
    except OperationalError:
        pytest.skip("Cannot connect with postgresql database")

    yield con
    con.close()


@pytest.fixture()
def engine_postgis():
    """
    Initiates a connection engine to a PostGIS database that must already exist.
    """
    sqlalchemy = pytest.importorskip("sqlalchemy")
    from sqlalchemy.engine.url import URL

    user = os.environ.get("PGUSER")
    password = os.environ.get("PGPASSWORD")
    host = os.environ.get("PGHOST")
    port = os.environ.get("PGPORT")
    dbname = "test_geopandas"

    try:
        con = sqlalchemy.create_engine(
            URL.create(
                drivername="postgresql+psycopg2",
                username=user,
                database=dbname,
                password=password,
                host=host,
                port=port,
            )
        )
        con.begin()
    except Exception:
        pytest.skip("Cannot connect with postgresql database")

    yield con
    con.dispose()


@pytest.fixture()
def connection_spatialite():
    """
    Return a memory-based SQLite3 connection with SpatiaLite enabled & initialized.

    `The sqlite3 module must be built with loadable extension support
    <https://docs.python.org/3/library/sqlite3.html#f1>`_ and
    `SpatiaLite <https://www.gaia-gis.it/fossil/libspatialite/index>`_
    must be available on the system as a SQLite module.
    Packages available on Anaconda meet requirements.

    Exceptions
    ----------
    ``AttributeError`` on missing support for loadable SQLite extensions
    ``sqlite3.OperationalError`` on missing SpatiaLite
    """
    sqlite3 = pytest.importorskip("sqlite3")
    try:
        with sqlite3.connect(":memory:") as con:
            con.enable_load_extension(True)
            con.load_extension("mod_spatialite")
            con.execute("SELECT InitSpatialMetaData(TRUE)")
    except Exception:
        con.close()
        pytest.skip("Cannot setup spatialite database")

    yield con
    con.close()


def drop_table_if_exists(conn_or_engine, table):
    sqlalchemy = pytest.importorskip("sqlalchemy")

    if sqlalchemy.inspect(conn_or_engine).has_table(table):
        metadata = sqlalchemy.MetaData(conn_or_engine)
        metadata.reflect()
        table = metadata.tables.get(table)
        if table is not None:
            table.drop(checkfirst=True)


@pytest.fixture
def df_mixed_single_and_multi():
    from shapely.geometry import Point, LineString, MultiLineString

    df = geopandas.GeoDataFrame(
        {
            "geometry": [
                LineString([(0, 0), (1, 1)]),
                MultiLineString([[(0, 0), (1, 1)], [(2, 2), (3, 3)]]),
                Point(0, 1),
            ]
        },
        crs="epsg:4326",
    )
    return df


@pytest.fixture
def df_geom_collection():
    from shapely.geometry import Point, LineString, Polygon, GeometryCollection

    df = geopandas.GeoDataFrame(
        {
            "geometry": [
                GeometryCollection(
                    [
                        Polygon([(0, 0), (1, 1), (0, 1)]),
                        LineString([(0, 0), (1, 1)]),
                        Point(0, 0),
                    ]
                )
            ]
        },
        crs="epsg:4326",
    )
    return df


@pytest.fixture
def df_linear_ring():
    from shapely.geometry import LinearRing

    df = geopandas.GeoDataFrame(
        {"geometry": [LinearRing(((0, 0), (0, 1), (1, 1), (1, 0)))]}, crs="epsg:4326"
    )
    return df


@pytest.fixture
def df_3D_geoms():
    from shapely.geometry import Point, LineString, Polygon

    df = geopandas.GeoDataFrame(
        {
            "geometry": [
                LineString([(0, 0, 0), (1, 1, 1)]),
                Polygon([(0, 0, 0), (1, 1, 1), (0, 1, 1)]),
                Point(0, 1, 2),
            ]
        },
        crs="epsg:4326",
    )
    return df


class TestIO:
    def test_get_conn(self, engine_postgis):
        Connection = pytest.importorskip("sqlalchemy.engine.base").Connection

        engine = engine_postgis
        with get_conn(engine) as output:
            assert isinstance(output, Connection)
        with engine.connect() as conn:
            with get_conn(conn) as output:
                assert isinstance(output, Connection)
        with pytest.raises(ValueError):
            with get_conn(object()):
                pass

    def test_read_postgis_default(self, connection_postgis, df_nybb):
        con = connection_postgis
        create_postgis(con, df_nybb)

        sql = "SELECT * FROM nybb;"
        df = read_postgis(sql, con)

        validate_boro_df(df)
        # no CRS defined on the created table, and none specified by the user;
        # the result should be None rather than 0 (which would indicate a
        # get_srid failure)
        assert df.crs is None

    def test_read_postgis_custom_geom_col(self, connection_postgis, df_nybb):
        con = connection_postgis
        geom_col = "the_geom"
        create_postgis(con, df_nybb, geom_col=geom_col)

        sql = "SELECT * FROM nybb;"
        df = read_postgis(sql, con, geom_col=geom_col)

        validate_boro_df(df)

    def test_read_postgis_select_geom_as(self, connection_postgis, df_nybb):
        """Tests that a SELECT {geom} AS {some_other_geom} works."""
        con = connection_postgis
        orig_geom = "geom"
        out_geom = "the_geom"
        create_postgis(con, df_nybb, geom_col=orig_geom)

        sql = """SELECT borocode, boroname, shape_leng, shape_area,
                    {} as {} FROM nybb;""".format(
            orig_geom, out_geom
        )
        df = read_postgis(sql, con, geom_col=out_geom)

        validate_boro_df(df)

    def test_read_postgis_get_srid(self, connection_postgis, df_nybb):
        """Tests that an SRID can be read from a geodatabase (GH #451)."""
        con = connection_postgis
        crs = "epsg:4269"
        df_reproj = df_nybb.to_crs(crs)
        create_postgis(con, df_reproj, srid=4269)

        sql = "SELECT * FROM nybb;"
        df = read_postgis(sql, con)

        validate_boro_df(df)
        assert df.crs == crs

    def test_read_postgis_override_srid(self, connection_postgis, df_nybb):
        """Tests that a user specified CRS overrides the geodatabase SRID."""
        con = connection_postgis
        orig_crs = df_nybb.crs
        create_postgis(con, df_nybb, srid=4269)

        sql = "SELECT * FROM nybb;"
        df = read_postgis(sql, con, crs=orig_crs)

        validate_boro_df(df)
        assert df.crs == orig_crs

    def test_from_postgis_default(self, connection_postgis, df_nybb):
        con = connection_postgis
        create_postgis(con, df_nybb)

        sql = "SELECT * FROM nybb;"
        df = GeoDataFrame.from_postgis(sql, con)

        validate_boro_df(df, case_sensitive=False)

    def test_from_postgis_custom_geom_col(self, connection_postgis, df_nybb):
        con = connection_postgis
        geom_col = "the_geom"
        create_postgis(con, df_nybb, geom_col=geom_col)

        sql = "SELECT * FROM nybb;"
        df = GeoDataFrame.from_postgis(sql, con, geom_col=geom_col)

        validate_boro_df(df, case_sensitive=False)

    def test_read_postgis_null_geom(self, connection_spatialite, df_nybb):
        """Tests that geometry with NULL is accepted."""
        con = connection_spatialite
        geom_col = df_nybb.geometry.name
        df_nybb.geometry.iat[0] = None
        create_spatialite(con, df_nybb)
        sql = (
            "SELECT ogc_fid, borocode, boroname, shape_leng, shape_area, "
            'AsEWKB("{0}") AS "{0}" FROM nybb'.format(geom_col)
        )
        df = read_postgis(sql, con, geom_col=geom_col)
        validate_boro_df(df)

    def test_read_postgis_binary(self, connection_spatialite, df_nybb):
        """Tests that geometry read as binary is accepted."""
        con = connection_spatialite
        geom_col = df_nybb.geometry.name
        create_spatialite(con, df_nybb)
        sql = (
            "SELECT ogc_fid, borocode, boroname, shape_leng, shape_area, "
            'ST_AsBinary("{0}") AS "{0}" FROM nybb'.format(geom_col)
        )
        df = read_postgis(sql, con, geom_col=geom_col)
        validate_boro_df(df)

    def test_read_postgis_chunksize(self, connection_postgis, df_nybb):
        """Test chunksize argument"""
        chunksize = 2
        con = connection_postgis
        create_postgis(con, df_nybb)

        sql = "SELECT * FROM nybb;"
        df = pd.concat(read_postgis(sql, con, chunksize=chunksize))

        validate_boro_df(df)
        # no CRS defined on the created table, and none specified by the user;
        # the result should be None rather than 0 (which would indicate a
        # get_srid failure)
        assert df.crs is None

    def test_read_postgis_privacy(self, connection_postgis, df_nybb):
        con = connection_postgis
        create_postgis(con, df_nybb)

        sql = "SELECT * FROM nybb;"
        with pytest.warns(FutureWarning):
            geopandas.io.sql.read_postgis(sql, con)

    def test_write_postgis_default(self, engine_postgis, df_nybb):
        """Tests that GeoDataFrame can be written to PostGIS with defaults."""
        engine = engine_postgis
        table = "nybb"

        # If table exists, delete it before trying to write with defaults
        drop_table_if_exists(engine, table)

        # Write to db
        write_postgis(df_nybb, con=engine, name=table, if_exists="fail")
        # Validate
        sql = "SELECT * FROM {table};".format(table=table)
        df = read_postgis(sql, engine, geom_col="geometry")
        validate_boro_df(df)

    def test_write_postgis_uppercase_tablename(self, engine_postgis, df_nybb):
        """Tests writing GeoDataFrame to PostGIS with uppercase tablename."""
        engine = engine_postgis
        table = "aTestTable"

        # If table exists, delete it before trying to write with defaults
        drop_table_if_exists(engine, table)

        # Write to db
        write_postgis(df_nybb, con=engine, name=table, if_exists="fail")
        # Validate
        sql = 'SELECT * FROM "{table}";'.format(table=table)
        df = read_postgis(sql, engine, geom_col="geometry")
        validate_boro_df(df)

    def test_write_postgis_sqlalchemy_connection(self, engine_postgis, df_nybb):
        """Tests that GeoDataFrame can be written to PostGIS with defaults."""
        with engine_postgis.begin() as con:
            table = "nybb_con"

            # If table exists, delete it before trying to write with defaults
            drop_table_if_exists(con, table)

            # Write to db
            write_postgis(df_nybb, con=con, name=table, if_exists="fail")
            # Validate
            sql = "SELECT * FROM {table};".format(table=table)
            df = read_postgis(sql, con, geom_col="geometry")
            validate_boro_df(df)

    def test_write_postgis_fail_when_table_exists(self, engine_postgis, df_nybb):
        """
        Tests that uploading the same table raises an error when: if_exists='fail'.
        """
        engine = engine_postgis

        table = "nybb"

        # Ensure table exists
        write_postgis(df_nybb, con=engine, name=table, if_exists="replace")

        # Writing again with if_exists="fail" should raise because the table exists
        with pytest.raises(ValueError, match="already exists"):
            write_postgis(df_nybb, con=engine, name=table, if_exists="fail")

    def test_write_postgis_replace_when_table_exists(self, engine_postgis, df_nybb):
        """
        Tests that replacing a table is possible when: if_exists='replace'.
        """
        engine = engine_postgis

        table = "nybb"

        # Ensure table exists
        write_postgis(df_nybb, con=engine, name=table, if_exists="replace")
        # Overwrite
        write_postgis(df_nybb, con=engine, name=table, if_exists="replace")
        # Validate
        sql = "SELECT * FROM {table};".format(table=table)
        df = read_postgis(sql, engine, geom_col="geometry")
        validate_boro_df(df)

    def test_write_postgis_append_when_table_exists(self, engine_postgis, df_nybb):
        """
        Tests that appending to existing table produces correct results when:
        if_exists='append'.
        """
        engine = engine_postgis

        table = "nybb"

        orig_rows, orig_cols = df_nybb.shape
        write_postgis(df_nybb, con=engine, name=table, if_exists="replace")
        write_postgis(df_nybb, con=engine, name=table, if_exists="append")
        # Validate
        sql = "SELECT * FROM {table};".format(table=table)
        df = read_postgis(sql, engine, geom_col="geometry")
        new_rows, new_cols = df.shape

        # There should be twice as many rows in the new table
        assert new_rows == orig_rows * 2, (
            "There should be {target} rows, "
            "found: {current}".format(target=orig_rows * 2, current=new_rows)
        )
        # Number of columns should stay the same
        assert new_cols == orig_cols, (
            "There should be {target} columns, "
            "found: {current}".format(target=orig_cols, current=new_cols)
        )

    def test_write_postgis_without_crs(self, engine_postgis, df_nybb):
        """
        Tests that GeoDataFrame can be written to PostGIS without CRS information.
        """
        engine = engine_postgis

        table = "nybb"

        # Write to db
        df_nybb = df_nybb.copy()  # work on a copy so the fixture frame is untouched
        df_nybb.crs = None
        write_postgis(df_nybb, con=engine, name=table, if_exists="replace")
        # Validate that srid is 0
        target_srid = engine.execute(
            "SELECT Find_SRID('{schema}', '{table}', '{geom_col}');".format(
                schema="public", table=table, geom_col="geometry"
            )
        ).fetchone()[0]
        assert target_srid == 0, "SRID should be 0, found %s" % target_srid

    def test_write_postgis_with_esri_authority(self, engine_postgis, df_nybb):
        """
        Tests that GeoDataFrame can be written to PostGIS with ESRI Authority
        CRS information (GH #2414).
        """
        engine = engine_postgis

        table = "nybb"

        # Write to db
        df_nybb_esri = df_nybb.to_crs("ESRI:102003")
        write_postgis(df_nybb_esri, con=engine, name=table, if_exists="replace")
        # Validate that srid is 102003
        target_srid = engine.execute(
            "SELECT Find_SRID('{schema}', '{table}', '{geom_col}');".format(
                schema="public", table=table, geom_col="geometry"
            )
        ).fetchone()[0]
        assert target_srid == 102003, "SRID should be 102003, found %s" % target_srid

    def test_write_postgis_geometry_collection(
        self, engine_postgis, df_geom_collection
    ):
        """
        Tests that writing a GeometryCollection is possible.
        """
        engine = engine_postgis

        table = "geomtype_tests"

        write_postgis(df_geom_collection, con=engine, name=table, if_exists="replace")

        # Validate geometry type
        sql = "SELECT DISTINCT(GeometryType(geometry)) FROM {table} ORDER BY 1;".format(
            table=table
        )
        geom_type = engine.execute(sql).fetchone()[0]
        sql = "SELECT * FROM {table};".format(table=table)
        df = read_postgis(sql, engine, geom_col="geometry")

        assert geom_type.upper() == "GEOMETRYCOLLECTION"
        assert df.geom_type.unique()[0] == "GeometryCollection"

    def test_write_postgis_mixed_geometry_types(
        self, engine_postgis, df_mixed_single_and_multi
    ):
        """
        Tests that writing a mix of single and MultiGeometries is possible.
        """
        engine = engine_postgis

        table = "geomtype_tests"

        write_postgis(
            df_mixed_single_and_multi, con=engine, name=table, if_exists="replace"
        )

        # Validate geometry type
        sql = "SELECT DISTINCT GeometryType(geometry) FROM {table} ORDER BY 1;".format(
            table=table
        )
        res = engine.execute(sql).fetchall()
        assert res[0][0].upper() == "LINESTRING"
        assert res[1][0].upper() == "MULTILINESTRING"
        assert res[2][0].upper() == "POINT"

    def test_write_postgis_linear_ring(self, engine_postgis, df_linear_ring):
        """
        Tests that writing a LinearRing works.
        """
        engine = engine_postgis

        table = "geomtype_tests"

        write_postgis(df_linear_ring, con=engine, name=table, if_exists="replace")

        # Validate geometry type
        sql = "SELECT DISTINCT(GeometryType(geometry)) FROM {table} ORDER BY 1;".format(
            table=table
        )
        geom_type = engine.execute(sql).fetchone()[0]

        assert geom_type.upper() == "LINESTRING"

    def test_write_postgis_in_chunks(self, engine_postgis, df_mixed_single_and_multi):
        """
        Tests that writing a GeoDataFrame in chunks (chunksize argument) works.
        """
        engine = engine_postgis

        table = "geomtype_tests"

        write_postgis(
            df_mixed_single_and_multi,
            con=engine,
            name=table,
            if_exists="replace",
            chunksize=1,
        )
        # Validate row count
        sql = "SELECT COUNT(geometry) FROM {table};".format(table=table)
        row_cnt = engine.execute(sql).fetchone()[0]
        assert row_cnt == 3

        # Validate geometry type
        sql = "SELECT DISTINCT GeometryType(geometry) FROM {table} ORDER BY 1;".format(
            table=table
        )
        res = engine.execute(sql).fetchall()
        assert res[0][0].upper() == "LINESTRING"
        assert res[1][0].upper() == "MULTILINESTRING"
        assert res[2][0].upper() == "POINT"

    def test_write_postgis_to_different_schema(self, engine_postgis, df_nybb):
        """
        Tests writing data to an alternative schema.
        """
        engine = engine_postgis

        table = "nybb"
        schema_to_use = "test"
        sql = "CREATE SCHEMA IF NOT EXISTS {schema};".format(schema=schema_to_use)
        engine.execute(sql)

        write_postgis(
            df_nybb, con=engine, name=table, if_exists="replace", schema=schema_to_use
        )
        # Validate
        sql = "SELECT * FROM {schema}.{table};".format(
            schema=schema_to_use, table=table
        )

        df = read_postgis(sql, engine, geom_col="geometry")
        validate_boro_df(df)

    def test_write_postgis_to_different_schema_when_table_exists(
        self, engine_postgis, df_nybb
    ):
        """
        Tests writing data to an alternative schema when the table already exists.
        """
        engine = engine_postgis

        table = "nybb"
        schema_to_use = "test"
        sql = "CREATE SCHEMA IF NOT EXISTS {schema};".format(schema=schema_to_use)
        engine.execute(sql)

        try:
            write_postgis(
                df_nybb, con=engine, name=table, if_exists="fail", schema=schema_to_use
            )
            # Validate
            sql = "SELECT * FROM {schema}.{table};".format(
                schema=schema_to_use, table=table
            )

            df = read_postgis(sql, engine, geom_col="geometry")
            validate_boro_df(df)

        # Should raise a ValueError when table exists
        except ValueError:
            pass

        # Try with replace flag on
        write_postgis(
            df_nybb, con=engine, name=table, if_exists="replace", schema=schema_to_use
        )
        # Validate
        sql = "SELECT * FROM {schema}.{table};".format(
            schema=schema_to_use, table=table
        )

        df = read_postgis(sql, engine, geom_col="geometry")
        validate_boro_df(df)

    def test_write_postgis_3D_geometries(self, engine_postgis, df_3D_geoms):
        """
        Tests that writing geometries with 3 dimensions works.
        """
        engine = engine_postgis

        table = "geomtype_tests"

        write_postgis(df_3D_geoms, con=engine, name=table, if_exists="replace")

        # Check that all geometries have 3 dimensions
        sql = "SELECT * FROM {table};".format(table=table)
        df = read_postgis(sql, engine, geom_col="geometry")
        assert list(df.geometry.has_z) == [True, True, True]

    def test_row_order(self, engine_postgis, df_nybb):
        """
        Tests that the row order in the db table follows that of the original frame.
        """
        engine = engine_postgis

        table = "row_order_test"
        correct_order = df_nybb["BoroCode"].tolist()

        write_postgis(df_nybb, con=engine, name=table, if_exists="replace")

        # Check that the row order matches
        sql = "SELECT * FROM {table};".format(table=table)
        df = read_postgis(sql, engine, geom_col="geometry")
        assert df["BoroCode"].tolist() == correct_order

    def test_append_before_table_exists(self, engine_postgis, df_nybb):
        """
        Tests that insert works with if_exists='append' when table does not exist yet.
        """
        engine = engine_postgis

        table = "nybb"
        # If table exists, delete it before trying to write with defaults
        drop_table_if_exists(engine, table)

        write_postgis(df_nybb, con=engine, name=table, if_exists="append")

        # Check that the row order matches
        sql = "SELECT * FROM {table};".format(table=table)
        df = read_postgis(sql, engine, geom_col="geometry")
        validate_boro_df(df)

    def test_append_with_different_crs(self, engine_postgis, df_nybb):
        """
        Tests that an error is raised if the frame CRS differs from the table CRS.
        """
        engine = engine_postgis

        table = "nybb"
        write_postgis(df_nybb, con=engine, name=table, if_exists="replace")

        # Reproject
        df_nybb2 = df_nybb.to_crs(epsg=4326)

        # Should raise error when appending
        with pytest.raises(ValueError, match="CRS of the target table"):
            write_postgis(df_nybb2, con=engine, name=table, if_exists="append")
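

# Illustrative local invocation (not part of the module): running pytest on
# this file exercises the tests above; the database-backed ones skip
# themselves when no PostGIS or SpatiaLite setup can be reached:
#
#     pytest test_sql.py -v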