File: test_aws.py

package info (click to toggle)
python-maggma 0.70.0-7
  • links: PTS, VCS
  • area: main
  • in suites: forky, sid
  • size: 1,416 kB
  • sloc: python: 10,150; makefile: 12
file content (453 lines) | stat: -rw-r--r-- 13,899 bytes parent folder | download | duplicates (2)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
import time
from datetime import datetime

import boto3
import pytest
from botocore.exceptions import ClientError
from moto import mock_aws
from sshtunnel import BaseSSHTunnelForwarderError

from maggma.stores import MemoryStore, MongoStore, S3Store
from maggma.stores.ssh_tunnel import SSHTunnel


@pytest.fixture()
def mongostore():
    """Yield a connected MongoStore and drop its collection on teardown."""
    mongo_store = MongoStore("maggma_test", "test")
    mongo_store.connect()
    yield mongo_store
    # Teardown: wipe the backing collection so tests stay isolated.
    mongo_store._collection.drop()


@pytest.fixture()
def ssh_tunnel():
    """Best-effort SSH tunnel fixture; returns None when no tunnel can be made."""
    try:
        return SSHTunnel("127.0.0.1:22", "127.0.0.1:27017", local_port=9000)
    except (ValueError, BaseSSHTunnelForwarderError):
        # No local sshd or bad configuration: fall back to running untunneled.
        return None


@pytest.fixture()
def s3store():
    """S3Store over a moto-mocked bucket, preloaded with docs mp-1 and mp-3."""
    with mock_aws():
        resource = boto3.resource("s3", region_name="us-east-1")
        resource.create_bucket(Bucket="bucket1")

        index = MemoryStore("index", key="task_id")
        store = S3Store(index, "bucket1", key="task_id")
        store.connect()

        # Seed two documents via separate update() calls, as the original did.
        for task_id, payload in (("mp-1", "asd"), ("mp-3", "sdf")):
            store.update(
                [
                    {
                        "task_id": task_id,
                        "data": payload,
                        store.last_updated_field: datetime.utcnow(),
                    }
                ]
            )

        yield store


@pytest.fixture()
def s3store_w_subdir():
    """Empty S3Store writing under sub_dir "subdir1" with a single worker."""
    with mock_aws():
        boto3.resource("s3", region_name="us-east-1").create_bucket(Bucket="bucket1")

        store = S3Store(MemoryStore("index"), "bucket1", sub_dir="subdir1", s3_workers=1)
        store.connect()

        yield store


@pytest.fixture()
def s3store_multi():
    """Empty S3Store configured with four parallel S3 workers."""
    with mock_aws():
        boto3.resource("s3", region_name="us-east-1").create_bucket(Bucket="bucket1")

        store = S3Store(MemoryStore("index"), "bucket1", s3_workers=4)
        store.connect()

        yield store


@pytest.fixture()
def s3store_with_tunnel(ssh_tunnel):
    """Empty S3Store wired through the (possibly None) ssh_tunnel fixture."""
    with mock_aws():
        boto3.resource("s3", region_name="us-east-1").create_bucket(Bucket="bucket1")

        index = MemoryStore("index", key="task_id")
        store = S3Store(index, "bucket1", key="task_id", ssh_tunnel=ssh_tunnel)
        store.connect()

        yield store


def test_keys():
    """Key validation: a non-string index key raises; mismatched keys warn and the index's key wins."""
    with mock_aws():
        resource = boto3.resource("s3", region_name="us-east-1")
        resource.create_bucket(Bucket="bucket1")

        # A non-string key on the index store is rejected outright.
        bad_index = MemoryStore("index", key=1)
        with pytest.raises(AssertionError, match=r"Since we are.*"):
            S3Store(bad_index, "bucket1", s3_workers=4, key="1")

        # A store key that conflicts with the index key only warns...
        index = MemoryStore("index", key="key1")
        with pytest.warns(UserWarning, match=r"The desired S3Store.*$"):
            store = S3Store(index, "bucket1", s3_workers=4, key="key2")
        store.connect()

        # ...and the index's key is the one actually used.
        store.update({"key1": "mp-1", "data": "1234"})
        with pytest.raises(KeyError):
            store.update({"key2": "mp-2", "data": "1234"})
        assert store.key == store.index.key == "key1"


def test_multi_update(s3store, s3store_multi):
    """Updates with s3_workers=4 should run writes in parallel.

    Both stores get the same 32-doc batch, with the S3 write replaced by a
    fixed 0.2 s sleep, so wall-clock time reflects only parallelism. The
    final assertion checks the single-worker store is proportionally slower
    than the multi-worker one.
    """
    data = [
        {
            "task_id": str(j),
            "data": "DATA",
            s3store_multi.last_updated_field: datetime.utcnow(),
        }
        for j in range(32)
    ]

    # Stand-in for the real S3 write: constant latency, returns the search keys.
    def fake_writing(doc, search_keys):
        time.sleep(0.20)
        return {k: doc[k] for k in search_keys}

    s3store.write_doc_to_s3 = fake_writing
    s3store_multi.write_doc_to_s3 = fake_writing

    start = time.time()
    s3store_multi.update(data, key=["task_id"])
    end = time.time()
    time_multi = end - start

    start = time.time()
    s3store.update(data, key=["task_id"])
    end = time.time()
    time_single = end - start
    # Loose bound ((workers - 1) / workers) to avoid flakiness from scheduling jitter.
    assert time_single > time_multi * (s3store_multi.s3_workers - 1) / (s3store.s3_workers)


def test_count(s3store):
    """The fixture preloads two docs; a criteria narrows the count to one."""
    total = s3store.count()
    assert total == 2
    assert s3store.count(criteria={"task_id": "mp-3"}) == 1


def test_query(s3store):
    """query_one returns None on a miss and the stored payload on a hit."""
    assert s3store.query_one(criteria={"task_id": "mp-2"}) is None

    expected = {"mp-1": "asd", "mp-3": "sdf"}
    for task_id, payload in expected.items():
        assert s3store.query_one(criteria={"task_id": task_id})["data"] == payload

    # An unfiltered query yields exactly the two preloaded documents.
    assert len(list(s3store.query())) == 2


def test_update(s3store):
    """update() persists new docs; with compress=True the index records the codec and hash."""
    new_doc = {
        "task_id": "mp-199999",
        "data": "asd",
        s3store.last_updated_field: datetime.utcnow(),
    }
    s3store.update([new_doc])
    assert s3store.query_one({"task_id": "mp-199999"}) is not None

    # Compressed writes annotate the index entry with codec + object hash.
    s3store.compress = True
    s3store.update([{"task_id": "mp-4", "data": "asd"}])
    index_doc = s3store.index.query_one({"task_id": "mp-4"})
    assert index_doc["compression"] == "zlib"
    assert index_doc["obj_hash"] == "be74de5ac71f00ec9e96441a3c325b0592c07f4c"
    # Round trip still yields the original payload.
    assert s3store.query_one({"task_id": "mp-4"})["data"] == "asd"


def test_rebuild_meta_from_index(s3store):
    """Extra index fields are pushed onto the S3 object metadata on rebuild."""
    s3store.update([{"task_id": "mp-2", "data": "asd"}])
    # Add a field to the index only, then sync metadata back to S3.
    s3store.index.update({"task_id": "mp-2", "add_meta": "hello"})
    s3store.rebuild_metadata_from_index()
    assert s3store.s3_bucket.Object("mp-2").metadata["add_meta"] == "hello"


def test_rebuild_index(s3store):
    """A wiped index can be reconstructed from the S3 object metadata."""
    expected_hash = "a69fe0c2cca3a3384c2b1d2f476972704f179741"

    s3store.update([{"task_id": "mp-2", "data": "asd"}])
    assert s3store.index.query_one({"task_id": "mp-2"})["obj_hash"] == expected_hash

    # Drop every index entry, then rebuild from the bucket contents.
    s3store.index.remove_docs({})
    assert s3store.index.query_one({"task_id": "mp-2"}) is None
    s3store.rebuild_index_from_s3_data()
    assert s3store.index.query_one({"task_id": "mp-2"})["obj_hash"] == expected_hash


def tests_msonable_read_write(s3store):
    """An MSONable payload survives a round trip through the store."""
    serialized = s3store.as_dict()
    s3store.update([{"task_id": "mp-2", "data": serialized}])
    fetched = s3store.query_one({"task_id": "mp-2"})
    assert fetched["data"]["@module"] == "maggma.stores.aws"


def test_remove(s3store):
    """remove_docs drops index entries; S3 objects go only with remove_s3_object=True."""

    def key_in_bucket(key):
        # True when an object with exactly this key exists in the bucket.
        matches = s3store.s3_bucket.objects.filter(Prefix=key)
        return any(obj.key == key for obj in matches)

    s3store.update([{"task_id": "mp-2", "data": "asd"}])
    s3store.update([{"task_id": "mp-4", "data": "asd"}])
    s3store.update({"task_id": "mp-5", "data": "aaa"})

    assert s3store.query_one({"task_id": "mp-2"}) is not None
    assert s3store.query_one({"task_id": "mp-4"}) is not None
    assert key_in_bucket("mp-2")
    assert key_in_bucket("mp-4")

    # Default removal keeps the S3 object; remove_s3_object=True deletes it too.
    s3store.remove_docs({"task_id": "mp-2"})
    s3store.remove_docs({"task_id": "mp-4"}, remove_s3_object=True)

    assert key_in_bucket("mp-2")
    assert not key_in_bucket("mp-4")

    # Untouched documents remain queryable.
    assert s3store.query_one({"task_id": "mp-5"}) is not None


def test_close(s3store):
    """Querying after close() raises AttributeError."""
    # Works while the store is connected...
    list(s3store.query())
    s3store.close()
    # ...but not after it has been closed.
    with pytest.raises(AttributeError):
        list(s3store.query())


def test_bad_import(mocker):
    """If boto3 is unavailable, constructing an S3Store must fail loudly."""
    mocker.patch("maggma.stores.aws.boto3", None)
    with pytest.raises(RuntimeError):
        S3Store(MemoryStore("index"), "bucket1")


def test_aws_error(s3store):
    """NoSuchKey errors are tolerated during queries; any other ClientError propagates."""

    def missing_key_error(data):
        response = {"Error": {"Code": "NoSuchKey", "Message": "The specified key does not exist."}}
        raise ClientError(response, "raise_exception")

    def other_error(data):
        response = {"Error": {"Code": 405}}
        raise ClientError(response, "raise_exception")

    # An arbitrary S3 error bubbles up to the caller.
    s3store.s3_bucket.Object = other_error
    with pytest.raises(ClientError):
        s3store.query_one()

    # A missing object is swallowed and the query simply yields nothing.
    s3store.s3_bucket.Object = missing_key_error
    s3store.query_one()


def test_eq(mongostore, s3store):
    """A store equals itself and differs from a store of another type."""
    assert s3store == s3store  # noqa: PLR0124 - identity comparison is the point
    assert mongostore != s3store


def test_count_subdir(s3store_w_subdir):
    """Counting works for a store that writes under a sub-directory prefix."""
    for task_id in ("mp-1", "mp-2"):
        s3store_w_subdir.update([{"task_id": task_id, "data": "asd"}])

    assert s3store_w_subdir.count() == 2
    assert s3store_w_subdir.count({"task_id": "mp-2"}) == 1


def test_subdir_field(s3store_w_subdir):
    """Every index entry records the store's non-empty sub_dir."""
    for task_id in ("mp-1", "mp-2"):
        s3store_w_subdir.update([{"task_id": task_id, "data": "asd"}])

    for entry in s3store_w_subdir.index.query():
        assert len(entry["sub_dir"]) > 0
        assert entry["sub_dir"] == s3store_w_subdir.sub_dir


def test_remove_subdir(s3store_w_subdir):
    """remove_docs deletes only the matching doc in a sub_dir store."""
    for task_id in ("mp-2", "mp-4"):
        s3store_w_subdir.update([{"task_id": task_id, "data": "asd"}])

    assert s3store_w_subdir.query_one({"task_id": "mp-2"}) is not None
    assert s3store_w_subdir.query_one({"task_id": "mp-4"}) is not None

    s3store_w_subdir.remove_docs({"task_id": "mp-2"})

    # Only mp-2 is gone; mp-4 is untouched.
    assert s3store_w_subdir.query_one({"task_id": "mp-2"}) is None
    assert s3store_w_subdir.query_one({"task_id": "mp-4"}) is not None


def test_searchable_fields(s3store):
    """Fields listed in searchable_fields get copied into the index store."""
    timestamp = datetime(2018, 4, 12, 16)
    docs = [{"task_id": f"mp-{i}", "a": i, s3store.last_updated_field: timestamp} for i in range(4)]

    s3store.searchable_fields = ["task_id"]
    s3store.update(docs, key="a")

    # distinct() reads the index only, so this passes only if "task_id"
    # was propagated there as a searchable field.
    assert set(s3store.distinct("task_id")) == {"mp-0", "mp-1", "mp-2", "mp-3"}


def test_newer_in(s3store):
    """newer_in() reports which docs in the target store have a newer last_updated.

    Two stores share a bucket: ``old_store`` holds docs stamped in 2018 and
    ``new_store`` holds the same task_ids stamped now, so the old store sees
    both docs as newer in the new store and the new store sees none.
    (Fix: removed a dead ``boto3.client("s3")`` assignment that was
    immediately overwritten by the ``boto3.resource`` call.)
    """
    with mock_aws():
        tic = datetime(2018, 4, 12, 16)
        tic2 = datetime.utcnow()
        conn = boto3.resource("s3", region_name="us-east-1")
        conn.create_bucket(Bucket="bucket")

        index_old = MemoryStore("index_old")
        old_store = S3Store(index_old, "bucket")
        old_store.connect()
        old_store.update([{"task_id": "mp-1", "last_updated": tic}])
        old_store.update([{"task_id": "mp-2", "last_updated": tic}])

        index_new = MemoryStore("index_new")
        new_store = S3Store(index_new, "bucket")
        new_store.connect()
        new_store.update([{"task_id": "mp-1", "last_updated": tic2}])
        new_store.update([{"task_id": "mp-2", "last_updated": tic2}])

        assert len(old_store.newer_in(new_store)) == 2
        assert len(new_store.newer_in(old_store)) == 0

        # Comparing directly against the target's index store behaves the same.
        assert len(old_store.newer_in(new_store.index)) == 2
        assert len(new_store.newer_in(old_store.index)) == 0


def test_additional_metadata(s3store):
    """additional_metadata fields passed to update() land in the index store."""
    timestamp = datetime(2018, 4, 12, 16)
    docs = [{"task_id": f"mp-{i}", "a": i, s3store.last_updated_field: timestamp} for i in range(4)]

    s3store.update(docs, key="a", additional_metadata="task_id")

    # distinct() reads the index only, so this passes only if "task_id"
    # was stored there as additional metadata.
    assert set(s3store.distinct("task_id")) == {"mp-0", "mp-1", "mp-2", "mp-3"}


def test_get_session(s3store):
    """_get_session() builds a boto3 session from a dict-style s3_profile."""
    profile = {
        "aws_access_key_id": "ACCESS_KEY",
        "aws_secret_access_key": "SECRET_KEY",
    }
    store = S3Store(MemoryStore("index"), "bucket1", s3_profile=profile)

    assert store._get_session().get_credentials().access_key == "ACCESS_KEY"
    assert store._get_session().get_credentials().secret_key == "SECRET_KEY"


def test_no_bucket():
    """Connecting to a bucket that does not exist raises RuntimeError."""
    with mock_aws():
        # Only "bucket1" exists in the mocked account.
        boto3.resource("s3", region_name="us-east-1").create_bucket(Bucket="bucket1")

        store = S3Store(MemoryStore("index"), "bucket2")
        with pytest.raises(RuntimeError, match=r".*Bucket not present.*"):
            store.connect()


def test_force_reset(s3store):
    """The store stays usable after connect(force_reset=True) and after reconnecting."""
    payload = [
        {
            "task_id": "mp-4",
            "data": "abc",
            s3store.last_updated_field: datetime.utcnow(),
        }
    ]

    # Reconnect with a forced reset, then write and read back.
    s3store.connect(force_reset=True)
    s3store.update(payload)
    assert s3store.count({"task_id": "mp-4"}) == 1

    # Simulate a dropped client and reconnect normally.
    s3store.s3 = None
    s3store.connect()
    s3store.update(payload)
    assert s3store.count({"task_id": "mp-4"}) == 1

    s3store.close()


def test_ssh_tunnel(s3store_with_tunnel):
    """This test will actually create a real tunnel to test the functionality.

    The tunnel will be set to `None` if the tunnel cannot be created. As a result,
    it becomes a test not testing the functionality of S3Store with the tunnel.
    """
    s3store_with_tunnel.update(
        [
            {
                "task_id": "mp-4",
                "data": "abc",
                s3store_with_tunnel.last_updated_field: datetime.utcnow(),
            }
        ]
    )
    # The doc must be retrievable through whatever connection path was used.
    assert s3store_with_tunnel.count({"task_id": "mp-4"}) == 1

    s3store_with_tunnel.close()


def test_ssh_tunnel_2():
    """
    This test mocks the SSHTunnel behavior by creating a fake tunnel.

    The purpose is to check the behavior of the S3Store when the tunnel is not `None`.
    This complements the `test_ssh_tunnel` test above.

    Bug fix: the original wrapped the body in a local generator function
    (it contained a ``yield``) and then called it without iterating, so the
    body — including every assertion — never executed. The generator wrapper
    has been removed so the test actually runs.
    """

    class FakeTunnel:
        def __init__(self, *args, **kwargs):
            pass

        def start(self):
            pass

        def stop(self):
            pass

        def local_address(self):
            # NOTE(review): if the real SSHTunnel exposes `local_address` as a
            # property rather than a method, this fake (and the endpoint
            # assertion below) should be updated to match — confirm against
            # maggma.stores.ssh_tunnel.SSHTunnel.
            return "ADDRESS", "PORT"

    with mock_aws():
        conn = boto3.resource("s3", region_name="us-east-1")
        conn.create_bucket(Bucket="bucket1")

        index = MemoryStore("index", key="task_id")
        store = S3Store(index, "bucket1", key="task_id", ssh_tunnel=FakeTunnel())
        store.connect()
        store._get_session()
        # The endpoint URL must be derived from the tunnel's local address.
        assert store._get_endpoint_url() == "http://ADDRESS:PORT"
        store.close()


def test_index_store_kwargs(mongostore):
    """index_store_kwargs passed to S3Store are forwarded to the index store."""
    base_index = MongoStore("db", collection_name="index", key="task_id")
    store = S3Store(base_index, "bucket1", key="task_id", index_store_kwargs={"port": 12345})
    assert store.index.port == 12345