File: test_config.py

package info (click to toggle)
python-aiobotocore 2.25.2-1
  • links: PTS, VCS
  • area: main
  • in suites: forky, sid
  • size: 2,524 kB
  • sloc: python: 15,437; makefile: 84
file content (173 lines) | stat: -rw-r--r-- 5,319 bytes parent folder | download
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
import aiohttp.resolver
import anyio
import pytest
from botocore.config import Config
from botocore.exceptions import ParamValidationError, ReadTimeoutError

from aiobotocore.config import AioConfig
from aiobotocore.httpsession import AIOHTTPSession
from aiobotocore.httpxsession import HttpxSession
from aiobotocore.session import AioSession, get_session
from tests.mock_server import AIOServer


async def test_connector_args(current_http_backend: str):
    """Validate AioConfig's connector_args checking and merge behavior.

    Wrong value types and unknown keys must raise ParamValidationError;
    httpx-only restrictions must be enforced when http_session_cls is
    HttpxSession; valid configurations must construct without error; and
    merging with a plain botocore Config must preserve connector_args.
    """
    with pytest.raises(ParamValidationError):
        # wrong type
        connector_args: dict[str, object] = dict(use_dns_cache=1)
        AioConfig(connector_args)

    with pytest.raises(ParamValidationError):
        # wrong type
        connector_args = dict(ttl_dns_cache="1")
        AioConfig(connector_args)

    with pytest.raises(ParamValidationError):
        # wrong type
        connector_args = dict(keepalive_timeout="1")
        AioConfig(connector_args)

    with pytest.raises(ParamValidationError):
        # wrong type
        connector_args = dict(force_close="1")
        AioConfig(connector_args)

    # NOTE(review): the original repeated the keepalive_timeout="1" case
    # twice verbatim; the duplicate has been removed.

    with pytest.raises(ParamValidationError):
        # wrong type
        connector_args = dict(ssl_context="1")
        AioConfig(connector_args)

    with pytest.raises(ParamValidationError):
        # invalid DNS resolver
        connector_args = dict(resolver="1")
        AioConfig(connector_args)

    with pytest.raises(ParamValidationError):
        # invalid key
        connector_args = dict(foo="1")
        AioConfig(connector_args)

    # httpx-backend-specific restrictions
    with pytest.raises(
        ParamValidationError,
        match='Httpx does not support dns caching. https://github.com/encode/httpx/discussions/2211',
    ):
        AioConfig({'use_dns_cache': True}, http_session_cls=HttpxSession)

    with pytest.raises(
        ParamValidationError,
        match='Httpx backend does not currently support force_close.',
    ):
        AioConfig({'force_close': True}, http_session_cls=HttpxSession)

    with pytest.raises(
        ParamValidationError, match='Httpx backend does not support resolver.'
    ):
        AioConfig({'resolver': True}, http_session_cls=HttpxSession)

    # Test valid configs:
    AioConfig({"ttl_dns_cache": None})
    AioConfig({"ttl_dns_cache": 1})
    AioConfig({"resolver": aiohttp.resolver.DefaultResolver()})
    AioConfig({'keepalive_timeout': None})

    # test merge: botocore settings and connector_args must both survive
    cfg = Config(read_timeout=75)
    aio_cfg = AioConfig({'keepalive_timeout': 75})
    aio_cfg.merge(cfg)

    assert cfg.read_timeout == 75
    assert aio_cfg.connector_args['keepalive_timeout'] == 75


async def test_connector_timeout():
    """A request queued behind a busy pool slot must not hit connect_timeout.

    With max_pool_connections=1, the second request has to wait for the
    first one's connection; that wait must not be treated as a connect
    timeout.
    """
    session = AioSession()
    config = AioConfig(
        max_pool_connections=1, connect_timeout=1, retries={'max_attempts': 0}
    )
    async with AIOServer() as server:
        async with session.create_client(
            's3',
            config=config,
            endpoint_url=server.endpoint_url,
            aws_secret_access_key='xxx',
            aws_access_key_id='xxx',
        ) as s3_client:

            async def fetch_then_hold():
                # Grab the single pooled connection, then hold it well past
                # the connect timeout.
                await s3_client.get_object(Bucket='foo', Key='bar')
                await anyio.sleep(100)

            # second request should not timeout just because there isn't a
            # connector available
            with anyio.move_on_after(3):
                async with anyio.create_task_group() as tg:
                    tg.start_soon(fetch_then_hold)
                    tg.start_soon(fetch_then_hold)


async def test_connector_timeout2():
    """A stalled response body read must raise ReadTimeoutError."""
    session = AioSession()
    config = AioConfig(
        max_pool_connections=1,
        connect_timeout=1,
        read_timeout=1,
        retries={'max_attempts': 0},
    )
    async with AIOServer() as server:
        async with session.create_client(
            's3',
            config=config,
            endpoint_url=server.endpoint_url,
            aws_secret_access_key='xxx',
            aws_access_key_id='xxx',
        ) as s3_client:
            with pytest.raises(ReadTimeoutError):
                response = await s3_client.get_object(Bucket='foo', Key='bar')
                await response["Body"].read()


async def test_get_session():
    """get_session() must hand back an AioSession instance."""
    assert isinstance(get_session(), AioSession)


def test_merge():
    """Merging two AioConfig objects yields a fresh AioConfig instance."""
    base, overlay = AioConfig(), AioConfig()
    merged = base.merge(overlay)
    assert isinstance(merged, AioConfig)
    # merge must not return either operand
    assert merged is not base
    assert merged is not overlay


# Check that it's possible to specify custom http_session_cls
async def test_config_http_session_cls():
    """A user-supplied http_session_cls must be used for outgoing requests."""

    class _Raised(Exception): ...

    class CustomSession(AIOHTTPSession):
        async def send(self, request):
            # Raising a marker exception proves this class handled the send.
            raise _Raised

    config = AioConfig(http_session_cls=CustomSession)
    session = AioSession()
    async with AIOServer() as server:
        async with session.create_client(
            's3',
            config=config,
            endpoint_url=server.endpoint_url,
            aws_secret_access_key='xxx',
            aws_access_key_id='xxx',
        ) as s3_client:
            with pytest.raises(_Raised):
                await s3_client.get_object(Bucket='foo', Key='bar')