File: threading_test.py

Package: python-maxminddb 3.0.0-1
"""Tests for thread-safety and free-threading support."""

from __future__ import annotations

import threading
import time
import unittest
from typing import TYPE_CHECKING

if TYPE_CHECKING:
    from maxminddb.types import Record

try:
    import maxminddb.extension  # noqa: F401

    HAS_EXTENSION = True
except ImportError:
    HAS_EXTENSION = False

from maxminddb import open_database
from maxminddb.const import MODE_MMAP_EXT


@unittest.skipIf(
    not HAS_EXTENSION,
    "No C extension module found. Skipping threading tests",
)
class TestThreadSafety(unittest.TestCase):
    """Test thread safety of the C extension."""

    def test_concurrent_reads(self) -> None:
        """Test multiple threads reading concurrently."""
        reader = open_database(
            "tests/data/test-data/MaxMind-DB-test-ipv4-24.mmdb",
            MODE_MMAP_EXT,
        )

        results: list[Record | None] = [None] * 100
        errors: list[Exception] = []

        def lookup(index: int, ip: str) -> None:
            try:
                results[index] = reader.get(ip)
            except Exception as e:  # noqa: BLE001
                errors.append(e)

        threads = []
        for i in range(100):
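            # Spread lookups across 1.1.1.1-1.1.1.32; the assertion below expects
            # each of these addresses to resolve to a record in the test database.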
            ip = f"1.1.1.{(i % 32) + 1}"
            t = threading.Thread(target=lookup, args=(i, ip))
            threads.append(t)
            t.start()

        for t in threads:
            t.join()

        reader.close()

        self.assertEqual(len(errors), 0, f"Errors during concurrent reads: {errors}")
        # All lookups should have completed
        self.assertNotIn(None, results)

    def test_read_during_close(self) -> None:
        """Test that close is safe when reads are happening concurrently."""
        reader = open_database(
            "tests/data/test-data/MaxMind-DB-test-ipv4-24.mmdb",
            MODE_MMAP_EXT,
        )

        errors: list[Exception] = []
        should_stop = threading.Event()

        def continuous_reader() -> None:
            # Keep reading until signaled to stop or reader is closed
            while not should_stop.is_set():
                try:
                    reader.get("1.1.1.1")
                except ValueError as e:  # noqa: PERF203
                    # Expected once close() is called
                    if "closed MaxMind DB" not in str(e):
                        errors.append(e)
                    break
                except Exception as e:  # noqa: BLE001
                    errors.append(e)
                    break

        # Start multiple readers
        threads = [threading.Thread(target=continuous_reader) for _ in range(10)]
        for t in threads:
            t.start()

        # Let readers run for a bit
        time.sleep(0.05)

        # Close while reads are happening
        reader.close()

        # Signal threads to stop
        should_stop.set()

        # Wait for all threads
        for t in threads:
            t.join(timeout=1.0)

        self.assertEqual(len(errors), 0, f"Errors during close test: {errors}")

    def test_read_after_close(self) -> None:
        """Test that reads after close raise appropriate error."""
        reader = open_database(
            "tests/data/test-data/MaxMind-DB-test-ipv4-24.mmdb",
            MODE_MMAP_EXT,
        )
        reader.close()

        with self.assertRaisesRegex(
            ValueError,
            "Attempt to read from a closed MaxMind DB",
        ):
            reader.get("1.1.1.1")

    def test_concurrent_reads_and_metadata(self) -> None:
        """Test concurrent reads and metadata access."""
        reader = open_database(
            "tests/data/test-data/MaxMind-DB-test-ipv4-24.mmdb",
            MODE_MMAP_EXT,
        )

        errors: list[Exception] = []
        results: list[bool] = []

        def do_reads() -> None:
            try:
                for _ in range(50):
                    reader.get("1.1.1.1")
                results.append(True)
            except Exception as e:  # noqa: BLE001
                errors.append(e)

        def do_metadata() -> None:
            try:
                for _ in range(50):
                    reader.metadata()
                results.append(True)
            except Exception as e:  # noqa: BLE001
                errors.append(e)

        threads = []
        for _ in range(5):
            threads.append(threading.Thread(target=do_reads))
            threads.append(threading.Thread(target=do_metadata))

        for t in threads:
            t.start()

        for t in threads:
            t.join()

        reader.close()

        self.assertEqual(
            len(errors), 0, f"Errors during concurrent operations: {errors}"
        )
        self.assertEqual(len(results), 10, "All threads should complete")

    def test_concurrent_iteration(self) -> None:
        """Test that iteration is thread-safe."""
        reader = open_database(
            "tests/data/test-data/MaxMind-DB-test-ipv4-24.mmdb",
            MODE_MMAP_EXT,
        )

        errors: list[Exception] = []
        counts: list[int] = []

        def iterate() -> None:
            try:
                count = 0
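                # Each thread independently iterates over every entry in the
                # database and records how many entries it saw.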
                for _ in reader:
                    count += 1
                counts.append(count)
            except Exception as e:  # noqa: BLE001
                errors.append(e)

        threads = [threading.Thread(target=iterate) for _ in range(10)]

        for t in threads:
            t.start()

        for t in threads:
            t.join()

        reader.close()

        self.assertEqual(len(errors), 0, f"Errors during iteration: {errors}")
        # All threads should see the same number of entries
        self.assertEqual(
            len(set(counts)), 1, "All threads should see the same entry count"
        )

    def test_stress_test(self) -> None:
        """Stress test with many threads and operations."""
        reader = open_database(
            "tests/data/test-data/MaxMind-DB-test-ipv4-24.mmdb",
            MODE_MMAP_EXT,
        )

        errors: list[Exception] = []
        operations_completed = threading.Event()
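        # Set only after every worker thread has joined; nothing waits on it,
        # it simply marks that all operations finished.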

        def random_operations() -> None:
            try:
                for i in range(100):
                    # Mix different operations
                    if i % 3 == 0:
                        reader.get("1.1.1.1")
                    elif i % 3 == 1:
                        reader.metadata()
                    else:
                        reader.get_with_prefix_len("1.1.1.2")
            except Exception as e:  # noqa: BLE001
                errors.append(e)

        threads = [threading.Thread(target=random_operations) for _ in range(20)]

        for t in threads:
            t.start()

        for t in threads:
            t.join()

        operations_completed.set()
        reader.close()

        self.assertEqual(len(errors), 0, f"Errors during stress test: {errors}")

    def test_multiple_readers_different_databases(self) -> None:
        """Test multiple readers on different databases in parallel."""
        errors: list[Exception] = []

        def use_reader(filename: str) -> None:
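            # Each worker opens its own reader, does its lookups, and closes it,
            # so open() and close() are also exercised concurrently.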
            try:
                reader = open_database(filename, MODE_MMAP_EXT)
                for _ in range(50):
                    reader.get("1.1.1.1")
                reader.close()
            except Exception as e:  # noqa: BLE001
                errors.append(e)

        threads = [
            threading.Thread(
                target=use_reader,
                args=("tests/data/test-data/MaxMind-DB-test-ipv4-24.mmdb",),
            )
            for _ in range(5)
        ]
        threads.extend(
            [
                threading.Thread(
                    target=use_reader,
                    args=("tests/data/test-data/MaxMind-DB-test-ipv6-24.mmdb",),
                )
                for _ in range(5)
            ]
        )

        for t in threads:
            t.start()

        for t in threads:
            t.join()

        self.assertEqual(len(errors), 0, f"Errors with multiple readers: {errors}")


if __name__ == "__main__":
    unittest.main()