File: pytest_resourceleaks.py

package info (click to toggle)
dask.distributed 2022.12.1%2Bds.1-3
  • links: PTS, VCS
  • area: main
  • in suites: bookworm
  • size: 10,164 kB
  • sloc: python: 81,938; javascript: 1,549; makefile: 228; sh: 100
file content (498 lines) | stat: -rw-r--r-- 16,279 bytes parent folder | download
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
"""A pytest plugin to trace resource leaks.

Usage
-----
This plugin is enabled from the command line with -L / --leaks.
See `pytest --help` for further configuration settings.

You may mark individual tests as known to be leaking with the fixture

    @pytest.mark.leaking(check1, check2, ...)

Where the valid checks are 'fds', 'memory', 'threads', 'processes', 'tracemalloc'.

e.g.

    @pytest.mark.leaking("threads")

If you do, the specified checks won't report errors.

Known issues
------------
- Tests that contain imports will be flagged as leaking RAM (memory and tracemalloc
  checks) if it's the first time in the test suite that the import happens; e.g.

      def test1():
          pytest.importorskip("numpy")

  Same issue happens with tests that populate global caches (e.g. linecache, re).
  A previous version of this plugin had an option to silently retry a test once after a
  failure; that version is no longer working as of the latest pytest. Reinstating the
  flag would solve this issue. See pytest_rerunfailures code for inspiration.

- The @gen_cluster fixture leaks 2 fds on the first test decorated with it within a test
  suite; This issue would also be fixed by rerunning failing tests.

- The @pytest.mark.flaky decorator (pytest_rerunfailures) completely disables this
  plugin for the decorated tests.

- You cannot expect the process memory to go down immediately and deterministically as
  soon as you garbage collect Python objects. This makes the 'memory' check very
  unreliable. On Linux, this can be improved by reducing the MALLOC_TRIM glibc setting
  (see distributed.yaml).
"""
from __future__ import annotations

import gc
import os
import sys
import threading
import tracemalloc
from collections import defaultdict
from time import sleep
from typing import Any, ClassVar

import psutil
import pytest

from distributed.metrics import time


def pytest_addoption(parser):
    """Register the ``-L``/``--leaks`` command line options with pytest."""
    group = parser.getgroup("resource leaks")
    names = ", ".join(sorted(f"'{name}'" for name in all_checkers))
    group.addoption(
        "-L",
        "--leaks",
        help="List of resources to monitor for leaks before and after each test. "
        "Can be 'all' or a comma-separated list of resource names "
        f"(possible values: {names}).",
    )
    group.addoption(
        "--leaks-timeout",
        type=float,
        default=0.5,
        help="Wait at most these many seconds before marking a test as leaking "
        "(default: %(default)s)",
    )
    group.addoption(
        "--leaks-fail",
        action="store_true",
        help="Mark leaked tests as failed",
    )


def pytest_configure(config: pytest.Config) -> None:
    """If ``--leaks`` was given, build the requested checkers and register a
    ``LeakChecker`` plugin instance under the name ``leaks_checker``.

    Raises ValueError if an unknown resource name was requested.
    """
    requested = config.getvalue("leaks")
    if not requested:
        return
    if requested == "all":
        # 'demo' always reports a leak; it's only for testing the plugin itself
        names = sorted(name for name in all_checkers if name != "demo")
    else:
        names = requested.split(",")
    unknown = sorted(set(names) - set(all_checkers))
    if unknown:
        raise ValueError(f"unknown resources: {unknown!r}")

    plugin = LeakChecker(
        checkers=[all_checkers[name]() for name in names],
        grace_delay=config.getvalue("leaks_timeout"),
        mark_failed=config.getvalue("leaks_fail"),
    )
    config.pluginmanager.register(plugin, "leaks_checker")


# Registry of every ResourceChecker subclass, keyed by its `name`;
# populated automatically by ResourceChecker.__init_subclass__ at import time.
all_checkers: dict[str, type[ResourceChecker]] = {}


class ResourceChecker:
    """Base class for a single kind of resource-leak detector.

    Subclasses declare a unique name via the class keyword, e.g.
    ``class FDChecker(ResourceChecker, name="fds")``, which registers them in
    the global ``all_checkers`` mapping.
    """

    # Unique registry key, set by __init_subclass__
    name: ClassVar[str]

    def __init_subclass__(cls, name: str):
        assert name not in all_checkers
        cls.name = name
        all_checkers[name] = cls

    def on_start_test(self) -> None:
        """Hook called just before a test runs. No-op by default."""

    def on_stop_test(self) -> None:
        """Hook called once a test's leak measurements are done. No-op by default."""

    def on_retry(self) -> None:
        """Hook called before re-measuring a suspected leak. No-op by default."""

    def measure(self) -> Any:
        """Return an opaque snapshot of the current resource usage."""
        raise NotImplementedError

    def has_leak(self, before: Any, after: Any) -> bool:
        """Return True if the two snapshots indicate a leak."""
        raise NotImplementedError

    def format(self, before: Any, after: Any) -> str:
        """Return a human-readable description of the detected leak."""
        raise NotImplementedError


class DemoChecker(ResourceChecker, name="demo"):
    """Checker that always leaks. Used to test the core LeakChecker functionality."""

    # Monotonically increasing counter, so every measurement "leaks"
    i: int

    def __init__(self):
        self.i = 0

    def measure(self) -> int:
        self.i = self.i + 1
        return self.i

    def has_leak(self, before: int, after: int) -> bool:
        return before < after

    def format(self, before: int, after: int) -> str:
        return f"counter increased from {before} to {after}"


class FDChecker(ResourceChecker, name="fds"):
    """Detect leaked file descriptors (POSIX only; always passes on Windows)."""

    def measure(self) -> int:
        # Note: WINDOWS constant doesn't work with `mypy --platform win32`
        if sys.platform != "win32":
            return psutil.Process().num_fds()
        # Don't use num_handles(); you'll get tens of thousands of reported leaks
        return 0

    def has_leak(self, before: int, after: int) -> bool:
        return before < after

    def format(self, before: int, after: int) -> str:
        return f"leaked {after - before} file descriptor(s) ({before}->{after})"


class RSSMemoryChecker(ResourceChecker, name="memory"):
    """Detect growth of the process RSS memory (unreliable; see module docstring)."""

    # Ignore RSS growth of up to this many bytes (10 MiB)
    LEAK_THRESHOLD = 10 * 2**20

    def measure(self) -> int:
        return psutil.Process().memory_info().rss

    def has_leak(self, before: int, after: int) -> bool:
        return after - before > self.LEAK_THRESHOLD

    def format(self, before: int, after: int) -> str:
        return f"leaked {(after - before) / 2**20:.1f} MiB of RSS memory"


class ActiveThreadsChecker(ResourceChecker, name="threads"):
    """Detect Python threads started by a test and still alive afterwards."""

    def measure(self) -> set[threading.Thread]:
        return {t for t in threading.enumerate()}

    def has_leak(
        self, before: set[threading.Thread], after: set[threading.Thread]
    ) -> bool:
        # A leak is any thread alive now that wasn't alive before the test;
        # threads that terminated during the test are fine.
        return bool(after - before)

    def format(
        self, before: set[threading.Thread], after: set[threading.Thread]
    ) -> str:
        leaked = after - before
        assert leaked
        return f"leaked {len(leaked)} Python thread(s): {sorted(leaked, key=str)}"


class ChildProcess:
    """Child process info

    We use pid and creation time as keys to disambiguate between processes (and protect
    against pid reuse); other properties such as cmdline may change for a given process
    """

    pid: int
    name: str
    cmdline: list[str]
    create_time: float

    def __init__(self, p: psutil.Process):
        # Snapshot the process attributes immediately; the process may die later
        self.pid = p.pid
        self.name = p.name()
        self.cmdline = p.cmdline()
        self.create_time = p.create_time()

    def __hash__(self) -> int:
        # pid alone is enough for hashing; __eq__ additionally checks create_time,
        # which keeps hash/eq consistent (equal objects share a pid)
        return self.pid

    def __eq__(self, other: object) -> bool:
        return (
            isinstance(other, ChildProcess)
            and self.pid == other.pid
            and self.create_time == other.create_time
        )

    def __lt__(self, other: object) -> bool:
        # Return NotImplemented instead of raising directly so Python can try the
        # reflected operation first and, failing that, raise a descriptive
        # TypeError itself ("'<' not supported between instances of ...").
        if not isinstance(other, ChildProcess):
            return NotImplemented
        return self.pid < other.pid


class ChildProcessesChecker(ResourceChecker, name="processes"):
    """Detect subprocesses spawned by a test and still alive afterwards."""

    def measure(self) -> set[ChildProcess]:
        current = psutil.Process()
        found: set[ChildProcess] = set()
        for child in current.children(recursive=True):
            try:
                with child.oneshot():
                    if child.ppid() == current.pid and os.path.samefile(
                        child.exe(), sys.executable
                    ):
                        skip_prefixes = (
                            # Skip multiprocessing resource tracker
                            "from multiprocessing.resource_tracker import main",
                            # Skip forkserver process; the forkserver's children
                            # however will be recorded normally
                            "from multiprocessing.forkserver import main",
                        )
                        if any(
                            arg.startswith(skip_prefixes) for arg in child.cmdline()
                        ):
                            continue
                    found.add(ChildProcess(child))
            except psutil.NoSuchProcess:
                # Process disappeared while we were inspecting it
                pass
        return found

    def has_leak(self, before: set[ChildProcess], after: set[ChildProcess]) -> bool:
        return bool(after - before)

    def format(self, before: set[ChildProcess], after: set[ChildProcess]) -> str:
        leaked = sorted(after - before)
        assert leaked
        lines = [f"leaked {len(leaked)} processes:"]
        lines += [
            f"  - pid={p.pid}, name={p.name!r}, cmdline={p.cmdline!r}" for p in leaked
        ]
        return "\n".join(lines)


class TracemallocMemoryChecker(ResourceChecker, name="tracemalloc"):
    """Detect growth in Python memory allocations traced by tracemalloc."""

    # Report a leak if the traced memory increased by at least this many bytes
    LEAK_THRESHOLD = 2**20
    # Report at most this many leaks
    NDIFF = 5
    # Report less than NDIFF leaks if they amount to less than this many bytes
    MIN_SIZE_DIFF = 200 * 1024

    def on_start_test(self) -> None:
        # depth=1: record only the innermost frame of each allocation
        tracemalloc.start(1)

    def on_stop_test(self) -> None:
        tracemalloc.stop()

    def measure(self) -> tuple[int, tracemalloc.Snapshot]:
        traced_now, _ = tracemalloc.get_traced_memory()
        return traced_now, tracemalloc.take_snapshot()

    def has_leak(
        self,
        before: tuple[int, tracemalloc.Snapshot],
        after: tuple[int, tracemalloc.Snapshot],
    ) -> bool:
        return after[0] - before[0] > self.LEAK_THRESHOLD

    def format(
        self,
        before: tuple[int, tracemalloc.Snapshot],
        after: tuple[int, tracemalloc.Snapshot],
    ) -> str:
        bytes_before, snap_before = before
        bytes_after, snap_after = after
        stats = snap_after.compare_to(snap_before, "traceback")

        mib = (bytes_after - bytes_before) / 2**20
        lines = [f"leaked {mib:.1f} MiB of traced Python memory"]
        for stat in stats[: self.NDIFF]:
            size_diff = stat.size_diff or stat.size
            if size_diff < self.MIN_SIZE_DIFF:
                # Stats are sorted by size; everything below here is noise
                break
            count = stat.count_diff or stat.count
            lines.append(f"  - leaked {size_diff / 2**20:.1f} MiB in {count} calls at:")
            lines.extend("    " + ln for ln in stat.traceback.format())

        return "\n".join(lines)


class LeakChecker:
    """Pytest plugin object (registered as ``leaks_checker`` by
    ``pytest_configure``) that runs every configured :class:`ResourceChecker`
    before and after each test and records which tests leaked which resources.

    Baselines are taken in ``pytest_runtest_setup``; after teardown, checks are
    re-run with garbage collection and retries for up to ``grace_delay``
    seconds before a leak is reported.
    """

    checkers: list[ResourceChecker]
    grace_delay: float
    mark_failed: bool

    # {nodeid: {checkers}}
    skip_checkers: dict[str, set[ResourceChecker]]
    # {nodeid: {checker: [(before, after)]}}
    counters: dict[str, dict[ResourceChecker, list[tuple[Any, Any]]]]
    # {nodeid: [(checker, before, after)]}
    leaks: dict[str, list[tuple[ResourceChecker, Any, Any]]]
    # {nodeid: {outcomes}}
    outcomes: defaultdict[str, set[str]]

    def __init__(
        self,
        checkers: list[ResourceChecker],
        grace_delay: float,
        mark_failed: bool,
    ):
        """
        Parameters
        ----------
        checkers:
            Resource checker instances to run around every test
        grace_delay:
            Seconds to keep re-measuring before declaring a leak
            (``--leaks-timeout``)
        mark_failed:
            If True, report leaked tests as failed (``--leaks-fail``)
        """
        self.checkers = checkers
        self.grace_delay = grace_delay
        self.mark_failed = mark_failed

        self.skip_checkers = {}
        self.counters = {}
        self.leaks = {}
        self.outcomes = defaultdict(set)

    def cleanup(self) -> None:
        """Try to release resources that are merely pending collection."""
        gc.collect()

    def checks_for_item(self, nodeid: str) -> list[ResourceChecker]:
        """Return the checkers that apply to this test, honouring any
        ``@pytest.mark.leaking`` exclusions recorded in ``skip_checkers``."""
        return [c for c in self.checkers if c not in self.skip_checkers.get(nodeid, ())]

    def measure(self, nodeid: str) -> list[tuple[ResourceChecker, Any]]:
        """Take a snapshot from each applicable checker for this test."""
        # Return items in order
        return [(c, c.measure()) for c in self.checks_for_item(nodeid)]

    def measure_before_test(self, nodeid: str) -> None:
        """Record the pre-test baseline for every applicable checker."""
        for checker in self.checks_for_item(nodeid):
            checker.on_start_test()
        for checker, before in self.measure(nodeid):
            assert before is not None
            # 'after' is filled in by measure_after_test
            self.counters[nodeid][checker].append((before, None))

    def measure_after_test(self, nodeid: str) -> None:
        """Compare post-test measurements against the baseline and record leaks.

        Retries (with gc and the checkers' ``on_retry`` hooks) for up to
        ``grace_delay`` seconds to absorb resources that are released
        asynchronously after the test finishes.
        """
        outcomes = self.outcomes[nodeid]
        # pytest_rerunfailures (@pytest.mark.flaky) breaks this plugin and causes
        # outcomes to be empty.
        if "passed" not in outcomes:
            # Test failed or skipped
            return

        def run_measurements() -> list[tuple[ResourceChecker, Any, Any]]:
            # Re-measure every checker, update the stored (before, after)
            # pairs in place, and return the checkers that still show a leak
            leaks = []
            for checker, after in self.measure(nodeid):
                c = self.counters[nodeid][checker]
                before, _ = c[-1]
                c[-1] = (before, after)
                if checker.has_leak(before, after):
                    leaks.append((checker, before, after))
            return leaks

        t1 = time()
        deadline = t1 + self.grace_delay
        leaks = run_measurements()
        if leaks:
            # First retry happens immediately, without sleeping
            self.cleanup()
            for c, _, _ in leaks:
                c.on_retry()
            leaks = run_measurements()

        while leaks and time() < deadline:
            sleep(0.1)
            self.cleanup()
            for c, _, _ in leaks:
                c.on_retry()
            leaks = run_measurements()

        if leaks:
            self.leaks[nodeid] = leaks
        else:
            self.leaks.pop(nodeid, None)

        for checker in self.checks_for_item(nodeid):
            checker.on_stop_test()

    # Note on hook execution order:
    #   pytest_runtest_protocol
    #       pytest_runtest_setup
    #       pytest_report_teststatus
    #       pytest_runtest_call
    #       pytest_report_teststatus
    #       pytest_runtest_teardown
    #       pytest_report_teststatus

    # See also https://github.com/abalkin/pytest-leaks/blob/master/pytest_leaks.py

    @pytest.hookimpl(hookwrapper=True)
    def pytest_runtest_protocol(self, item, nextitem):
        """Initialize per-test state and process the @pytest.mark.leaking mark."""
        # NOTE(review): a hookwrapper that returns before yielding is an error in
        # pytest; this branch looks unreachable in practice because the plugin is
        # only registered with a non-empty checker list — confirm.
        if not self.checkers:
            return

        nodeid = item.nodeid
        assert nodeid not in self.counters
        self.counters[nodeid] = {c: [] for c in self.checkers}

        leaking_mark = item.get_closest_marker("leaking")
        if leaking_mark:
            unknown = sorted(set(leaking_mark.args) - set(all_checkers))
            if unknown:
                raise ValueError(
                    f"pytest.mark.leaking: unknown resources {unknown}; "
                    f"must be one of {list(all_checkers)}"
                )
            # Skip any configured checker whose class matches a marked name
            classes = tuple(all_checkers[a] for a in leaking_mark.args)
            self.skip_checkers[nodeid] = {
                c for c in self.checkers if isinstance(c, classes)
            }

        yield

    @pytest.hookimpl(hookwrapper=True)
    def pytest_runtest_setup(self, item):
        """Take the pre-test baseline before fixtures run."""
        self.measure_before_test(item.nodeid)
        yield

    @pytest.hookimpl(hookwrapper=True, trylast=True)
    def pytest_runtest_teardown(self, item):
        """After teardown completes, measure leaks; optionally fail the test."""
        yield
        self.measure_after_test(item.nodeid)
        leaks = self.leaks.get(item.nodeid)
        if leaks and self.mark_failed:
            # Trigger fail here to allow stopping with `-x`
            pytest.fail()

    @pytest.hookimpl(hookwrapper=True, trylast=True)
    def pytest_report_teststatus(self, report):
        """Record test outcomes and rewrite the teardown status for leaked tests.

        Leaked tests are reported as 'failed'/LEAKED when ``mark_failed`` is
        set, otherwise under a dedicated 'leaked' category picked up by
        ``pytest_terminal_summary``.
        """
        nodeid = report.nodeid
        self.outcomes[nodeid].add(report.outcome)
        outcome = yield
        if report.when == "teardown":
            leaks = self.leaks.get(report.nodeid)
            if leaks:
                if self.mark_failed:
                    outcome.force_result(("failed", "L", "LEAKED"))
                    report.outcome = "failed"
                    report.longrepr = "\n".join(
                        [
                            f"{nodeid} leaking {checker.name}: "
                            f"{checker.format(before, after)}"
                            for checker, before, after in leaks
                        ]
                    )
                else:
                    outcome.force_result(("leaked", "L", "LEAKED"))

    @pytest.hookimpl
    def pytest_terminal_summary(self, terminalreporter, exitstatus):
        """Print a RESOURCE LEAKS section describing every leaked test."""
        tr = terminalreporter
        leaked = tr.getreports("leaked")
        if leaked:
            # If mark_failed is False, leaks are output as a separate
            # results section
            tr.write_sep("=", "RESOURCE LEAKS")
            for rep in leaked:
                nodeid = rep.nodeid
                for checker, before, after in self.leaks[nodeid]:
                    tr.line(
                        f"{rep.nodeid} leaking {checker.name}: "
                        f"{checker.format(before, after)}"
                    )