import sys
import warnings
from inspect import isabstract
from typing import Any

from test import support
from test.support import os_helper

from .runtests import HuntRefleak
from .utils import clear_caches


try:
    from _abc import _get_dump
except ImportError:
    import weakref

    def _get_dump(cls):
        # Reimplement _get_dump() for pure-Python implementation of
        # the abc module (Lib/_py_abc.py)
        registry_weakrefs = set(weakref.ref(obj) for obj in cls._abc_registry)
        return (registry_weakrefs, cls._abc_cache,
                cls._abc_negative_cache, cls._abc_negative_cache_version)


def runtest_refleak(test_name, test_func,
                    hunt_refleak: HuntRefleak,
                    quiet: bool):
    """Run a test multiple times, looking for reference leaks.

    Returns:
        A (failed, results) tuple: *failed* is True if reference leaks were
        detected and False otherwise; *results* is the value returned by the
        last call to test_func.
    """
    # This code is hackish and inelegant, but it seems to do the job.
    import copyreg
    import collections.abc

    if not hasattr(sys, 'gettotalrefcount'):
        raise Exception("Tracking reference leaks requires a debug build "
                        "of Python")

    # Avoid false positives due to various caches
    # filling slowly with random data:
    warm_caches()

    # Save current values for dash_R_cleanup() to restore.
    fs = warnings.filters[:]
    ps = copyreg.dispatch_table.copy()
    pic = sys.path_importer_cache.copy()
    zdc: dict[str, Any] | None
    try:
        import zipimport
    except ImportError:
        zdc = None  # Run unmodified on platforms without zipimport support
    else:
        # private attribute that mypy doesn't know about:
        zdc = zipimport._zip_directory_cache.copy()  # type: ignore[attr-defined]
    abcs = {}
    for abc in [getattr(collections.abc, a) for a in collections.abc.__all__]:
        if not isabstract(abc):
            continue
        for obj in abc.__subclasses__() + [abc]:
            abcs[obj] = _get_dump(obj)[0]
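    # abcs now maps each collections.abc ABC (and each of its subclasses) to
    # a set of weak references to its registered virtual subclasses;
    # dash_R_cleanup() uses this snapshot to re-register them after clearing
    # the ABC caches between runs.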

    # bpo-31217: Integer pool to get a single integer object for the same
    # value. The pool is used to prevent false alarms when checking for memory
    # block leaks. Fill the pool with values in -1000..1000 which are the most
    # common (reference, memory block, file descriptor) differences.
    int_pool = {value: value for value in range(-1000, 1000)}
    def get_pooled_int(value):
        return int_pool.setdefault(value, value)
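
    # Because the deltas measured below are normally small, setdefault()
    # nearly always returns an int object that already exists in int_pool,
    # so recording a delta does not allocate a new int object inside the
    # measurement loop.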
    warmups = hunt_refleak.warmups
    runs = hunt_refleak.runs
    filename = hunt_refleak.filename
    repcount = warmups + runs

    # Pre-allocate to ensure that the loop doesn't allocate anything new
    rep_range = list(range(repcount))
    rc_deltas = [0] * repcount
    alloc_deltas = [0] * repcount
    fd_deltas = [0] * repcount
    getallocatedblocks = sys.getallocatedblocks
    gettotalrefcount = sys.gettotalrefcount
    fd_count = os_helper.fd_count
    # initialize variables to make pyflakes quiet
    rc_before = alloc_before = fd_before = 0

    if not quiet:
        print("beginning", repcount, "repetitions. Showing number of leaks "
              "(. for 0 or less, X for 10 or more)",
              file=sys.stderr)
        numbers = ("1234567890"*(repcount//10 + 1))[:repcount]
        numbers = numbers[:warmups] + ':' + numbers[warmups:]
        print(numbers, file=sys.stderr, flush=True)
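        # For example, with warmups=3 and runs=6 the header printed above is
        # "123:456789"; the main loop prints one symbol per repetition under
        # it, emitting an extra space after the warmup runs so the symbols
        # stay aligned with the ':' separator.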

    results = None
    dash_R_cleanup(fs, ps, pic, zdc, abcs)
    support.gc_collect()

    for i in rep_range:
        results = test_func()

        dash_R_cleanup(fs, ps, pic, zdc, abcs)
        support.gc_collect()

        # Read memory statistics immediately after the garbage collection.
        alloc_after = getallocatedblocks()
        rc_after = gettotalrefcount()
        fd_after = fd_count()

        rc_deltas[i] = get_pooled_int(rc_after - rc_before)
        alloc_deltas[i] = get_pooled_int(alloc_after - alloc_before)
        fd_deltas[i] = get_pooled_int(fd_after - fd_before)

        if not quiet:
            # use max, not sum, so total_leaks is one of the pooled ints
            total_leaks = max(rc_deltas[i], alloc_deltas[i], fd_deltas[i])
            if total_leaks <= 0:
                symbol = '.'
            elif total_leaks < 10:
                symbol = (
                    '.', '1', '2', '3', '4', '5', '6', '7', '8', '9',
                )[total_leaks]
            else:
                symbol = 'X'
            if i == warmups:
                print(' ', end='', file=sys.stderr, flush=True)
            print(symbol, end='', file=sys.stderr, flush=True)
            del total_leaks
            del symbol

        alloc_before = alloc_after
        rc_before = rc_after
        fd_before = fd_after

    if not quiet:
        print(file=sys.stderr)

    # These checkers return False on success, True on failure
    def check_rc_deltas(deltas):
        # Checker for reference counters and memory blocks.
        #
        # bpo-30776: Try to ignore false positives:
        #
        #   [3, 0, 0]
        #   [0, 1, 0]
        #   [8, -8, 1]
        #
        # Expected leaks:
        #
        #   [5, 5, 6]
        #   [10, 1, 1]
        return all(delta >= 1 for delta in deltas)

    def check_fd_deltas(deltas):
        return any(deltas)
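
    # Ignore the warmup repetitions and check each kind of delta separately:
    # a checker hit is recorded as a failure and appended to the refleak log
    # file, while deltas that are nonzero but do not look like a leak are
    # only reported as "(this is fine)".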
    failed = False
    for deltas, item_name, checker in [
        (rc_deltas, 'references', check_rc_deltas),
        (alloc_deltas, 'memory blocks', check_rc_deltas),
        (fd_deltas, 'file descriptors', check_fd_deltas)
    ]:
        # ignore warmup runs
        deltas = deltas[warmups:]
        failing = checker(deltas)
        suspicious = any(deltas)
        if failing or suspicious:
            msg = '%s leaked %s %s, sum=%s' % (
                test_name, deltas, item_name, sum(deltas))
            print(msg, end='', file=sys.stderr)
            if failing:
                print(file=sys.stderr, flush=True)
                with open(filename, "a", encoding="utf-8") as refrep:
                    print(msg, file=refrep)
                    refrep.flush()
                failed = True
            else:
                print(' (this is fine)', file=sys.stderr, flush=True)
    return (failed, results)


def dash_R_cleanup(fs, ps, pic, zdc, abcs):
    import copyreg
    import collections.abc

    # Restore some original values.
    warnings.filters[:] = fs
    copyreg.dispatch_table.clear()
    copyreg.dispatch_table.update(ps)
    sys.path_importer_cache.clear()
    sys.path_importer_cache.update(pic)
    try:
        import zipimport
    except ImportError:
        pass  # Run unmodified on platforms without zipimport support
    else:
        zipimport._zip_directory_cache.clear()
        zipimport._zip_directory_cache.update(zdc)

    # Clear ABC registries, restoring previously saved ABC registries.
    # ignore deprecation warning for collections.abc.ByteString
    abs_classes = [getattr(collections.abc, a) for a in collections.abc.__all__]
    abs_classes = filter(isabstract, abs_classes)
    for abc in abs_classes:
        for obj in abc.__subclasses__() + [abc]:
            for ref in abcs.get(obj, set()):
                if ref() is not None:
                    obj.register(ref())
            obj._abc_caches_clear()

    # Clear caches
    clear_caches()

    # Clear type cache at the end: previous function calls can modify types
    sys._clear_type_cache()


def warm_caches():
    # char cache
    s = bytes(range(256))
    for i in range(256):
        s[i:i+1]
    # unicode cache
    [chr(i) for i in range(256)]
    # int cache
    list(range(-5, 257))