File: conftest.py

# pylint: disable=import-outside-toplevel
from contextlib import contextmanager
from datetime import datetime, timezone
from time import perf_counter
from timeit import timeit
from types import SimpleNamespace
import functools
import gc
import hashlib
import linecache
import logging
import os
import pathlib
import shutil
import tracemalloc
import warnings

from subprocess import check_output, CalledProcessError, PIPE

import pytest

from fpdf.util import get_process_rss_as_mib, print_mem_usage
from fpdf.template import Template

QPDF_AVAILABLE = bool(shutil.which("qpdf"))
if not QPDF_AVAILABLE:
    warnings.warn(
        "qpdf command not available on the $PATH, falling back to hash-based "
        "comparisons in tests"
    )
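
# Hint (an assumption about your platform, not part of this test suite):
# qpdf can usually be installed through the system package manager,
# e.g. "apt install qpdf" on Debian/Ubuntu or "brew install qpdf" on macOS.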

EPOCH = datetime(1969, 12, 31, 19, 00, 00).replace(tzinfo=timezone.utc)

LOREM_IPSUM = (
    "Lorem ipsum Ut nostrud irure reprehenderit anim nostrud dolore sed "
    "ut Excepteur dolore ut sunt irure consectetur tempor eu tempor "
    "nostrud dolore sint exercitation aliquip velit ullamco esse dolore "
    "mollit ea sed voluptate commodo amet eiusmod incididunt Excepteur "
    "Excepteur officia est ea dolore sed id in cillum incididunt quis ex "
    "id aliqua ullamco reprehenderit cupidatat in quis pariatur ex et "
    "veniam consectetur et minim minim nulla ea in quis Ut in "
    "consectetur cillum aliquip pariatur qui quis sint reprehenderit "
    "anim incididunt laborum dolor dolor est dolor fugiat ut officia do "
    "dolore deserunt nulla voluptate officia mollit elit consequat ad "
    "aliquip non nulla dolor nisi magna consectetur anim sint officia "
    "sit tempor anim do laboris ea culpa eu veniam sed cupidatat in anim "
    "fugiat culpa enim Ut cillum in exercitation magna nostrud aute "
    "proident laboris est ullamco nulla occaecat nulla proident "
    "consequat in ut labore non sit id cillum ut ea quis est ut dolore "
    "nisi aliquip aute pariatur ullamco ut cillum Duis nisi elit sit "
    "cupidatat do Ut aliqua irure sunt sunt proident sit aliqua in "
    "dolore Ut in sint sunt exercitation aliquip elit velit dolor nisi "
)


def assert_same_file(file1, file2):
    """
    Assert that two files are the same, by comparing their absolute paths.
    """
    assert os.path.normcase(os.path.abspath(file1)) == os.path.normcase(
        os.path.abspath(file2)
    )


def assert_pdf_equal(
    actual,
    expected,
    tmp_path,
    linearize=False,
    at_epoch=True,
    generate=False,
    ignore_id_changes=False,
    ignore_original_obj_ids=False,
    ignore_xref_offsets=False,
):
    """
    This compare the output of a `FPDF` instance (or `Template` instance),
    with the provided PDF file.

    The `CreationDate` of the newly generated PDF is fixed, so that it never triggers
    a diff.

    If the `qpdf` command is available on the `$PATH`, it will be used to perform the
    comparison, as it greatly helps debugging diffs. Otherwise, a hash-based comparison
    logic is used as a fallback.

    Args:
        actual: instance of `FPDF` or `Template`. The `output` or `render` method
          will be called on it.
        expected: instance of `FPDF`, `bytearray` or file path to a PDF file
          matching the expected output
        tmp_path (Path): temporary directory provided by pytest individually to the
          caller test function
        generate (bool): only generate `pdf` output to `rel_expected_pdf_filepath`
          and return. Useful to create new tests.
    """
    if isinstance(actual, Template):
        actual.render()
        actual_pdf = actual.pdf
    else:
        actual_pdf = actual
    if at_epoch:
        actual_pdf.creation_date = EPOCH
    if generate:
        assert isinstance(expected, pathlib.Path), (
            "When passing `True` to `generate`, "
            "a pathlib.Path must be provided as the `expected` parameter"
        )
        with expected.open("wb") as pdf_file:
            actual_pdf.output(pdf_file, linearize=linearize)
        return
    if isinstance(expected, pathlib.Path):
        expected_pdf_path = expected
    else:
        expected_pdf_path = tmp_path / "expected.pdf"
        with expected_pdf_path.open("wb") as pdf_file:
            if isinstance(expected, (bytes, bytearray)):
                pdf_file.write(expected)
            else:
                expected.set_creation_date(EPOCH)
                expected.output(pdf_file, linearize=linearize)
    actual_pdf_path = tmp_path / "actual.pdf"
    with actual_pdf_path.open("wb") as pdf_file:
        actual_pdf.output(pdf_file, linearize=linearize)
    if QPDF_AVAILABLE:  # Favor qpdf-based comparison, as it helps a lot with debugging:
        actual_qpdf = _qpdf(actual_pdf_path)
        expected_qpdf = _qpdf(expected_pdf_path)
        (tmp_path / "actual_qpdf.pdf").write_bytes(actual_qpdf)
        (tmp_path / "expected_qpdf.pdf").write_bytes(expected_qpdf)
        actual_lines = actual_qpdf.splitlines()
        expected_lines = expected_qpdf.splitlines()
        if ignore_id_changes:
            actual_lines = filter_out_doc_id(actual_lines)
            expected_lines = filter_out_doc_id(expected_lines)
        if ignore_original_obj_ids:
            actual_lines = filter_out_original_obj_ids(actual_lines)
            expected_lines = filter_out_original_obj_ids(expected_lines)
        if ignore_xref_offsets:
            actual_lines = filter_out_xref_offsets(actual_lines)
            expected_lines = filter_out_xref_offsets(expected_lines)
        if actual_lines != expected_lines:
            # It is important to reduce the size of both lists of bytes here,
            # as the assertion diff relies on difflib.ndiff, which has cubic complexity,
            # cf. this comment by Tim Peters: https://bugs.python.org/issue6931#msg223459
            actual_lines = subst_streams_with_hashes(actual_lines)
            expected_lines = subst_streams_with_hashes(expected_lines)
        assert actual_lines == expected_lines
        if linearize:
            _run_cmd("qpdf", "--check-linearization", str(actual_pdf_path))
    else:  # Fallback to hash comparison
        actual_hash = hashlib.md5(actual_pdf_path.read_bytes()).hexdigest()
        expected_hash = hashlib.md5(expected_pdf_path.read_bytes()).hexdigest()

        assert actual_hash == expected_hash, f"{actual_hash} != {expected_hash}"
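
# A minimal usage sketch for assert_pdf_equal(), mirroring the pattern used by
# the tests in this suite (the HERE constant and reference-file name below are
# hypothetical, not taken from any specific test):
#
#     from pathlib import Path
#     import fpdf
#
#     HERE = Path(__file__).resolve().parent
#
#     def test_hello_world(tmp_path):
#         pdf = fpdf.FPDF()
#         pdf.add_page()
#         pdf.set_font("helvetica", size=12)
#         pdf.cell(text="Hello world!")
#         assert_pdf_equal(pdf, HERE / "hello_world.pdf", tmp_path)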


def filter_out_doc_id(lines):
    return [line for line in lines if not line.startswith(b"  /ID [<")]


def filter_out_original_obj_ids(lines):
    return [line for line in lines if not line.startswith(b"%% Original object ID: ")]


def filter_out_xref_offsets(lines):
    return [line for line in lines if not line.endswith(b" 00000 n ")]


def check_signature(pdf, trusted_cert_paths, linearize=False):
    # pylint: disable=import-outside-toplevel
    from endesive import pdf as endesive_pdf

    trusted_certs = []
    for cert_filepath in trusted_cert_paths:
        with open(cert_filepath, "rb") as cert_file:
            trusted_certs.append(cert_file.read())
    results = endesive_pdf.verify(pdf.output(linearize=linearize), trusted_certs)
    for hash_ok, signature_ok, cert_ok in results:
        assert signature_ok
        assert hash_ok
        assert cert_ok


def subst_streams_with_hashes(in_lines):
    """
    This utility function reduces the length of `in_lines`, a list of bytes,
    by replacing multi-line streams looking like this:

        stream
        {non-printable-binary-data}endstream

    with a single line of this format:

        <stream with MD5 hash: abcdef0123456789>
    """
    out_lines, stream = [], None
    for line in in_lines:
        if line == b"stream":
            assert stream is None
            stream = bytearray()
        elif stream == b"stream":
            # `stream` only contains the b"stream" keyword appended on the
            # previous iteration, so `line` is the first line of the stream
            # content; we check if it is binary or not:
            try:
                line.decode("latin-1")
                if not (b"\0" in line or b"\xff" in line):
                    # It's likely to be text! No need to compact stream
                    stream = None
            except UnicodeDecodeError:
                pass
        if stream is None:
            out_lines.append(line)
        else:
            stream += line
        if line.endswith(b"endstream") and stream:
            stream_hash = hashlib.md5(stream).hexdigest()
            out_lines.append(f"<stream with MD5 hash: {stream_hash}>\n".encode())
            stream = None
    return out_lines
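
# Illustration (a hedged example — the exact hash depends on the stream content):
#
#     subst_streams_with_hashes([b"stream", b"\x00\x01\x02endstream"])
#
# returns a single b"<stream with MD5 hash: ...>\n" line in place of the
# original binary stream lines.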


def _qpdf(input_pdf_filepath):
    return _run_cmd(
        "qpdf",
        "--deterministic-id",
        "--password=fpdf2",
        "--qdf",
        str(input_pdf_filepath),
        "-",
    )
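
# The call above is roughly equivalent to this shell command:
#
#     qpdf --deterministic-id --password=fpdf2 --qdf input.pdf -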


def _run_cmd(*args):
    try:
        return check_output(args, stderr=PIPE)
    except CalledProcessError as error:
        print(f"\nqpdf STDERR: {error.stderr.decode().strip()}")
        raise


@contextmanager
def time_execution():
    """
    Usage:

        with time_execution() as duration:
            ...
        assert duration.seconds < 10
    """
    ctx = SimpleNamespace()
    start = perf_counter()
    yield ctx
    ctx.seconds = perf_counter() - start


def ensure_exec_time_below(seconds, repeats=10):
    """
    Unit test decorator using the standard timeit module
    to check that the average duration of the target test
    does not exceed the provided limit.

    Those checks are only enabled if $CHECK_EXEC_TIME is set.

    This decorator replaced pytest-timeout, and is a better fit:
    * because it reveals how far above the threshold the test ran
    * because it does not cause a global PyTest interruption
    * because it computes an average, and is hence more stable
    """

    def actual_decorator(test_func):
        @functools.wraps(test_func)
        def wrapper(*args, **kwargs):
            def func_with_args():
                test_func(*args, **kwargs)

            if not os.environ.get("CHECK_EXEC_TIME"):
                func_with_args()
                return

            total_elapsed_in_secs = timeit(func_with_args, number=repeats)
            # Compute average:
            avg_duration_in_secs = total_elapsed_in_secs / repeats
            assert avg_duration_in_secs < seconds

        return wrapper

    return actual_decorator
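
# Usage sketch (the test name and threshold are hypothetical):
#
#     @ensure_exec_time_below(seconds=5)
#     def test_intense_image_rendering():
#         ...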


def ensure_rss_memory_below(mib):
    """
    Ensure there is no unexpected / significant increase in the process
    RSS memory between BEFORE executing the test and AFTER.

    Those checks are only enabled if $CHECK_RSS_MEMORY is set.

    This decorator replaced memunit, and is a better fit:
    * because it considers the difference between RSS values,
      not an absolute memory amount, and hence better checks
      the memory usage of a single test, in better isolation from other tests
    * because it does not suffer from some memory_profiler issues:
        + https://github.com/py-pdf/fpdf2/issues/641#issuecomment-1465730884
        + a hanging MemTimer child process sometimes preventing PyTest finalization,
          blocking in multiprocessing.util._exit_function():
          https://github.com/python/cpython/blob/3.11/Lib/multiprocessing/util.py#L355

    Sadly, we cannot use the same averaging approach as ensure_exec_time_below(),
    because RSS measures are made on the same process and are hence not independent.
    """

    def actual_decorator(test_func):
        @functools.wraps(test_func)
        def wrapper(*args, **kwargs):
            if not os.environ.get("CHECK_RSS_MEMORY"):
                test_func(*args, **kwargs)
                return
            start_rss_in_mib = get_process_rss_as_mib()
            test_func(*args, **kwargs)
            if not start_rss_in_mib:
                return  # not available under Windows
            end_rss_in_mib = get_process_rss_as_mib()
            assert end_rss_in_mib - start_rss_in_mib < mib

        return wrapper

    return actual_decorator
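
# Usage sketch (the test name and limit are hypothetical):
#
#     @ensure_rss_memory_below(mib=10)
#     def test_heavy_pdf_generation():
#         ...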


# Enabling this check creates an increase in memory usage,
# so we require an opt-in through a CLI argument:
def pytest_addoption(parser):
    parser.addoption(
        "--trace-memory-usage",
        action="store_true",
        help="Trace the memory usage during tests execution",
    )
    parser.addoption(
        "--pympler-summary",
        action="store_true",
        help="At the end of the tests execution, display a summary of objects allocated in memory",
    )
    parser.addoption(
        "--trace-malloc",
        action="store_true",
        help="Trace main memory allocations differences during the whole execution",
    )
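
# The options registered above can then be passed on the pytest command line, e.g.:
#
#     pytest --trace-memory-usage --pympler-summary --trace-malloc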


def pytest_configure():
    "Disable some loggers."
    logging.getLogger("fpdf.svg").propagate = False


@pytest.fixture(scope="module", autouse=True)
def module_memory_usage(request):
    start_rss_in_mib = None
    if request.config.getoption("trace_memory_usage"):
        start_rss_in_mib = get_process_rss_as_mib()
    yield
    if not start_rss_in_mib:
        return  # option not enabled, or RSS not available under Windows
    gc.collect()
    end_rss_in_mib = get_process_rss_as_mib()
    sign = "+" if end_rss_in_mib > start_rss_in_mib else ""
    capmanager = request.config.pluginmanager.getplugin("capturemanager")
    with capmanager.global_and_fixture_disabled():
        print("\n")
        print(
            f"Memory bump for {request.node.name.split('/')[-1]}: {sign}{end_rss_in_mib-start_rss_in_mib:.1f} MiB"
        )


@pytest.fixture(scope="session", autouse=True)
def final_memory_usage(request):
    yield
    if request.config.getoption("trace_memory_usage"):
        gc.collect()
        capmanager = request.config.pluginmanager.getplugin("capturemanager")
        with capmanager.global_and_fixture_disabled():
            print("\n")
            print_mem_usage("Final memory usage:")


@pytest.fixture(scope="session", autouse=True)
def pympler_summary(request):
    yield
    if request.config.getoption("pympler_summary"):
        # pylint: disable=import-error
        from pympler.muppy import get_objects
        from pympler.summary import print_, summarize

        gc.collect()
        all_objects = get_objects()
        capmanager = request.config.pluginmanager.getplugin("capturemanager")
        with capmanager.global_and_fixture_disabled():
            print("\n[pympler/muppy] biggest objects summary:")
            print_(summarize(all_objects))


@pytest.fixture(scope="session", autouse=True)
def trace_malloc(request):
    if not request.config.getoption("trace_malloc"):
        yield
        return
    gc.collect()
    # Top-10 recipe from: https://docs.python.org/3/library/tracemalloc.html#display-the-top-10
    tracemalloc.start()
    snapshot1 = tracemalloc.take_snapshot().filter_traces(
        (
            tracemalloc.Filter(False, linecache.__file__),
            tracemalloc.Filter(False, tracemalloc.__file__),
        )
    )
    yield
    gc.collect()
    snapshot2 = tracemalloc.take_snapshot().filter_traces(
        (
            tracemalloc.Filter(False, linecache.__file__),
            tracemalloc.Filter(False, tracemalloc.__file__),
        )
    )
    top_stats = snapshot2.compare_to(snapshot1, "lineno")
    capmanager = request.config.pluginmanager.getplugin("capturemanager")
    with capmanager.global_and_fixture_disabled():
        print("[tracemalloc] Top 10 differences:")
        for stat in top_stats[:10]:
            print(stat)