# some tests for the renderer classes
from __future__ import annotations

import time

import pytest

from pyinstrument import renderers
from pyinstrument.profiler import Profiler

from .fake_time_util import fake_time

# utils

frame_renderer_classes: list[type[renderers.FrameRenderer]] = [
    renderers.ConsoleRenderer,
    renderers.JSONRenderer,
    renderers.PstatsRenderer,
    renderers.SpeedscopeRenderer,
]

parametrize_frame_renderer_class = pytest.mark.parametrize(
    "frame_renderer_class", frame_renderer_classes, ids=lambda c: c.__name__
)
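
# Each test marked with this decorator runs once per renderer class; the ids
# argument names each parametrized case after the renderer class under test.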

# fixtures

def a():
    b()
    c()


def b():
    d()


def c():
    d()


def d():
    e()


def e():
    time.sleep(1)
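

# Calling a() produces the deterministic call tree a -> b -> d -> e and
# a -> c -> d -> e, with all recorded time spent in e(). fake_time (from the
# local fake_time_util helper) stands in for the real clock, so the
# time.sleep calls cost no real wall time.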
@pytest.fixture(scope="module")
def profiler_session():
    with fake_time():
        profiler = Profiler()
        profiler.start()
        a()
        profiler.stop()
    return profiler.last_session
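
# Note that scope="module" shares a single session object across all the
# tests below, so the renderers under test must not mutate it.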

# tests

@parametrize_frame_renderer_class
def test_empty_profile(frame_renderer_class: type[renderers.FrameRenderer]):
    with Profiler() as profiler:
        pass
    profiler.output(renderer=frame_renderer_class())


@parametrize_frame_renderer_class
def test_timeline_doesnt_crash(
    profiler_session, frame_renderer_class: type[renderers.FrameRenderer]
):
    renderer = frame_renderer_class(timeline=True)
    renderer.render(profiler_session)


@parametrize_frame_renderer_class
def test_show_all_doesnt_crash(
    profiler_session, frame_renderer_class: type[renderers.FrameRenderer]
):
    renderer = frame_renderer_class(show_all=True)
    renderer.render(profiler_session)


@pytest.mark.parametrize("flat_time", ["self", "total"])
def test_console_renderer_flat_doesnt_crash(profiler_session, flat_time):
    renderer = renderers.ConsoleRenderer(flat=True, flat_time=flat_time)
    renderer.render(profiler_session)
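

# One more smoke test in the same spirit, sketched here on the assumption
# that ConsoleRenderer also accepts a color option (not exercised above):
@pytest.mark.parametrize("color", [True, False])
def test_console_renderer_color_doesnt_crash(profiler_session, color):
    renderer = renderers.ConsoleRenderer(color=color)
    renderer.render(profiler_session)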