File: parse_frames.py

"""Benchark parsing WebSocket frames."""

import subprocess
import sys
import timeit

from websockets.extensions.permessage_deflate import PerMessageDeflate
from websockets.frames import Frame, Opcode
from websockets.streams import StreamReader


# 256kB of text, compressible by about 70%.
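# (Assumes this script runs inside the websockets Git checkout, where that
# commit exists.)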
text = subprocess.check_output(["git", "log", "8dd8e410"], text=True)


def get_frame(size):
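    # Tile the sample text to build a payload of approximately `size` bytes.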
    repeat, remainder = divmod(size, 256 * 1024)
    payload = repeat * text + text[:remainder]
    return Frame(Opcode.TEXT, payload.encode(), True)


def parse_frame(data, count, mask, extensions):
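    # Feed the serialized frame into the reader `count` times and parse each copy.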
    reader = StreamReader()
    for _ in range(count):
        reader.feed_data(data)
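        # Frame.parse is a generator-based parser: it yields when it needs more
        # data and returns the parsed frame by raising StopIteration. All the
        # data was fed above, so it must finish on the first next() call.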
        parser = Frame.parse(
            reader.read_exact,
            mask=mask,
            extensions=extensions,
        )
        try:
            next(parser)
        except StopIteration:
            pass
        else:
            raise AssertionError("parser should return frame")
    reader.feed_eof()
    assert reader.at_eof(), "parser should consume all data"


def run_benchmark(size, count, compression=False, number=100):
    if compression:
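        # Positional args: remote_no_context_takeover, local_no_context_takeover,
        # remote_max_window_bits, local_max_window_bits, compress_settings.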
        extensions = [PerMessageDeflate(True, True, 12, 12, {"memLevel": 5})]
    else:
        extensions = []
    globals = {
        "get_frame": get_frame,
        "parse_frame": parse_frame,
        "extensions": extensions,
    }
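    # Best of timeit.repeat, normalized to µs per frame. Masked frames model
    # the server side: clients must mask the frames they send.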
    sppf = (
        min(
            timeit.repeat(
                f"parse_frame(data, {count}, mask=True, extensions=extensions)",
                f"data = get_frame({size})"
                f".serialize(mask=True, extensions=extensions)",
                number=number,
                globals=globals,
            )
        )
        / number
        / count
        * 1_000_000
    )
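    # Unmasked frames model the client side: servers send frames unmasked.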
    cppf = (
        min(
            timeit.repeat(
                f"parse_frame(data, {count}, mask=False, extensions=extensions)",
                f"data = get_frame({size})"
                f".serialize(mask=False, extensions=extensions)",
                number=number,
                globals=globals,
            )
        )
        / number
        / count
        * 1_000_000
    )
    print(f"{size}\t{compression}\t{sppf:.2f}\t{cppf:.2f}")


if __name__ == "__main__":
    print("Sizes are in bytes. Times are in µs per frame.", file=sys.stderr)
    print("Run `tabs -16` for clean output. Pipe stdout to TSV for saving.")
    print(file=sys.stderr)

    print("size\tcompression\tserver\tclient")
    run_benchmark(size=8, count=1000, compression=False)
    run_benchmark(size=60, count=1000, compression=False)
    run_benchmark(size=500, count=1000, compression=False)
    run_benchmark(size=4_000, count=1000, compression=False)
    run_benchmark(size=30_000, count=200, compression=False)
    run_benchmark(size=250_000, count=100, compression=False)
    run_benchmark(size=2_000_000, count=20, compression=False)

    run_benchmark(size=8, count=1000, compression=True)
    run_benchmark(size=60, count=1000, compression=True)
    run_benchmark(size=500, count=200, compression=True)
    run_benchmark(size=4_000, count=100, compression=True)
    run_benchmark(size=30_000, count=20, compression=True)
    run_benchmark(size=250_000, count=10, compression=True)