File: benchmarks.py

"""
This file is the declaration of benchmarks for HTTPie. It
is also used to run them with the current environment.

Each instance of BaseRunner class will be an individual
benchmark. And if run without any arguments, this file
will execute every benchmark instance and report the
timings.

The benchmarks are run through 'pyperf', which allows to
do get very precise results. For micro-benchmarks like startup,
please run `pyperf system tune` to get even more accurate results.

Examples:

    # Run everything as usual; by default we do 3 warm-up runs
    # and 5 actual runs.
    $ python extras/profiling/benchmarks.py

    # To retrieve results faster, pass --fast.
    $ python extras/profiling/benchmarks.py --fast

    # To verify that everything works as expected, pass --debug-single-value.
    # It will run everything only once, so the results are not reliable, but
    # it is very useful when iterating on a benchmark.
    $ python extras/profiling/benchmarks.py --debug-single-value

    # If you want to run with a custom HTTPie command (for example with
    # an HTTPie instance installed in another virtual environment),
    # pass the HTTPIE_COMMAND environment variable.
    $ HTTPIE_COMMAND="/my/python /my/httpie" python extras/profiling/benchmarks.py
"""

from __future__ import annotations

import os
import shlex
import subprocess
import sys
import threading
from contextlib import ExitStack, contextmanager
from dataclasses import dataclass, field
from functools import cached_property, partial
from http.server import HTTPServer, SimpleHTTPRequestHandler
from tempfile import TemporaryDirectory
from typing import ClassVar, Final, List

import pyperf

# For download benchmarks, define a set of files to be generated locally.
# file_name: (block_size, count) => total_size = block_size * count
# e.g. '3G' uses 3 MiB blocks * 1024 = 3 GiB.
PREDEFINED_FILES: Final = {'3G': (3 * 1024 ** 2, 1024)}


class QuietSimpleHTTPServer(SimpleHTTPRequestHandler):
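    """Request handler that serves the local files without logging each request."""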
    def log_message(self, *args, **kwargs):
        pass


@contextmanager
def start_server():
    """Create a server to serve local files. It will create the
    PREDEFINED_FILES through dd."""
    with TemporaryDirectory() as directory:
        for file_name, (block_size, count) in PREDEFINED_FILES.items():
            subprocess.check_call(
                [
                    'dd',
                    'if=/dev/zero',
                    f'of={file_name}',
                    f'bs={block_size}',
                    f'count={count}',
                ],
                cwd=directory,
                stdout=subprocess.DEVNULL,
                stderr=subprocess.DEVNULL,
            )

        handler = partial(QuietSimpleHTTPServer, directory=directory)
        server = HTTPServer(('localhost', 0), handler)

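        # Serve in a background thread and yield the bound 'host:port' address.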
        thread = threading.Thread(target=server.serve_forever)
        thread.start()
        yield '{}:{}'.format(*server.socket.getsockname())
        server.shutdown()
        thread.join(timeout=0.5)


@dataclass
class Context:
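    """
    Shared state for a benchmark session: the registered benchmarks,
    the pyperf runner, and an ExitStack owning resources such as the
    local file server.
    """
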
    benchmarks: ClassVar[List[BaseRunner]] = []
    stack: ExitStack = field(default_factory=ExitStack)
    runner: pyperf.Runner = field(default_factory=pyperf.Runner)

    def run(self) -> pyperf.BenchmarkSuite:
        results = [benchmark.run(self) for benchmark in self.benchmarks]
        return pyperf.BenchmarkSuite(results)

    @property
    def cmd(self) -> List[str]:
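        # HTTPIE_COMMAND overrides the benchmarked HTTPie invocation; otherwise
        # fall back to the 'http' script installed next to the current interpreter.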
        if cmd := os.getenv('HTTPIE_COMMAND'):
            return shlex.split(cmd)

        http = os.path.join(os.path.dirname(sys.executable), 'http')
        assert os.path.exists(http)
        return [sys.executable, http]

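    # The local file server is started lazily on first access and is
    # closed by the ExitStack when the context exits.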
    @cached_property
    def server(self) -> str:
        return self.stack.enter_context(start_server())

    def __enter__(self):
        return self

    def __exit__(self, *exc_info):
        self.stack.close()


@dataclass
class BaseRunner:
    """
    An individual benchmark case. Each case has a category
    (e.g. startup or download) and a title.
    """

    category: str
    title: str

    def __post_init__(self):
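        # Self-register so that Context.run() executes every declared benchmark.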
        Context.benchmarks.append(self)

    def run(self, context: Context) -> pyperf.Benchmark:
        raise NotImplementedError

    @property
    def name(self) -> str:
        return f'{self.title} ({self.category})'


@dataclass
class CommandRunner(BaseRunner):
    """
    Run a single command, and benchmark it.
    """

    args: List[str]

    def run(self, context: Context) -> pyperf.Benchmark:
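        # pyperf runs the full command line in subprocesses and measures it.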
        return context.runner.bench_command(self.name, [*context.cmd, *self.args])


@dataclass
class DownloadRunner(BaseRunner):
    """
    Benchmark downloading a single file from the locally
    started benchmark server.
    """

    file_name: str

    def run(self, context: Context) -> pyperf.Benchmark:
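        # Accessing context.server lazily starts the local file server.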
        return context.runner.bench_command(
            self.name,
            [
                *context.cmd,
                '--download',
                'GET',
                f'{context.server}/{self.file_name}',
            ],
        )


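# Benchmark declarations: instantiating a runner registers it on
# Context.benchmarks (see BaseRunner.__post_init__).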
CommandRunner('startup', '`http --version`', ['--version'])
CommandRunner('startup', '`http --offline pie.dev/get`', ['--offline', 'pie.dev/get'])
for pretty in ['all', 'none']:
    CommandRunner(
        'startup',
        f'`http --pretty={pretty} httpbin.org/stream/1000`',
        [
            '--print=HBhb',
            f'--pretty={pretty}',
            'httpbin.org/stream/1000'
        ]
    )
DownloadRunner('download', '`http --download :/big_file.txt` (3GB)', '3G')


def main() -> None:
    # pyperf brings its own argument parser, so configure it through sys.argv.
    # The configuration below is reasonably fast yet precise enough: we run
    # each benchmark 3 times to warm up (important especially for the download
    # benchmark) and then do 5 actual runs where we record values.
    sys.argv.extend(
        ['--worker', '--loops=1', '--warmup=3', '--values=5', '--processes=2']
    )

    with Context() as context:
        context.run()


if __name__ == '__main__':
    main()