File: benchmark.py

package: python-urllib3 1.26.12-1+deb12u1 (Debian bookworm, area: main)
  • size: 2,652 kB
  • sloc: python: 17,628; makefile: 130; sh: 18
file content: 76 lines, 2,085 bytes
#!/usr/bin/env python

"""
A rudimentary benchmark comparing urllib3's connection pooling against the
standard urllib module, to demonstrate the benefit of connection re-use.
"""
from __future__ import print_function

import sys
import time
try:
    from urllib.request import urlopen  # Python 3
except ImportError:
    from urllib import urlopen  # Python 2

# Allow importing urllib3 from the parent directory (e.g. a local checkout).
sys.path.append("../")
import urllib3  # noqa: E402

# URLs to download. The specific pages don't matter, as long as they're all on
# the same host so we can take advantage of connection re-use (see the note
# after this list).
TO_DOWNLOAD = [
    "http://code.google.com/apis/apps/",
    "http://code.google.com/apis/base/",
    "http://code.google.com/apis/blogger/",
    "http://code.google.com/apis/calendar/",
    "http://code.google.com/apis/codesearch/",
    "http://code.google.com/apis/contact/",
    "http://code.google.com/apis/books/",
    "http://code.google.com/apis/documents/",
    "http://code.google.com/apis/finance/",
    "http://code.google.com/apis/health/",
    "http://code.google.com/apis/notebook/",
    "http://code.google.com/apis/picasaweb/",
    "http://code.google.com/apis/spreadsheets/",
    "http://code.google.com/apis/webmastertools/",
    "http://code.google.com/apis/youtube/",
]
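# Note (not in the original file): PoolManager keys its pools by scheme, host
# and port, so every URL above resolves to the same HTTPConnectionPool and its
# keep-alive connections are reused between requests. A quick way to see this:
#
#     pm = urllib3.PoolManager()
#     pool_a = pm.connection_from_url(TO_DOWNLOAD[0])
#     pool_b = pm.connection_from_url(TO_DOWNLOAD[1])
#     assert pool_a is pool_b  # same host -> same pool object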


def urllib_get(url_list):
    assert url_list
    for url in url_list:
        now = time.time()
        urlopen(url)
        elapsed = time.time() - now
        print("Got in %0.3fs: %s" % (elapsed, url))


def pool_get(url_list):
    assert url_list
    pool = urllib3.PoolManager()
    for url in url_list:
        now = time.time()
        pool.request("GET", url, assert_same_host=False)
        elapsed = time.time() - now
        print("Got in %0.3fs: %s" % (elapsed, url))

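# Hedged sketch (not part of the original benchmark, and not called by the
# __main__ block below): the same measurement against a single
# HTTPConnectionPool pinned to the shared host via urllib3.connection_from_url.
# The function name single_pool_get is made up for illustration.
def single_pool_get(url_list):
    assert url_list
    # One pool for the single host all URLs share; its keep-alive
    # connections serve every request.
    pool = urllib3.connection_from_url(url_list[0])
    for url in url_list:
        now = time.time()
        # redirect=False keeps the request on this host's pool.
        pool.request("GET", url, redirect=False)
        elapsed = time.time() - now
        print("Got in %0.3fs: %s" % (elapsed, url))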

if __name__ == "__main__":
    print("Running pool_get ...")
    now = time.time()
    pool_get(TO_DOWNLOAD)
    pool_elapsed = time.time() - now

    print("Running urllib_get ...")
    now = time.time()
    urllib_get(TO_DOWNLOAD)
    urllib_elapsed = time.time() - now

    print("Completed pool_get in %0.3fs" % pool_elapsed)
    print("Completed urllib_get in %0.3fs" % urllib_elapsed)


"""
Example results:

Completed pool_get in 1.163s
Completed urllib_get in 2.318s
"""