"""
A spider that generate light requests to measure QPS throughput
usage:
scrapy runspider qpsclient.py --loglevel=INFO --set RANDOMIZE_DOWNLOAD_DELAY=0
--set CONCURRENT_REQUESTS=50 -a qps=10 -a latency=0.3
"""
from scrapy.http import Request
from scrapy.spiders import Spider


class QPSSpider(Spider):
    name = "qps"
    benchurl = "http://localhost:8880/"

    # Max concurrency is limited by the global CONCURRENT_REQUESTS setting
    max_concurrent_requests = 8
    # Requests per second goal
    qps = None  # same as: 1 / download_delay
    download_delay = None
    # Time in seconds to delay server responses
    latency = None
    # Number of slots to create
    slots = 1

    def __init__(self, *a, **kw):
        super().__init__(*a, **kw)
        if self.qps is not None:
            self.qps = float(self.qps)
            self.download_delay = 1 / self.qps
        elif self.download_delay is not None:
            self.download_delay = float(self.download_delay)
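        # Worked example: `-a qps=10` gives download_delay = 1 / 10 = 0.1 s,
        # i.e. each downloader slot issues at most ten requests per second.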

    # start() is the asynchronous entry point; it delegates to
    # start_requests() so both yield the same stream of requests.
    async def start(self):
        for item_or_request in self.start_requests():
            yield item_or_request

    def start_requests(self):
        url = self.benchurl
        if self.latency is not None:
            url += f"?latency={self.latency}"
        slots = int(self.slots)
        if slots > 1:
            # One distinct loopback address per slot, so each slot gets
            # its own download-delay budget
            urls = [url.replace("localhost", f"127.0.0.{x + 1}") for x in range(slots)]
        else:
            urls = [url]

        # Round-robin over the slot URLs forever; dont_filter bypasses the
        # duplicate filter so the same URL can be requested repeatedly
        idx = 0
        while True:
            url = urls[idx % len(urls)]
            yield Request(url, dont_filter=True)
            idx += 1
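
    # Responses are discarded: the spider only measures how fast requests
    # can be issued, so there is nothing to parse.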
    def parse(self, response):
        pass
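

# A multi-slot run might look like this (hypothetical numbers; each slot
# applies its own download delay, so the target throughput is qps * slots):
#
#   scrapy runspider qpsclient.py --loglevel=INFO --set RANDOMIZE_DOWNLOAD_DELAY=0 --set CONCURRENT_REQUESTS=50 -a qps=10 -a slots=4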