# Example: a Scrapy spider that sleeps inside its async parse callback.
import sys
from twisted.internet.defer import Deferred
import scrapy
from scrapy.crawler import CrawlerProcess
from scrapy.utils.defer import maybe_deferred_to_future
class SleepingSpider(scrapy.Spider):
    """Spider that pauses inside its ``parse`` callback.

    Demonstrates awaiting a Twisted ``Deferred`` from an async callback via
    ``maybe_deferred_to_future``. The sleep duration (seconds) is taken from
    the first command-line argument, defaulting to 3 when absent or invalid.
    """

    name = "sleeping"
    # A "data:" URL so the spider produces a response without any network I/O.
    start_urls = ["data:,;"]

    async def parse(self, response):
        # Imported lazily: the reactor must not be imported before
        # CrawlerProcess has had a chance to install the one it wants.
        from twisted.internet import reactor

        # Robustness fix: the original did int(sys.argv[1]) unconditionally,
        # which raises IndexError with no argument and ValueError on a
        # non-numeric one. Fall back to a 3-second sleep in either case.
        try:
            delay = int(sys.argv[1])
        except (IndexError, ValueError):
            delay = 3

        d = Deferred()
        # Fire the Deferred after `delay` seconds; awaiting it suspends the
        # callback without blocking the reactor.
        reactor.callLater(delay, d.callback, None)
        await maybe_deferred_to_future(d)
# Guarded entry point: without this, merely importing the module would
# start a crawl as a side effect.
if __name__ == "__main__":
    process = CrawlerProcess(settings={})
    process.crawl(SleepingSpider)
    # Blocks until the crawl finishes and the reactor stops.
    process.start()