File: multi_parallel.py

Package: python-scrapy 2.14.0-1 (Debian suites: forky, sid)
File size: 576 bytes (26 lines)
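
This script demonstrates running two crawls in parallel in a single process: CrawlerRunner.crawl() schedules each crawl without blocking, and runner.join() returns a Deferred that fires once every scheduled crawl has finished. The spider issues no requests at all, so both crawls open and close almost immediately.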
from twisted.internet.task import react

from scrapy import Spider
from scrapy.crawler import CrawlerRunner
from scrapy.utils.log import configure_logging
from scrapy.utils.reactor import install_reactor


class NoRequestsSpider(Spider):
    name = "no_request"

    async def start(self):
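        # An empty async generator: the unreachable yield makes this a
        # generator function, while the bare return ends it before any
        # request is yielded, so the spider makes no requests at all.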
        return
        yield


def main(reactor):
    configure_logging()
    runner = CrawlerRunner()
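    # crawl() only schedules each crawl and returns a Deferred without
    # blocking, so the two crawls run in parallel; join() returns a
    # Deferred that fires once every scheduled crawl has finished.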
    runner.crawl(NoRequestsSpider)
    runner.crawl(NoRequestsSpider)
    return runner.join()


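# The asyncio reactor must be installed before react() starts it.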
install_reactor("twisted.internet.asyncioreactor.AsyncioSelectorReactor")
react(main)
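
For contrast, the same runner can also execute the crawls sequentially by waiting on each Deferred returned by crawl() before starting the next. Below is a minimal sketch of that variant; it assumes the NoRequestsSpider, CrawlerRunner, and configure_logging definitions above, and the function name main_sequential is illustrative, not part of the original file.

from twisted.internet import defer


def main_sequential(reactor):
    configure_logging()
    runner = CrawlerRunner()

    @defer.inlineCallbacks
    def crawl():
        # Yielding each Deferred means the second crawl only starts
        # after the first one has fully finished.
        yield runner.crawl(NoRequestsSpider)
        yield runner.crawl(NoRequestsSpider)

    return crawl()

Passing main_sequential to react() would drive it exactly as react(main) does above.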