File: __init__.py

python-scrapy 0.24.2-1
"""
Item pipeline

See documentation in docs/item-pipeline.rst
"""

from scrapy.middleware import MiddlewareManager
from scrapy.utils.conf import build_component_list

class ItemPipelineManager(MiddlewareManager):

    component_name = 'item pipeline'

    @classmethod
    def _get_mwlist_from_settings(cls, settings):
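        # merge ITEM_PIPELINES_BASE with the user's ITEM_PIPELINES setting into one ordered component list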
        item_pipelines = settings['ITEM_PIPELINES']
        if isinstance(item_pipelines, (tuple, list, set, frozenset)):
            from scrapy.exceptions import ScrapyDeprecationWarning
            import warnings
            warnings.warn('ITEM_PIPELINES defined as a list or a set is deprecated, switch to a dict',
                category=ScrapyDeprecationWarning, stacklevel=1)
            # convert the old ITEM_PIPELINES list/set to a dict with orders starting at 500
            item_pipelines = dict(zip(item_pipelines, range(500, 500+len(item_pipelines))))
        return build_component_list(settings['ITEM_PIPELINES_BASE'], item_pipelines)

    def _add_middleware(self, pipe):
        super(ItemPipelineManager, self)._add_middleware(pipe)
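        # register the pipeline's process_item hook, if it defines one, so it can be chained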
        if hasattr(pipe, 'process_item'):
            self.methods['process_item'].append(pipe.process_item)

    def process_item(self, item, spider):
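        # pass the item through every enabled pipeline's process_item, in order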
        return self._process_chain('process_item', item, spider)
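
A minimal usage sketch, assuming a hypothetical myproject.pipelines module: the manager expects ITEM_PIPELINES to be a dict mapping pipeline import paths to order numbers (lower runs first); the list form is only kept alive by the deprecation shim in _get_mwlist_from_settings.

# settings.py -- the dict form expected by ItemPipelineManager; keys are import
# paths, values are order numbers (lower numbers run earlier). The deprecated
# list form, e.g. ITEM_PIPELINES = ['myproject.pipelines.PriceFilterPipeline'],
# would be converted by the shim to {'myproject.pipelines.PriceFilterPipeline': 500}.
ITEM_PIPELINES = {
    'myproject.pipelines.PriceFilterPipeline': 300,
}

# myproject/pipelines.py -- a pipeline only needs a process_item(item, spider)
# method for _add_middleware to register it; returning the item passes it on,
# raising DropItem stops the chain for that item.
from scrapy.exceptions import DropItem

class PriceFilterPipeline(object):

    def process_item(self, item, spider):
        if not item.get('price'):
            raise DropItem('Missing price in %s' % item)
        return item

With a setting like this, _get_mwlist_from_settings resolves the combined dict through build_component_list, and process_item then chains each scraped item through the enabled pipelines in order.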