File: test_utils_project.py

import contextlib
import os
import shutil
import tempfile
import unittest

from pytest import warns

from scrapy.exceptions import ScrapyDeprecationWarning
from scrapy.utils.project import data_path, get_project_settings


@contextlib.contextmanager
def inside_a_project():
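    """Create a temporary directory containing an empty scrapy.cfg, chdir
    into it for the duration of the block, and clean it up afterwards."""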
    prev_dir = os.getcwd()
    project_dir = tempfile.mkdtemp()

    try:
        os.chdir(project_dir)
        # create an empty scrapy.cfg
        with open('scrapy.cfg', 'w'):
            pass

        yield project_dir
    finally:
        os.chdir(prev_dir)
        shutil.rmtree(project_dir)


class ProjectUtilsTest(unittest.TestCase):
    def test_data_path_outside_project(self):
        self.assertEqual(
            os.path.join('.scrapy', 'somepath'),
            data_path('somepath')
        )
        abspath = os.path.join(os.path.sep, 'absolute', 'path')
        self.assertEqual(abspath, data_path(abspath))

    def test_data_path_inside_project(self):
        with inside_a_project() as proj_path:
            expected = os.path.join(proj_path, '.scrapy', 'somepath')
            self.assertEqual(
                os.path.realpath(expected),
                os.path.realpath(data_path('somepath'))
            )
            abspath = os.path.join(os.path.sep, 'absolute', 'path')
            self.assertEqual(abspath, data_path(abspath))


@contextlib.contextmanager
def set_env(**update):
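    """Temporarily apply the given environment variables, restoring previous
    values (and removing newly added keys) on exit."""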
    modified = set(update.keys()) & set(os.environ.keys())
    update_after = {k: os.environ[k] for k in modified}
    remove_after = frozenset(k for k in update if k not in os.environ)
    try:
        os.environ.update(update)
        yield
    finally:
        os.environ.update(update_after)
        for k in remove_after:
            os.environ.pop(k)


class GetProjectSettingsTestCase(unittest.TestCase):

    def test_valid_envvar(self):
        value = 'tests.test_cmdline.settings'
        envvars = {
            'SCRAPY_SETTINGS_MODULE': value,
        }
        with set_env(**envvars), warns(None) as warnings:
            settings = get_project_settings()
        assert not warnings
        assert settings.get('SETTINGS_MODULE') == value

    def test_invalid_envvar(self):
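        # An unrecognized SCRAPY_-prefixed variable (here SCRAPY_FOO) is
        # expected to trigger exactly one ScrapyDeprecationWarning.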
        envvars = {
            'SCRAPY_FOO': 'bar',
        }
        with set_env(**envvars), warns(None) as warnings:
            get_project_settings()
        assert len(warnings) == 1
        assert warnings[0].category == ScrapyDeprecationWarning
        assert str(warnings[0].message).endswith(': FOO')

    def test_valid_and_invalid_envvars(self):
        value = 'tests.test_cmdline.settings'
        envvars = {
            'SCRAPY_FOO': 'bar',
            'SCRAPY_SETTINGS_MODULE': value,
        }
        with set_env(**envvars), warns(None) as warnings:
            settings = get_project_settings()
        assert len(warnings) == 1
        assert warnings[0].category == ScrapyDeprecationWarning
        assert str(warnings[0].message).endswith(': FOO')
        assert settings.get('SETTINGS_MODULE') == value