File: test_docstring_detection.py

package info (click to toggle)
flake8-quotes 3.4.0-4
  • links: PTS, VCS
  • area: main
  • in suites: forky, sid, trixie
  • size: 300 kB
  • sloc: python: 938; sh: 28; makefile: 2
file content (45 lines) | stat: -rw-r--r-- 2,540 bytes parent folder | download | duplicates (2)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
import tokenize
from unittest import TestCase

from flake8_quotes import Token, get_docstring_tokens
from test.test_checks import get_absolute_path


class GetDocstringTokensTests(TestCase):
    """Tests for ``get_docstring_tokens``: it should return the set of tokens
    that are module/class/function docstrings, and nothing else."""

    def _get_docstring_tokens(self, filename):
        """Tokenize the fixture file at *filename* and return the set of
        docstring tokens found by ``get_docstring_tokens``."""
        with open(get_absolute_path(filename), 'r') as f:
            tokens = [Token(t) for t in tokenize.generate_tokens(f.readline)]
        return get_docstring_tokens(tokens)

    def _get_docstring_strings(self, filename):
        """Like ``_get_docstring_tokens`` but return the raw source text of
        each docstring token, which is easier to assert against."""
        return {t.string for t in self._get_docstring_tokens(filename)}

    def test_get_docstring_tokens_absent(self):
        """Files with ordinary strings (no docstrings) yield an empty set."""
        self.assertEqual(self._get_docstring_tokens('data/doubles.py'), set())
        self.assertEqual(self._get_docstring_tokens('data/doubles_multiline_string.py'), set())
        self.assertEqual(self._get_docstring_tokens('data/doubles_noqa.py'), set())
        self.assertEqual(self._get_docstring_tokens('data/doubles_wrapped.py'), set())
        self.assertEqual(self._get_docstring_tokens('data/multiline_string.py'), set())
        self.assertEqual(self._get_docstring_tokens('data/no_qa.py'), set())
        self.assertEqual(self._get_docstring_tokens('data/singles.py'), set())
        self.assertEqual(self._get_docstring_tokens('data/singles_multiline_string.py'), set())
        self.assertEqual(self._get_docstring_tokens('data/singles_noqa.py'), set())
        self.assertEqual(self._get_docstring_tokens('data/singles_wrapped.py'), set())
        self.assertEqual(self._get_docstring_tokens('data/docstring_not_docstrings.py'), set())

    def test_get_docstring_tokens_doubles(self):
        """Module, class, and function docstrings using double quotes are all detected."""
        # Was previously duplicating the open/tokenize logic inline;
        # now consistent with the helper used by the other tests.
        self.assertEqual(self._get_docstring_strings('data/docstring_doubles.py'), {
            '"""\nDouble quotes multiline module docstring\n"""',
            '"""\n    Double quotes multiline class docstring\n    """',
            '"""\n        Double quotes multiline function docstring\n        """',
        })

    def test_get_docstring_tokens_singles(self):
        """Module, class, and function docstrings using single quotes are all detected."""
        self.assertEqual(self._get_docstring_strings('data/docstring_singles.py'), {
            "'''\nSingle quotes multiline module docstring\n'''",
            "'''\n    Single quotes multiline class docstring\n    '''",
            "'''\n        Single quotes multiline function docstring\n        '''",
        })