File: test_tokenizer.py

Package: mopidy-mpd 3.3.0-1 (Debian area: main; suites: bookworm, forky, sid, trixie).
Package size: 680 kB; SLOC: Python 7,641, Makefile 3.
This file: 147 lines, 6,427 bytes.
import unittest

from mopidy_mpd import exceptions, tokenize


class TestTokenizer(unittest.TestCase):
    """Exercise ``tokenize.split`` against the MPD protocol line grammar."""

    def assertTokenizeEquals(self, expected, line):  # noqa: N802
        """Check that *line* splits into exactly the *expected* token list."""
        self.assertEqual(tokenize.split(line), expected)

    def assertTokenizeRaises(self, exception, message, line):  # noqa: N802
        """Check that splitting *line* raises *exception* carrying *message*."""
        with self.assertRaises(exception) as cm:
            tokenize.split(line)
        self.assertEqual(cm.exception.message, message)

    def test_empty_string(self):
        # Whitespace-only lines carry no command at all.
        for line in ("", "      ", "\t\t\t"):
            self.assertTokenizeRaises(
                exceptions.MpdNoCommand, "No command given", line
            )

    def test_command(self):
        for line in ("test", "test123", "foo_bar"):
            self.assertTokenizeEquals([line], line)

    def test_command_trailing_whitespace(self):
        for line in ("test   ", "test\t\t\t"):
            self.assertTokenizeEquals(["test"], line)

    def test_command_leading_whitespace(self):
        for line in ("  test", "\ttest"):
            self.assertTokenizeRaises(
                exceptions.MpdUnknownError, "Letter expected", line
            )

    def test_invalid_command(self):
        for line in ("foo/bar", "æøå", "test?", 'te"st'):
            self.assertTokenizeRaises(
                exceptions.MpdUnknownError, "Invalid word character", line
            )

    def test_unquoted_param(self):
        for line in ("test param", "test\tparam"):
            self.assertTokenizeEquals(["test", "param"], line)

    def test_unquoted_param_leading_whitespace(self):
        for line in ("test  param", "test\t\tparam"):
            self.assertTokenizeEquals(["test", "param"], line)

    def test_unquoted_param_trailing_whitespace(self):
        for line in ("test param  ", "test param\t\t"):
            self.assertTokenizeEquals(["test", "param"], line)

    def test_unquoted_param_invalid_chars(self):
        for line in (
            'test par"m',
            "test foo\bbar",
            'test foo"bar"baz',
            "test foo'bar",
        ):
            self.assertTokenizeRaises(
                exceptions.MpdArgError, "Invalid unquoted character", line
            )

    def test_unquoted_param_numbers(self):
        for value in ("123", "+123", "-123", "3.14"):
            self.assertTokenizeEquals(["test", value], f"test {value}")

    def test_unquoted_param_extended_chars(self):
        for value in ("æøå", "?#$", "/foo/bar/", "foo\\bar"):
            self.assertTokenizeEquals(["test", value], f"test {value}")

    def test_unquoted_params(self):
        self.assertTokenizeEquals(["test", "foo", "bar"], "test foo bar")

    def test_quoted_param(self):
        for line in ('test "param"', 'test\t"param"'):
            self.assertTokenizeEquals(["test", "param"], line)

    def test_quoted_param_leading_whitespace(self):
        for line in ('test  "param"', 'test\t\t"param"'):
            self.assertTokenizeEquals(["test", "param"], line)

    def test_quoted_param_trailing_whitespace(self):
        for line in ('test "param"  ', 'test "param"\t\t'):
            self.assertTokenizeEquals(["test", "param"], line)

    def test_quoted_param_invalid_chars(self):
        # A closing quote must be followed by whitespace or end of line.
        for line in (
            'test "foo"bar"',
            'test "foo"bar" ',
            'test "foo"bar',
            'test "foo"bar ',
        ):
            self.assertTokenizeRaises(
                exceptions.MpdArgError,
                "Space expected after closing '\"'",
                line,
            )

    def test_quoted_param_numbers(self):
        for value in ("123", "+123", "-123", "3.14"):
            self.assertTokenizeEquals(["test", value], f'test "{value}"')

    def test_quoted_param_spaces(self):
        # Quoting preserves embedded (and leading/trailing) whitespace.
        cases = (
            ("foo bar", 'test "foo bar"'),
            ("foo bar", 'test "foo bar"'),
            (" param\t", 'test " param\t"'),
        )
        for value, line in cases:
            self.assertTokenizeEquals(["test", value], line)

    def test_quoted_param_extended_chars(self):
        for value in ("æøå", "?#$", "/foo/bar/"):
            self.assertTokenizeEquals(["test", value], f'test "{value}"')

    def test_quoted_param_escaping(self):
        # Inside quotes, a backslash escapes the following character.
        cases = (
            ("\\", r'test "\\"'),
            ('"', r'test "\""'),
            (" ", r'test "\ "'),
            ("\\n", r'test "\\\n"'),
        )
        for value, line in cases:
            self.assertTokenizeEquals(["test", value], line)

    def test_quoted_params(self):
        self.assertTokenizeEquals(["test", "foo", "bar"], 'test "foo" "bar"')

    def test_mixed_params(self):
        cases = (
            (["test", "foo", "bar"], 'test foo "bar"'),
            (["test", "foo", "bar"], 'test "foo" bar'),
            (["test", "1", "2"], 'test 1 "2"'),
            (["test", "1", "2"], 'test "1" 2'),
            (["test", "foo bar", "baz", "123"], 'test "foo bar" baz 123'),
            (["test", 'foo"bar', "baz", "123"], r'test "foo\"bar" baz 123'),
        )
        for expected, line in cases:
            self.assertTokenizeEquals(expected, line)

    def test_unbalanced_quotes(self):
        self.assertTokenizeRaises(
            exceptions.MpdArgError,
            "Invalid unquoted character",
            'test "foo bar" baz"',
        )

    def test_missing_closing_quote(self):
        for line in ('test "foo', 'test "foo a '):
            self.assertTokenizeRaises(
                exceptions.MpdArgError, "Missing closing '\"'", line
            )