From: Matthias Bussonnier <bussonniermatthias@gmail.com>
Date: Tue, 3 Jan 2023 11:57:18 +0100
Subject: Fix tests for pygments > 2.14

Pygments 2.14+ has the bash lexer return some tokens as
Text.Whitespace instead of Text; this updates the tests to support this.
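
For reference, a minimal sketch of the behavior change (the exact
token stream depends on the installed Pygments version):

    from pygments.lexers import BashLexer

    # Trailing newline token: (Token.Text, '\n') on Pygments < 2.14,
    # (Token.Text.Whitespace, '\n') on Pygments 2.14+.
    print(list(BashLexer().get_tokens("echo $HOME\n"))[-1])

Where the two versions disagree only in such trailing whitespace
tokens, the tests below compare the streams with the last token
sliced off ([:-1]); elsewhere the expected token is rewritten to
Text.Whitespace when running under 2.14+.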
---
IPython/lib/tests/test_lexers.py | 52 +++++++++++++++++++++++-----------------
1 file changed, 30 insertions(+), 22 deletions(-)
diff --git a/IPython/lib/tests/test_lexers.py b/IPython/lib/tests/test_lexers.py
index efa00d6..000b8fe 100644
--- a/IPython/lib/tests/test_lexers.py
+++ b/IPython/lib/tests/test_lexers.py
@@ -4,11 +4,14 @@
# Distributed under the terms of the Modified BSD License.
from unittest import TestCase
+from pygments import __version__ as pygments_version
from pygments.token import Token
from pygments.lexers import BashLexer
from .. import lexers
+pyg214 = tuple(int(x) for x in pygments_version.split(".")[:2]) >= (2, 14)
+
class TestLexers(TestCase):
"""Collection of lexers tests"""
@@ -18,25 +21,26 @@ def setUp(self):
def testIPythonLexer(self):
fragment = '!echo $HOME\n'
- tokens = [
+ bash_tokens = [
(Token.Operator, '!'),
]
- tokens.extend(self.bash_lexer.get_tokens(fragment[1:]))
- self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
+ bash_tokens.extend(self.bash_lexer.get_tokens(fragment[1:]))
+ ipylex_token = list(self.lexer.get_tokens(fragment))
+ assert bash_tokens[:-1] == ipylex_token[:-1]
- fragment_2 = '!' + fragment
+ fragment_2 = "!" + fragment
tokens_2 = [
(Token.Operator, '!!'),
- ] + tokens[1:]
- self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))
+ ] + bash_tokens[1:]
+ assert tokens_2[:-1] == list(self.lexer.get_tokens(fragment_2))[:-1]
fragment_2 = '\t %%!\n' + fragment[1:]
tokens_2 = [
(Token.Text, '\t '),
(Token.Operator, '%%!'),
(Token.Text, '\n'),
- ] + tokens[1:]
- self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))
+ ] + bash_tokens[1:]
+ assert tokens_2 == list(self.lexer.get_tokens(fragment_2))
fragment_2 = 'x = ' + fragment
tokens_2 = [
@@ -44,8 +48,8 @@ def testIPythonLexer(self):
(Token.Text, ' '),
(Token.Operator, '='),
(Token.Text, ' '),
- ] + tokens
- self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))
+ ] + bash_tokens
+ assert tokens_2[:-1] == list(self.lexer.get_tokens(fragment_2))[:-1]
fragment_2 = 'x, = ' + fragment
tokens_2 = [
@@ -54,8 +58,8 @@ def testIPythonLexer(self):
(Token.Text, ' '),
(Token.Operator, '='),
(Token.Text, ' '),
- ] + tokens
- self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))
+ ] + bash_tokens
+ assert tokens_2[:-1] == list(self.lexer.get_tokens(fragment_2))[:-1]
fragment_2 = 'x, = %sx ' + fragment[1:]
tokens_2 = [
@@ -67,8 +71,10 @@ def testIPythonLexer(self):
(Token.Operator, '%'),
(Token.Keyword, 'sx'),
(Token.Text, ' '),
- ] + tokens[1:]
- self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))
+ ] + bash_tokens[1:]
+ if tokens_2[7] == (Token.Text, " ") and pyg214: # pygments 2.14+
+ tokens_2[7] = (Token.Text.Whitespace, " ")
+ assert tokens_2[:-1] == list(self.lexer.get_tokens(fragment_2))[:-1]
fragment_2 = 'f = %R function () {}\n'
tokens_2 = [
@@ -80,7 +86,7 @@ def testIPythonLexer(self):
(Token.Keyword, 'R'),
(Token.Text, ' function () {}\n'),
]
- self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))
+ assert tokens_2 == list(self.lexer.get_tokens(fragment_2))
fragment_2 = '\t%%xyz\n$foo\n'
tokens_2 = [
@@ -89,7 +95,7 @@ def testIPythonLexer(self):
(Token.Keyword, 'xyz'),
(Token.Text, '\n$foo\n'),
]
- self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))
+ assert tokens_2 == list(self.lexer.get_tokens(fragment_2))
fragment_2 = '%system?\n'
tokens_2 = [
@@ -98,7 +104,7 @@ def testIPythonLexer(self):
(Token.Operator, '?'),
(Token.Text, '\n'),
]
- self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))
+ assert tokens_2[:-1] == list(self.lexer.get_tokens(fragment_2))[:-1]
fragment_2 = 'x != y\n'
tokens_2 = [
@@ -109,7 +115,7 @@ def testIPythonLexer(self):
(Token.Name, 'y'),
(Token.Text, '\n'),
]
- self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))
+ assert tokens_2[:-1] == list(self.lexer.get_tokens(fragment_2))[:-1]
fragment_2 = ' ?math.sin\n'
tokens_2 = [
@@ -118,7 +124,7 @@ def testIPythonLexer(self):
(Token.Text, 'math.sin'),
(Token.Text, '\n'),
]
- self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))
+ assert tokens_2[:-1] == list(self.lexer.get_tokens(fragment_2))[:-1]
fragment = ' *int*?\n'
tokens = [
@@ -126,7 +132,7 @@ def testIPythonLexer(self):
(Token.Operator, '?'),
(Token.Text, '\n'),
]
- self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
+ assert tokens == list(self.lexer.get_tokens(fragment))
fragment = '%%writefile -a foo.py\nif a == b:\n pass'
tokens = [
@@ -145,7 +151,9 @@ def testIPythonLexer(self):
(Token.Keyword, 'pass'),
(Token.Text, '\n'),
]
- self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
+ if tokens[10] == (Token.Text, "\n") and pyg214: # pygments 2.14+
+ tokens[10] = (Token.Text.Whitespace, "\n")
+ assert tokens[:-1] == list(self.lexer.get_tokens(fragment))[:-1]
fragment = '%%timeit\nmath.sin(0)'
tokens = [
@@ -173,4 +181,4 @@ def testIPythonLexer(self):
(Token.Punctuation, '>'),
(Token.Text, '\n'),
]
- self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
+ assert tokens == list(self.lexer.get_tokens(fragment))