# Distributed under the terms of the Modified BSD License.

from unittest import TestCase

from pygments import __version__ as pygments_version
from pygments.token import Token
from pygments.lexers import BashLexer

from .. import lexers

# True when the installed Pygments is >= 2.14. Used below because 2.14
# started emitting Token.Text.Whitespace where older versions emitted
# plain Token.Text (see the pyg214-guarded token swaps in the tests).
pyg214 = tuple(int(x) for x in pygments_version.split(".")[:2]) >= (2, 14)
class TestLexers(TestCase):
    """Collection of lexers tests"""
# diff hunk "@@ -18,34 +21,35 @@ def testIPythonLexer(self):" — the body of
# TestLexers.setUp was elided by this capture; restore it from version control.
# NOTE(review): the span below is unified-diff residue, not valid Python.
# Each line carries fused old/new line numbers (e.g. "1922", "24-", "27+");
# "-" lines are the pre-change version, "+" lines the post-change version,
# and "@@ ... @@" hunk headers mark elisions. The post-change method renames
# `tokens` to `bash_tokens`, replaces self.assertEqual with bare asserts
# (mostly comparing [:-1] slices to ignore the trailing token), and adds
# pyg214-guarded swaps of Token.Text -> Token.Text.Whitespace for
# Pygments 2.14+. Several expected-token lists are only partially visible
# (their heads fall inside elided hunks), so this block must be restored
# from version control rather than hand-reconstructed — TODO confirm
# against the upstream test_lexers.py before running.
1922 def testIPythonLexer (self ):
2023 fragment = '!echo $HOME\n '
21- tokens = [
24+ bash_tokens = [
2225 (Token .Operator , '!' ),
2326 ]
24- tokens .extend (self .bash_lexer .get_tokens (fragment [1 :]))
25- self .assertEqual (tokens , list (self .lexer .get_tokens (fragment )))
27+ bash_tokens .extend (self .bash_lexer .get_tokens (fragment [1 :]))
28+ ipylex_token = list (self .lexer .get_tokens (fragment ))
29+ assert bash_tokens [:- 1 ] == ipylex_token [:- 1 ]
2630
27- fragment_2 = '!' + fragment
31+ fragment_2 = "!" + fragment
2832 tokens_2 = [
2933 (Token .Operator , '!!' ),
30- ] + tokens [1 :]
31- self . assertEqual ( tokens_2 , list (self .lexer .get_tokens (fragment_2 )))
34+ ] + bash_tokens [1 :]
35+ assert tokens_2 [: - 1 ] == list (self .lexer .get_tokens (fragment_2 ))[: - 1 ]
3236
3337 fragment_2 = '\t %%!\n ' + fragment [1 :]
3438 tokens_2 = [
3539 (Token .Text , '\t ' ),
3640 (Token .Operator , '%%!' ),
3741 (Token .Text , '\n ' ),
38- ] + tokens [1 :]
39- self . assertEqual ( tokens_2 , list (self .lexer .get_tokens (fragment_2 ) ))
42+ ] + bash_tokens [1 :]
43+ assert tokens_2 == list (self .lexer .get_tokens (fragment_2 ))
4044
4145 fragment_2 = 'x = ' + fragment
4246 tokens_2 = [
4347 (Token .Name , 'x' ),
4448 (Token .Text , ' ' ),
4549 (Token .Operator , '=' ),
4650 (Token .Text , ' ' ),
47- ] + tokens
48- self . assertEqual ( tokens_2 , list (self .lexer .get_tokens (fragment_2 )))
51+ ] + bash_tokens
52+ assert tokens_2 [: - 1 ] == list (self .lexer .get_tokens (fragment_2 ))[: - 1 ]
4953
5054 fragment_2 = 'x, = ' + fragment
5155 tokens_2 = [
# NOTE(review): hunk below elides the head of this token list.
@@ -54,8 +58,8 @@ def testIPythonLexer(self):
5458 (Token .Text , ' ' ),
5559 (Token .Operator , '=' ),
5660 (Token .Text , ' ' ),
57- ] + tokens
58- self . assertEqual ( tokens_2 , list (self .lexer .get_tokens (fragment_2 )))
61+ ] + bash_tokens
62+ assert tokens_2 [: - 1 ] == list (self .lexer .get_tokens (fragment_2 ))[: - 1 ]
5963
6064 fragment_2 = 'x, = %sx ' + fragment [1 :]
6165 tokens_2 = [
@@ -67,8 +71,10 @@ def testIPythonLexer(self):
6771 (Token .Operator , '%' ),
6872 (Token .Keyword , 'sx' ),
6973 (Token .Text , ' ' ),
70- ] + tokens [1 :]
71- self .assertEqual (tokens_2 , list (self .lexer .get_tokens (fragment_2 )))
74+ ] + bash_tokens [1 :]
75+ if tokens_2 [7 ] == (Token .Text , " " ) and pyg214 : # pygments 2.14+
76+ tokens_2 [7 ] = (Token .Text .Whitespace , " " )
77+ assert tokens_2 [:- 1 ] == list (self .lexer .get_tokens (fragment_2 ))[:- 1 ]
7278
7379 fragment_2 = 'f = %R function () {}\n '
7480 tokens_2 = [
@@ -80,7 +86,7 @@ def testIPythonLexer(self):
8086 (Token .Keyword , 'R' ),
8187 (Token .Text , ' function () {}\n ' ),
8288 ]
83- self . assertEqual ( tokens_2 , list (self .lexer .get_tokens (fragment_2 ) ))
89+ assert tokens_2 == list (self .lexer .get_tokens (fragment_2 ))
8490
8591 fragment_2 = '\t %%xyz\n $foo\n '
8692 tokens_2 = [
@@ -89,7 +95,7 @@ def testIPythonLexer(self):
8995 (Token .Keyword , 'xyz' ),
9096 (Token .Text , '\n $foo\n ' ),
9197 ]
92- self . assertEqual ( tokens_2 , list (self .lexer .get_tokens (fragment_2 ) ))
98+ assert tokens_2 == list (self .lexer .get_tokens (fragment_2 ))
9399
94100 fragment_2 = '%system?\n '
95101 tokens_2 = [
@@ -98,7 +104,7 @@ def testIPythonLexer(self):
98104 (Token .Operator , '?' ),
99105 (Token .Text , '\n ' ),
100106 ]
101- self . assertEqual ( tokens_2 , list (self .lexer .get_tokens (fragment_2 )))
107+ assert tokens_2 [: - 1 ] == list (self .lexer .get_tokens (fragment_2 ))[: - 1 ]
102108
103109 fragment_2 = 'x != y\n '
104110 tokens_2 = [
@@ -109,7 +115,7 @@ def testIPythonLexer(self):
109115 (Token .Name , 'y' ),
110116 (Token .Text , '\n ' ),
111117 ]
112- self . assertEqual ( tokens_2 , list (self .lexer .get_tokens (fragment_2 )))
118+ assert tokens_2 [: - 1 ] == list (self .lexer .get_tokens (fragment_2 ))[: - 1 ]
113119
114120 fragment_2 = ' ?math.sin\n '
115121 tokens_2 = [
@@ -118,15 +124,15 @@ def testIPythonLexer(self):
118124 (Token .Text , 'math.sin' ),
119125 (Token .Text , '\n ' ),
120126 ]
121- self . assertEqual ( tokens_2 , list (self .lexer .get_tokens (fragment_2 )))
127+ assert tokens_2 [: - 1 ] == list (self .lexer .get_tokens (fragment_2 ))[: - 1 ]
122128
123129 fragment = ' *int*?\n '
124130 tokens = [
125131 (Token .Text , ' *int*' ),
126132 (Token .Operator , '?' ),
127133 (Token .Text , '\n ' ),
128134 ]
129- self . assertEqual ( tokens , list (self .lexer .get_tokens (fragment ) ))
135+ assert tokens == list (self .lexer .get_tokens (fragment ))
130136
131137 fragment = '%%writefile -a foo.py\n if a == b:\n pass'
132138 tokens = [
# NOTE(review): hunk below elides most of the %%writefile expected-token list.
@@ -145,7 +151,9 @@ def testIPythonLexer(self):
145151 (Token .Keyword , 'pass' ),
146152 (Token .Text , '\n ' ),
147153 ]
148- self .assertEqual (tokens , list (self .lexer .get_tokens (fragment )))
154+ if tokens [10 ] == (Token .Text , "\n " ) and pyg214 : # pygments 2.14+
155+ tokens [10 ] = (Token .Text .Whitespace , "\n " )
156+ assert tokens [:- 1 ] == list (self .lexer .get_tokens (fragment ))[:- 1 ]
149157
150158 fragment = '%%timeit\n math.sin(0)'
151159 tokens = [
# NOTE(review): hunk below elides ~22 lines of the new file (the %%timeit
# token list and at least one further fragment); only the tail survives.
@@ -173,4 +181,4 @@ def testIPythonLexer(self):
173181 (Token .Punctuation , '>' ),
174182 (Token .Text , '\n ' ),
175183 ]
176- self . assertEqual ( tokens , list (self .lexer .get_tokens (fragment ) ))
184+ assert tokens == list (self .lexer .get_tokens (fragment ))
# (end of capture — "0 commit comments" was GitHub page chrome, not file content)