 
 This includes:
 
-    IPythonLexer, IPython3Lexer
-        Lexers for pure IPython (python + magic/shell commands)
+    IPython3Lexer
+        Lexer for pure IPython (python + magic/shell commands)
 
     IPythonPartialTracebackLexer, IPythonTracebackLexer
-        Supports 2.x and 3.x via keyword `python3`. The partial traceback
-        lexer reads everything but the Python code appearing in a traceback.
-        The full lexer combines the partial lexer with an IPython lexer.
+        The partial traceback lexer reads everything but the Python code
+        appearing in a traceback.
+        The full lexer combines the partial lexer with the IPython3Lexer.
 
     IPythonConsoleLexer
         A lexer for IPython console sessions, with support for tracebacks.
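For orientation, a minimal usage sketch of these lexers with Pygments (a sketch only: the `IPython.lib.lexers` import path and the sample snippet are assumptions, not part of this diff):

    from pygments import highlight
    from pygments.formatters import HtmlFormatter
    from IPython.lib.lexers import IPython3Lexer  # assumed import path

    code = "%matplotlib inline\nx = [n ** 2 for n in range(5)]\n"
    # Render IPython-flavoured source (magics plus regular Python) to HTML.
    print(highlight(code, IPython3Lexer(), HtmlFormatter()))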
 
 # Third party
 from pygments.lexers import (
-    BashLexer, HtmlLexer, JavascriptLexer, RubyLexer, PerlLexer, PythonLexer,
-    Python3Lexer, TexLexer)
+    BashLexer,
+    HtmlLexer,
+    JavascriptLexer,
+    RubyLexer,
+    PerlLexer,
+    Python3Lexer,
+    TexLexer,
+)
 from pygments.lexer import (
-    Lexer, DelegatingLexer, RegexLexer, do_insertions, bygroups, using,
+    Lexer,
+    DelegatingLexer,
+    RegexLexer,
+    do_insertions,
+    bygroups,
+    using,
+    inherit,
 )
 from pygments.token import (
     Generic, Keyword, Literal, Name, Operator, Other, Text, Error,
 
 line_re = re.compile('.*?\n')
 
-__all__ = ['build_ipy_lexer', 'IPython3Lexer', 'IPythonLexer',
-           'IPythonPartialTracebackLexer', 'IPythonTracebackLexer',
-           'IPythonConsoleLexer', 'IPyLexer']
-
+__all__ = [
+    "IPython3Lexer",
+    "IPythonPartialTracebackLexer",
+    "IPythonTracebackLexer",
+    "IPythonConsoleLexer",
+    "IPyLexer",
+]
 
-def build_ipy_lexer(python3):
-    """Builds IPython lexers depending on the value of `python3`.
 
-    The lexer inherits from an appropriate Python lexer and then adds
-    information about IPython specific keywords (i.e. magic commands,
-    shell commands, etc.)
+class IPython3Lexer(Python3Lexer):
+    """IPython3 Lexer"""
 
-    Parameters
-    ----------
-    python3 : bool
-        If `True`, then build an IPython lexer from a Python 3 lexer.
+    name = "IPython3"
+    aliases = ["ipython3"]
 
-    """
-    # It would be nice to have a single IPython lexer class which takes
-    # a boolean `python3`. But since there are two Python lexer classes,
-    # we will also have two IPython lexer classes.
-    if python3:
-        PyLexer = Python3Lexer
-        name = 'IPython3'
-        aliases = ['ipython3']
-        doc = """IPython3 Lexer"""
-    else:
-        PyLexer = PythonLexer
-        name = 'IPython'
-        aliases = ['ipython2', 'ipython']
-        doc = """IPython Lexer"""
-
-    ipython_tokens = [
-        (r'(?s)(\s*)(%%capture)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(PyLexer))),
-        (r'(?s)(\s*)(%%debug)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(PyLexer))),
-        (r'(?is)(\s*)(%%html)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(HtmlLexer))),
-        (r'(?s)(\s*)(%%javascript)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(JavascriptLexer))),
-        (r'(?s)(\s*)(%%js)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(JavascriptLexer))),
-        (r'(?s)(\s*)(%%latex)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(TexLexer))),
-        (r'(?s)(\s*)(%%perl)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(PerlLexer))),
-        (r'(?s)(\s*)(%%prun)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(PyLexer))),
-        (r'(?s)(\s*)(%%pypy)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(PyLexer))),
-        (r'(?s)(\s*)(%%python)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(PyLexer))),
-        (r'(?s)(\s*)(%%python2)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(PythonLexer))),
-        (r'(?s)(\s*)(%%python3)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(Python3Lexer))),
-        (r'(?s)(\s*)(%%ruby)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(RubyLexer))),
-        (r'(?s)(\s*)(%%time)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(PyLexer))),
-        (r'(?s)(\s*)(%%timeit)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(PyLexer))),
-        (r'(?s)(\s*)(%%writefile)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(PyLexer))),
-        (r'(?s)(\s*)(%%file)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(PyLexer))),
-        (r"(?s)(\s*)(%%)(\w+)(.*)", bygroups(Text, Operator, Keyword, Text)),
-        (r'(?s)(^\s*)(%%!)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(BashLexer))),
-        (r"(%%?)(\w+)(\?\??)$", bygroups(Operator, Keyword, Operator)),
-        (r"\b(\?\??)(\s*)$", bygroups(Operator, Text)),
-        (r'(%)(sx|sc|system)(.*)(\n)', bygroups(Operator, Keyword,
-                                                using(BashLexer), Text)),
-        (r'(%)(\w+)(.*\n)', bygroups(Operator, Keyword, Text)),
-        (r'^(!!)(.+)(\n)', bygroups(Operator, using(BashLexer), Text)),
-        (r'(!)(?!=)(.+)(\n)', bygroups(Operator, using(BashLexer), Text)),
-        (r'^(\s*)(\?\??)(\s*%{0,2}[\w\.\*]*)', bygroups(Text, Operator, Text)),
-        (r'(\s*%{0,2}[\w\.\*]*)(\?\??)(\s*)$', bygroups(Text, Operator, Text)),
-    ]
-
-    tokens = PyLexer.tokens.copy()
-    tokens['root'] = ipython_tokens + tokens['root']
-
-    attrs = {'name': name, 'aliases': aliases, 'filenames': [],
-             '__doc__': doc, 'tokens': tokens}
-
-    return type(name, (PyLexer,), attrs)
-
-
-IPython3Lexer = build_ipy_lexer(python3=True)
-IPythonLexer = build_ipy_lexer(python3=False)
+    tokens = {
+        "root": [
+            (
+                r"(?s)(\s*)(%%capture)([^\n]*\n)(.*)",
+                bygroups(Text, Operator, Text, using(Python3Lexer)),
+            ),
+            (
+                r"(?s)(\s*)(%%debug)([^\n]*\n)(.*)",
+                bygroups(Text, Operator, Text, using(Python3Lexer)),
+            ),
+            (
+                r"(?is)(\s*)(%%html)([^\n]*\n)(.*)",
+                bygroups(Text, Operator, Text, using(HtmlLexer)),
+            ),
+            (
+                r"(?s)(\s*)(%%javascript)([^\n]*\n)(.*)",
+                bygroups(Text, Operator, Text, using(JavascriptLexer)),
+            ),
+            (
+                r"(?s)(\s*)(%%js)([^\n]*\n)(.*)",
+                bygroups(Text, Operator, Text, using(JavascriptLexer)),
+            ),
+            (
+                r"(?s)(\s*)(%%latex)([^\n]*\n)(.*)",
+                bygroups(Text, Operator, Text, using(TexLexer)),
+            ),
+            (
+                r"(?s)(\s*)(%%perl)([^\n]*\n)(.*)",
+                bygroups(Text, Operator, Text, using(PerlLexer)),
+            ),
+            (
+                r"(?s)(\s*)(%%prun)([^\n]*\n)(.*)",
+                bygroups(Text, Operator, Text, using(Python3Lexer)),
+            ),
+            (
+                r"(?s)(\s*)(%%pypy)([^\n]*\n)(.*)",
+                bygroups(Text, Operator, Text, using(Python3Lexer)),
+            ),
+            (
+                r"(?s)(\s*)(%%python)([^\n]*\n)(.*)",
+                bygroups(Text, Operator, Text, using(Python3Lexer)),
+            ),
+            (
+                r"(?s)(\s*)(%%python3)([^\n]*\n)(.*)",
+                bygroups(Text, Operator, Text, using(Python3Lexer)),
+            ),
+            (
+                r"(?s)(\s*)(%%ruby)([^\n]*\n)(.*)",
+                bygroups(Text, Operator, Text, using(RubyLexer)),
+            ),
+            (
+                r"(?s)(\s*)(%%time)([^\n]*\n)(.*)",
+                bygroups(Text, Operator, Text, using(Python3Lexer)),
+            ),
+            (
+                r"(?s)(\s*)(%%timeit)([^\n]*\n)(.*)",
+                bygroups(Text, Operator, Text, using(Python3Lexer)),
+            ),
+            (
+                r"(?s)(\s*)(%%writefile)([^\n]*\n)(.*)",
+                bygroups(Text, Operator, Text, using(Python3Lexer)),
+            ),
+            (
+                r"(?s)(\s*)(%%file)([^\n]*\n)(.*)",
+                bygroups(Text, Operator, Text, using(Python3Lexer)),
+            ),
+            (r"(?s)(\s*)(%%)(\w+)(.*)", bygroups(Text, Operator, Keyword, Text)),
+            (
+                r"(?s)(^\s*)(%%!)([^\n]*\n)(.*)",
+                bygroups(Text, Operator, Text, using(BashLexer)),
+            ),
+            (r"(%%?)(\w+)(\?\??)$", bygroups(Operator, Keyword, Operator)),
+            (r"\b(\?\??)(\s*)$", bygroups(Operator, Text)),
+            (
+                r"(%)(sx|sc|system)(.*)(\n)",
+                bygroups(Operator, Keyword, using(BashLexer), Text),
+            ),
+            (r"(%)(\w+)(.*\n)", bygroups(Operator, Keyword, Text)),
+            (r"^(!!)(.+)(\n)", bygroups(Operator, using(BashLexer), Text)),
+            (r"(!)(?!=)(.+)(\n)", bygroups(Operator, using(BashLexer), Text)),
+            (r"^(\s*)(\?\??)(\s*%{0,2}[\w\.\*]*)", bygroups(Text, Operator, Text)),
+            (r"(\s*%{0,2}[\w\.\*]*)(\?\??)(\s*)$", bygroups(Text, Operator, Text)),
+            inherit,
+        ]
+    }
 
 
 class IPythonPartialTracebackLexer(RegexLexer):
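The rules above give magics and shell escapes their own tokens before falling back, via `inherit`, to the stock Python 3 rules. A rough sketch of what that means in practice (the import path is an assumption; the expected pairs follow from the `%`-magic rule shown above):

    from pygments.token import Keyword, Operator
    from IPython.lib.lexers import IPython3Lexer  # assumed import path

    toks = list(IPython3Lexer().get_tokens("%timeit sum(range(10))\n"))
    # Expect roughly (Operator, '%') and (Keyword, 'timeit') near the front,
    # with the remainder lexed as ordinary Python text.
    print(toks[:3])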
@@ -184,9 +222,9 @@ class IPythonTracebackLexer(DelegatingLexer):
     this is the line which lists the File and line number.
 
     """
-    # The lexer inherits from DelegatingLexer. The "root" lexer is an
-    # appropriate IPython lexer, which depends on the value of the boolean
-    # `python3`. First, we parse with the partial IPython traceback lexer.
+
+    # The lexer inherits from DelegatingLexer. The "root" lexer is the
+    # IPython3 lexer. First, we parse with the partial IPython traceback lexer.
     # Then, any code marked with the "Other" token is delegated to the root
     # lexer.
     #
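Put differently: the partial lexer marks the embedded Python source as `Other`, and the delegating lexer re-lexes exactly those spans with the IPython3 lexer. A hedged sketch of exercising it (import path and sample text are assumptions; exact token types are not guaranteed here):

    from IPython.lib.lexers import IPythonTracebackLexer  # assumed import path

    header = "ZeroDivisionError                         Traceback (most recent call last)\n"
    for tok, value in IPythonTracebackLexer().get_tokens(header):
        print(tok, repr(value))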
@@ -201,19 +239,9 @@ def __init__(self, **options):
         # note we need a __init__ doc, as otherwise it inherits the doc from the super class
         # which will fail the documentation build as it references a section of the pygments docs that
         # does not exist when building IPython's docs.
-        self.python3 = get_bool_opt(options, 'python3', False)
-        if self.python3:
-            self.aliases = ['ipython3tb']
-        else:
-            self.aliases = ['ipython2tb', 'ipythontb']
 
-        if self.python3:
-            IPyLexer = IPython3Lexer
-        else:
-            IPyLexer = IPythonLexer
+        super().__init__(IPython3Lexer, IPythonPartialTracebackLexer, **options)
 
-        DelegatingLexer.__init__(self, IPyLexer,
-                                 IPythonPartialTracebackLexer, **options)
 
 class IPythonConsoleLexer(Lexer):
     """
@@ -255,8 +283,8 @@ class IPythonConsoleLexer(Lexer):
     #     continuation = '   .D.: '
     #     template = 'Out[#]: '
     #
-    # Where '#' is the 'prompt number' or 'execution count' and 'D'
-    # D is a number of dots matching the width of the execution count
+    # Where '#' is the 'prompt number' or 'execution count', and 'D'
+    # is a number of dots matching the width of the execution count.
     #
     in1_regex = r'In \[[0-9]+\]: '
     in2_regex = r'   \.\.+\.: '
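Because these prompt patterns are plain options, they can be overridden at construction time; a sketch assuming the `in1_regex`/`in2_regex` options documented in `__init__` below, for a transcript that uses bare `>>>`-style prompts (import path assumed):

    from IPython.lib.lexers import IPythonConsoleLexer  # assumed import path

    # Custom input prompts; output-prompt detection keeps its default.
    console = IPythonConsoleLexer(in1_regex=r">>> ", in2_regex=r"\.\.\. ")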
@@ -270,9 +298,6 @@ def __init__(self, **options):
 
         Parameters
         ----------
-        python3 : bool
-            If `True`, then the console inputs are parsed using a Python 3
-            lexer. Otherwise, they are parsed using a Python 2 lexer.
         in1_regex : RegexObject
             The compiled regular expression used to detect the start
             of inputs. Although the IPython configuration setting may have a
@@ -288,11 +313,7 @@ def __init__(self, **options):
             then the default output prompt is assumed.
 
         """
-        self.python3 = get_bool_opt(options, 'python3', False)
-        if self.python3:
-            self.aliases = ['ipython3console']
-        else:
-            self.aliases = ['ipython2console', 'ipythonconsole']
+        self.aliases = ["ipython3console"]
 
         in1_regex = options.get('in1_regex', self.in1_regex)
         in2_regex = options.get('in2_regex', self.in2_regex)
@@ -318,15 +339,8 @@ def __init__(self, **options):
 
         Lexer.__init__(self, **options)
 
-        if self.python3:
-            pylexer = IPython3Lexer
-            tblexer = IPythonTracebackLexer
-        else:
-            pylexer = IPythonLexer
-            tblexer = IPythonTracebackLexer
-
-        self.pylexer = pylexer(**options)
-        self.tblexer = tblexer(**options)
+        self.pylexer = IPython3Lexer(**options)
+        self.tblexer = IPythonTracebackLexer(**options)
 
         self.reset()
 
@@ -512,20 +526,16 @@ class IPyLexer(Lexer):
     def __init__(self, **options):
         """
         Create a new IPyLexer instance which dispatches to either an
-        IPythonCOnsoleLexer (if In prompts are present) or and IPythonLexer (if
+        IPythonConsoleLexer (if In prompts are present) or an IPython3Lexer (if
         In prompts are not present).
         """
         # init docstring is necessary for docs not to fail to build due to parent
         # docs referencing a section in pygments docs.
-        self.python3 = get_bool_opt(options, 'python3', False)
-        if self.python3:
-            self.aliases = ['ipy3']
-        else:
-            self.aliases = ['ipy2', 'ipy']
+        self.aliases = ["ipy3"]
 
         Lexer.__init__(self, **options)
 
-        self.IPythonLexer = IPythonLexer(**options)
+        self.IPythonLexer = IPython3Lexer(**options)
         self.IPythonConsoleLexer = IPythonConsoleLexer(**options)
 
     def get_tokens_unprocessed(self, text):
@@ -537,4 +547,3 @@ def get_tokens_unprocessed(self, text):
             lex = self.IPythonLexer
         for token in lex.get_tokens_unprocessed(text):
             yield token
-
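Finally, the IPyLexer dispatch described in its docstring can be exercised directly; a minimal sketch (import path assumed; behaviour per the docstring, not verified beyond it):

    from IPython.lib.lexers import IPyLexer  # assumed import path

    lexer = IPyLexer()
    with_prompt = "In [1]: print('hi')\n"   # routed to the console lexer
    without_prompt = "print('hi')\n"        # routed to the IPython3 lexer
    for sample in (with_prompt, without_prompt):
        print(list(lexer.get_tokens(sample))[:3])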