44
55This includes:
66
7- IPython3Lexer
8- Lexer for pure IPython (python + magic/shell commands)
7+ IPythonLexer, IPython3Lexer
8+ Lexers for pure IPython (python + magic/shell commands)
99
1010 IPythonPartialTracebackLexer, IPythonTracebackLexer
11- The partial traceback lexer reads everything but the Python code
12- appearing in a traceback.
13-        The full lexer combines the partial lexer with the IPython3Lexer.
11+ Supports 2.x and 3.x via keyword `python3`. The partial traceback
12+ lexer reads everything but the Python code appearing in a traceback.
13+        The full lexer combines the partial lexer with an IPython lexer.
1414
1515 IPythonConsoleLexer
1616 A lexer for IPython console sessions, with support for tracebacks.
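
For orientation, a minimal usage sketch (not part of the patch; it assumes Pygments is installed and that this module is importable as IPython.lib.lexers):

    # Highlight an IPython console session on the terminal.
    from pygments import highlight
    from pygments.formatters import TerminalFormatter
    from IPython.lib.lexers import IPythonConsoleLexer

    session = "In [1]: a = 1 + 1\n\nIn [2]: a\nOut[2]: 2\n"
    print(highlight(session, IPythonConsoleLexer(), TerminalFormatter()))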
3535
3636# Third party
3737from pygments.lexers import (
38-     BashLexer,
39-     HtmlLexer,
40-     JavascriptLexer,
41-     RubyLexer,
42-     PerlLexer,
43-     Python3Lexer,
44-     TexLexer,
45- )
38+     BashLexer, HtmlLexer, JavascriptLexer, RubyLexer, PerlLexer, PythonLexer,
39+     Python3Lexer, TexLexer)
4640from pygments.lexer import (
47-     Lexer,
48-     DelegatingLexer,
49-     RegexLexer,
50-     do_insertions,
51-     bygroups,
52-     using,
53-     inherit,
41+     Lexer, DelegatingLexer, RegexLexer, do_insertions, bygroups, using,
5442)
5543from pygments.token import (
5644    Generic, Keyword, Literal, Name, Operator, Other, Text, Error,
6149
6250line_re = re.compile('.*?\n')
6351
64- __all__ = [
65-     "IPython3Lexer",
66-     "IPythonPartialTracebackLexer",
67-     "IPythonTracebackLexer",
68-     "IPythonConsoleLexer",
69-     "IPyLexer",
70- ]
52+ __all__ = ['build_ipy_lexer', 'IPython3Lexer', 'IPythonLexer',
53+            'IPythonPartialTracebackLexer', 'IPythonTracebackLexer',
54+            'IPythonConsoleLexer', 'IPyLexer']
7155
7256
73- class IPython3Lexer(Python3Lexer):
74- """IPython3 Lexer"""
57+ def build_ipy_lexer(python3):
58+ """Builds IPython lexers depending on the value of `python3`.
7559
76-     name = "IPython3"
77-     aliases = ["ipython3"]
60+ The lexer inherits from an appropriate Python lexer and then adds
61+ information about IPython specific keywords (i.e. magic commands,
62+ shell commands, etc.)
7863
79-     tokens = {
80-         "root": [
81-             (
82-                 r"(?s)(\s*)(%%capture)([^\n]*\n)(.*)",
83-                 bygroups(Text, Operator, Text, using(Python3Lexer)),
84-             ),
85-             (
86-                 r"(?s)(\s*)(%%debug)([^\n]*\n)(.*)",
87-                 bygroups(Text, Operator, Text, using(Python3Lexer)),
88-             ),
89-             (
90-                 r"(?is)(\s*)(%%html)([^\n]*\n)(.*)",
91-                 bygroups(Text, Operator, Text, using(HtmlLexer)),
92-             ),
93-             (
94-                 r"(?s)(\s*)(%%javascript)([^\n]*\n)(.*)",
95-                 bygroups(Text, Operator, Text, using(JavascriptLexer)),
96-             ),
97-             (
98-                 r"(?s)(\s*)(%%js)([^\n]*\n)(.*)",
99-                 bygroups(Text, Operator, Text, using(JavascriptLexer)),
100-             ),
101-             (
102-                 r"(?s)(\s*)(%%latex)([^\n]*\n)(.*)",
103-                 bygroups(Text, Operator, Text, using(TexLexer)),
104-             ),
105-             (
106-                 r"(?s)(\s*)(%%perl)([^\n]*\n)(.*)",
107-                 bygroups(Text, Operator, Text, using(PerlLexer)),
108-             ),
109-             (
110-                 r"(?s)(\s*)(%%prun)([^\n]*\n)(.*)",
111-                 bygroups(Text, Operator, Text, using(Python3Lexer)),
112-             ),
113-             (
114-                 r"(?s)(\s*)(%%pypy)([^\n]*\n)(.*)",
115-                 bygroups(Text, Operator, Text, using(Python3Lexer)),
116-             ),
117-             (
118-                 r"(?s)(\s*)(%%python)([^\n]*\n)(.*)",
119-                 bygroups(Text, Operator, Text, using(Python3Lexer)),
120-             ),
121-             (
122-                 r"(?s)(\s*)(%%python3)([^\n]*\n)(.*)",
123-                 bygroups(Text, Operator, Text, using(Python3Lexer)),
124-             ),
125-             (
126-                 r"(?s)(\s*)(%%ruby)([^\n]*\n)(.*)",
127-                 bygroups(Text, Operator, Text, using(RubyLexer)),
128-             ),
129-             (
130-                 r"(?s)(\s*)(%%time)([^\n]*\n)(.*)",
131-                 bygroups(Text, Operator, Text, using(Python3Lexer)),
132-             ),
133-             (
134-                 r"(?s)(\s*)(%%timeit)([^\n]*\n)(.*)",
135-                 bygroups(Text, Operator, Text, using(Python3Lexer)),
136-             ),
137-             (
138-                 r"(?s)(\s*)(%%writefile)([^\n]*\n)(.*)",
139-                 bygroups(Text, Operator, Text, using(Python3Lexer)),
140-             ),
141-             (
142-                 r"(?s)(\s*)(%%file)([^\n]*\n)(.*)",
143-                 bygroups(Text, Operator, Text, using(Python3Lexer)),
144-             ),
145-             (r"(?s)(\s*)(%%)(\w+)(.*)", bygroups(Text, Operator, Keyword, Text)),
146-             (
147-                 r"(?s)(^\s*)(%%!)([^\n]*\n)(.*)",
148-                 bygroups(Text, Operator, Text, using(BashLexer)),
149-             ),
150-             (r"(%%?)(\w+)(\?\??)$", bygroups(Operator, Keyword, Operator)),
151-             (r"\b(\?\??)(\s*)$", bygroups(Operator, Text)),
152-             (
153-                 r"(%)(sx|sc|system)(.*)(\n)",
154-                 bygroups(Operator, Keyword, using(BashLexer), Text),
155-             ),
156-             (r"(%)(\w+)(.*\n)", bygroups(Operator, Keyword, Text)),
157-             (r"^(!!)(.+)(\n)", bygroups(Operator, using(BashLexer), Text)),
158-             (r"(!)(?!=)(.+)(\n)", bygroups(Operator, using(BashLexer), Text)),
159-             (r"^(\s*)(\?\??)(\s*%{0,2}[\w\.\*]*)", bygroups(Text, Operator, Text)),
160-             (r"(\s*%{0,2}[\w\.\*]*)(\?\??)(\s*)$", bygroups(Text, Operator, Text)),
161-             inherit,
162-         ]
163-     }
64+ Parameters
65+ ----------
66+ python3 : bool
67+ If `True`, then build an IPython lexer from a Python 3 lexer.
68+
69+ """
70+ # It would be nice to have a single IPython lexer class which takes
71+ # a boolean `python3`. But since there are two Python lexer classes,
72+ # we will also have two IPython lexer classes.
73+     if python3:
74+         PyLexer = Python3Lexer
75+         name = 'IPython3'
76+         aliases = ['ipython3']
77+         doc = """IPython3 Lexer"""
78+     else:
79+         PyLexer = PythonLexer
80+         name = 'IPython'
81+         aliases = ['ipython2', 'ipython']
82+         doc = """IPython Lexer"""
83+
84+     ipython_tokens = [
85+         (r'(?s)(\s*)(%%capture)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(PyLexer))),
86+         (r'(?s)(\s*)(%%debug)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(PyLexer))),
87+         (r'(?is)(\s*)(%%html)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(HtmlLexer))),
88+         (r'(?s)(\s*)(%%javascript)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(JavascriptLexer))),
89+         (r'(?s)(\s*)(%%js)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(JavascriptLexer))),
90+         (r'(?s)(\s*)(%%latex)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(TexLexer))),
91+         (r'(?s)(\s*)(%%perl)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(PerlLexer))),
92+         (r'(?s)(\s*)(%%prun)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(PyLexer))),
93+         (r'(?s)(\s*)(%%pypy)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(PyLexer))),
94+         (r'(?s)(\s*)(%%python)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(PyLexer))),
95+         (r'(?s)(\s*)(%%python2)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(PythonLexer))),
96+         (r'(?s)(\s*)(%%python3)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(Python3Lexer))),
97+         (r'(?s)(\s*)(%%ruby)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(RubyLexer))),
98+         (r'(?s)(\s*)(%%time)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(PyLexer))),
99+         (r'(?s)(\s*)(%%timeit)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(PyLexer))),
100+         (r'(?s)(\s*)(%%writefile)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(PyLexer))),
101+         (r'(?s)(\s*)(%%file)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(PyLexer))),
102+         (r"(?s)(\s*)(%%)(\w+)(.*)", bygroups(Text, Operator, Keyword, Text)),
103+         (r'(?s)(^\s*)(%%!)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(BashLexer))),
104+         (r"(%%?)(\w+)(\?\??)$", bygroups(Operator, Keyword, Operator)),
105+         (r"\b(\?\??)(\s*)$", bygroups(Operator, Text)),
106+         (r'(%)(sx|sc|system)(.*)(\n)', bygroups(Operator, Keyword,
107+                                                 using(BashLexer), Text)),
108+         (r'(%)(\w+)(.*\n)', bygroups(Operator, Keyword, Text)),
109+         (r'^(!!)(.+)(\n)', bygroups(Operator, using(BashLexer), Text)),
110+         (r'(!)(?!=)(.+)(\n)', bygroups(Operator, using(BashLexer), Text)),
111+         (r'^(\s*)(\?\??)(\s*%{0,2}[\w\.\*]*)', bygroups(Text, Operator, Text)),
112+         (r'(\s*%{0,2}[\w\.\*]*)(\?\??)(\s*)$', bygroups(Text, Operator, Text)),
113+     ]
114+
115+     tokens = PyLexer.tokens.copy()
116+     tokens['root'] = ipython_tokens + tokens['root']
117+
118+     attrs = {'name': name, 'aliases': aliases, 'filenames': [],
119+              '__doc__': doc, 'tokens': tokens}
120+
121+     return type(name, (PyLexer,), attrs)
122+
123+
124+ IPython3Lexer = build_ipy_lexer(python3=True)
125+ IPythonLexer = build_ipy_lexer(python3=False)
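
A quick sketch of what the factory above produces (illustrative only; the module path IPython.lib.lexers and the exact token stream are assumptions, not part of the patch):

    from IPython.lib.lexers import IPythonLexer

    # The line-magic rule (%)(\w+)(.*\n) is prepended to the Python root state,
    # so a magic call should come back as Operator + Keyword + Text.
    for tok, value in IPythonLexer().get_tokens("%matplotlib inline\n"):
        print(tok, repr(value))
    # Expected, roughly: Token.Operator '%', Token.Keyword 'matplotlib',
    # Token.Text ' inline\n'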
164126
165127
166128class IPythonPartialTracebackLexer(RegexLexer):
@@ -222,9 +184,9 @@ class IPythonTracebackLexer(DelegatingLexer):
222184 this is the line which lists the File and line number.
223185
224186 """
225-
226- # The lexer inherits from DelegatingLexer. The "root" lexer is the
227- # IPython3 lexer. First, we parse with the partial IPython traceback lexer.
187+ # The lexer inherits from DelegatingLexer. The "root" lexer is an
188+ # appropriate IPython lexer, which depends on the value of the boolean
189+ # `python3`. First, we parse with the partial IPython traceback lexer.
228190 # Then, any code marked with the "Other" token is delegated to the root
229191 # lexer.
230192 #
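
The comments above describe the delegation scheme; a short usage sketch (hypothetical traceback text, module path assumed to be IPython.lib.lexers):

    from IPython.lib.lexers import IPythonTracebackLexer

    tb = (
        "ZeroDivisionError                         Traceback (most recent call last)\n"
        "<ipython-input-1-bc757c3fda29> in <module>()\n"
        "----> 1 1/0\n"
        "\n"
        "ZeroDivisionError: division by zero\n"
    )
    # Everything the partial lexer marks as Other (the Python source lines)
    # is re-lexed by the IPython lexer selected via the `python3` option.
    for tok, value in IPythonTracebackLexer(python3=True).get_tokens(tb):
        print(tok, repr(value))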
@@ -239,9 +201,19 @@ def __init__(self, **options):
239201         # note we need a __init__ doc, as otherwise it inherits the doc from the super class
240202         # which will fail the documentation build as it references sections of the pygments docs that
241203         # do not exist when building IPython's docs.
204+         self.python3 = get_bool_opt(options, 'python3', False)
205+         if self.python3:
206+             self.aliases = ['ipython3tb']
207+         else:
208+             self.aliases = ['ipython2tb', 'ipythontb']
242209
243-         super().__init__(IPython3Lexer, IPythonPartialTracebackLexer, **options)
210+         if self.python3:
211+             IPyLexer = IPython3Lexer
212+         else:
213+             IPyLexer = IPythonLexer
244214
215+         DelegatingLexer.__init__(self, IPyLexer,
216+                                  IPythonPartialTracebackLexer, **options)
245217
246218class IPythonConsoleLexer(Lexer):
247219 """
@@ -283,8 +255,8 @@ class IPythonConsoleLexer(Lexer):
283255 # continuation = ' .D.: '
284256 # template = 'Out[#]: '
285257 #
286- # Where '#' is the 'prompt number' or 'execution count' and 'D'
287- # D is a number of dots matching the width of the execution count
258+ # Where '#' is the 'prompt number' or 'execution count' and 'D'
259+ # D is a number of dots matching the width of the execution count
288260 #
289261 in1_regex = r'In \[[0-9]+\]: '
290262 in2_regex = r' \.\.+\.: '
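
These class-level defaults can be overridden per instance; a sketch with hypothetical prompt strings (the out_regex option mirrors the in1_regex/in2_regex parameters documented below, and the module path is assumed):

    from IPython.lib.lexers import IPythonConsoleLexer

    lexer = IPythonConsoleLexer(
        in1_regex=r'>>> ',      # start-of-input prompt
        in2_regex=r'\.\.\. ',   # continuation prompt
        out_regex=r'=> ',       # output prompt
    )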
@@ -298,6 +270,9 @@ def __init__(self, **options):
298270
299271 Parameters
300272 ----------
273+ python3 : bool
274+ If `True`, then the console inputs are parsed using a Python 3
275+ lexer. Otherwise, they are parsed using a Python 2 lexer.
301276 in1_regex : RegexObject
302277 The compiled regular expression used to detect the start
303278 of inputs. Although the IPython configuration setting may have a
@@ -313,7 +288,11 @@ def __init__(self, **options):
313288 then the default output prompt is assumed.
314289
315290 """
316-         self.aliases = ["ipython3console"]
291+         self.python3 = get_bool_opt(options, 'python3', False)
292+         if self.python3:
293+             self.aliases = ['ipython3console']
294+         else:
295+             self.aliases = ['ipython2console', 'ipythonconsole']
317296
318297         in1_regex = options.get('in1_regex', self.in1_regex)
319298         in2_regex = options.get('in2_regex', self.in2_regex)
@@ -339,8 +318,15 @@ def __init__(self, **options):
339318
340319         Lexer.__init__(self, **options)
341320
342-         self.pylexer = IPython3Lexer(**options)
343-         self.tblexer = IPythonTracebackLexer(**options)
321+         if self.python3:
322+             pylexer = IPython3Lexer
323+             tblexer = IPythonTracebackLexer
324+         else:
325+             pylexer = IPythonLexer
326+             tblexer = IPythonTracebackLexer
327+
328+         self.pylexer = pylexer(**options)
329+         self.tblexer = tblexer(**options)
344330
345331         self.reset()
346332
@@ -526,16 +512,20 @@ class IPyLexer(Lexer):
526512     def __init__(self, **options):
527513 """
528514         Create a new IPyLexer instance which dispatches to either an
529- IPythonConsoleLexer (if In prompts are present) or and IPython3Lexer (if
515+         IPythonConsoleLexer (if In prompts are present) or an IPythonLexer (if
530516 In prompts are not present).
531517 """
532518         # init docstring is necessary for docs not to fail to build due to parent
533519         # docs referencing a section in pygments docs.
534-         self.aliases = ["ipy3"]
520+         self.python3 = get_bool_opt(options, 'python3', False)
521+         if self.python3:
522+             self.aliases = ['ipy3']
523+         else:
524+             self.aliases = ['ipy2', 'ipy']
535525
536526         Lexer.__init__(self, **options)
537527
538-         self.IPythonLexer = IPython3Lexer(**options)
528+         self.IPythonLexer = IPythonLexer(**options)
539529         self.IPythonConsoleLexer = IPythonConsoleLexer(**options)
540530
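
To illustrate the dispatch described in the docstring above (sketch only; IPython.lib.lexers as the module path is an assumption):

    from IPython.lib.lexers import IPyLexer

    lexer = IPyLexer(python3=True)
    session = "In [1]: 1 + 1\nOut[1]: 2\n"   # has In prompts -> IPythonConsoleLexer
    script = "x = !ls\nprint(x)\n"           # no prompts -> plain IPython lexer
    for sample in (session, script):
        print(list(lexer.get_tokens(sample))[:4])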
541531 def get_tokens_unprocessed (self , text ):
@@ -547,3 +537,4 @@ def get_tokens_unprocessed(self, text):
547537 lex = self .IPythonLexer
548538 for token in lex .get_tokens_unprocessed (text ):
549539 yield token
540+