"""
reST directive for syntax-highlighting ipython interactive sessions.
"""

from sphinx import highlighting
from IPython.lib.lexers import IPyLexer


def setup(app):
    """Setup as a sphinx extension.

    Parameters
    ----------
    app : sphinx.application.Sphinx
        The Sphinx application object (unused; registration happens via
        the module-level ``highlighting.lexers`` assignments below).

    Returns
    -------
    dict
        Extension metadata declaring this extension safe for Sphinx's
        parallel reading and writing.
    """
    # This is only a lexer, so adding it below to pygments appears sufficient.
    # But if somebody knows what the right API usage should be to do that via
    # sphinx, by all means fix it here. At least having this setup.py
    # suppresses the sphinx warning we'd get without it.
    metadata = {'parallel_read_safe': True, 'parallel_write_safe': True}
    return metadata

# Register the extension as a valid pygments lexer.
# Alternatively, we could register the lexer with pygments instead. This would
# require using setuptools entrypoints: http://pygments.org/docs/plugins

# One lexer instance per Python dialect; 'ipython' defaults to the
# Python-2-flavored lexer for backward compatibility.
ipy2 = IPyLexer(python3=False)
ipy3 = IPyLexer(python3=True)

highlighting.lexers['ipython'] = ipy2
highlighting.lexers['ipython2'] = ipy2
highlighting.lexers['ipython3'] = ipy3