Forked from prompt-toolkit/ptpython — examples/python-embed-with-custom-prompt.py
(executable file, 51 lines / 38 LOC, 1.55 KB).
#!/usr/bin/env python
"""
Example of embedding a Python REPL, and setting a custom prompt.
"""
from __future__ import unicode_literals
from ptpython.repl import embed
from ptpython.prompt_style import PromptStyle
from pygments.token import Token
def configure(repl):
    """Configuration hook passed to `embed()`.

    Demonstrates three ways to customize the ptpython prompt; only the
    first one is active below.
    """
    # Approach 1 (preferred): register an extra PromptStyle under
    # `all_prompt_styles` and activate it.  The built-in styles remain
    # selectable from the options menu.
    class CustomPrompt(PromptStyle):
        """IPython-like prompt showing the current statement number."""

        def in_tokens(self, cli):
            number = '%s' % repl.current_statement_index
            return [
                (Token.In, 'Input['),
                (Token.In.Number, number),
                (Token.In, '] >>: '),
            ]

        def out_tokens(self, cli):
            number = '%s' % repl.current_statement_index
            return [
                (Token.Out, 'Result['),
                (Token.Out.Number, number),
                (Token.Out, ']: '),
            ]

    repl.all_prompt_styles['custom'] = CustomPrompt()
    repl.prompt_style = 'custom'

    # Approach 2: assign a new callable to `get_input_prompt_tokens`.
    # This always takes effect, bypassing the style machinery.
    ## repl.get_input_prompt_tokens = lambda cli: [(Token.In, '[hello] >>> ')]

    # Approach 3: wrap the original `get_input_prompt_tokens` and still call
    # it, which inserts a prefix in front of the existing prompt.
    ## original = repl.get_input_prompt_tokens
    ## repl.get_input_prompt_tokens = lambda cli: [(Token.In, '[prefix]')] + original(cli)
def main():
    """Launch an embedded ptpython REPL configured by `configure` above."""
    embed(globals(), locals(), configure=configure)
# Run the example only when executed as a script, not when imported.
if __name__ == '__main__':
    main()