mirror of https://github.com/xonsh/xonsh.git
lexer: output new tokens
parent 2671291941
commit ad45f28d58
1 changed file with 25 additions and 0 deletions
@@ -218,6 +218,28 @@ def handle_dollar(state, token):
         yield _new_token("ERRORTOKEN", m, token.start)
 
 
+def handle_atdollar(state, token):
+    """
+    Function for generating PLY tokens associated with ``@$``.
+    """
+    n = next(state['stream'], None)
+
+    if n is None:
+        state['last'] = token
+        m = "missing token after @$"
+        yield _new_token("ERRORTOKEN", m, token.start)
+    elif n.type == tokenize.OP and n.string == '(' and \
+            n.start == token.end:
+        state['pymode'].append((False, '@$(', ')', token.start))
+        state['last'] = n
+        yield _new_token('ATDOLLAR_LPAREN', '@$(', token.start)
+    else:
+        state['last'] = token
+        yield _new_token('ATDOLLAR', '@$', token.start)
+        yield from handle_token(state, n)
+
+
 def handle_at(state, token):
     """
     Function for generating PLY tokens associated with ``@``.
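Reviewer note: the handler above is a one-token lookahead — pull the next token off the stream, fuse it into a compound '@$(' token when an immediately adjacent '(' follows, and otherwise emit a bare '@$' and re-dispatch the lookahead. A minimal standalone sketch of that pattern, for illustration only (Tok, new_token, and handle_atdollar_sketch are hypothetical stand-ins, not xonsh API):

    from collections import namedtuple

    # Hypothetical stand-ins for the lexer's token machinery.
    Tok = namedtuple('Tok', ['type', 'string', 'start', 'end'])

    def new_token(type_, value, pos):
        return Tok(type_, value, pos, pos)

    def handle_atdollar_sketch(state, token):
        # Peek one token ahead to decide between a fused '@$('
        # and a bare '@$' followed by whatever comes next.
        n = next(state['stream'], None)
        if n is None:
            # Input ended right after '@$': emit an error token.
            yield new_token('ERRORTOKEN', 'missing token after @$', token.start)
        elif n.type == 'OP' and n.string == '(' and n.start == token.end:
            # '(' abuts '@$', so the two fuse into one ATDOLLAR_LPAREN.
            yield new_token('ATDOLLAR_LPAREN', '@$(', token.start)
        else:
            # Anything else: emit '@$' alone, then pass the lookahead through.
            yield new_token('ATDOLLAR', '@$', token.start)
            yield n

    # Example: '@$' immediately followed by '(' on the same line.
    at_dollar = Tok('OP', '@$', (1, 0), (1, 2))
    stream = iter([Tok('OP', '(', (1, 2), (1, 3))])
    for tok in handle_atdollar_sketch({'stream': stream}, at_dollar):
        print(tok.type, tok.string)   # ATDOLLAR_LPAREN @$(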
@@ -401,6 +423,7 @@ special_handlers = {
     (tokenize.OP, '}'): handle_rbrace,
     (tokenize.OP, '['): handle_lbracket,
     (tokenize.OP, ']'): handle_rbracket,
+    (tokenize.OP, '@$'): handle_atdollar,
     (tokenize.ERRORTOKEN, '$'): handle_dollar,
     (tokenize.ERRORTOKEN, '?'): handle_question,
     (tokenize.ERRORTOKEN, '!'): handle_bang,
@@ -556,4 +579,6 @@ class Lexer(object):
         'DOLLAR_LPAREN', # $(
         'DOLLAR_LBRACE', # ${
         'DOLLAR_LBRACKET', # $[
+        'ATDOLLAR', # @$
+        'ATDOLLAR_LPAREN', # @$(
     ) + tuple(i.upper() for i in kwlist)
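With the handler registered in special_handlers and the two names added to the token tuple, input such as @$(which python) should lex to roughly the sequence sketched below (expected shape only; exact token names and attributes depend on the rest of the lexer, and "which python" is just an example command):

    ATDOLLAR_LPAREN  '@$('
    NAME             'which'
    NAME             'python'
    RPAREN           ')'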