mirror of
https://github.com/xonsh/xonsh.git
synced 2025-03-04 16:34:47 +01:00
double pipe token
This commit is contained in:
parent
ae509b8bb4
commit
0f74d164e1
2 changed files with 26 additions and 2 deletions
|
@ -133,9 +133,18 @@ def test_multiline():
|
|||
def test_and():
    """Check that the keyword ``and`` lexes to an AND token."""
    expected = ['AND', 'and', 0]
    yield check_token, 'and', expected
|
||||
|
||||
def test_ampersand():
    """Check that a single ``&`` lexes to an AMPERSAND token."""
    expected = ['AMPERSAND', '&', 0]
    yield check_token, '&', expected
|
||||
|
||||
def test_doubleamp():
    """Check that ``&&`` lexes to a single DOUBLEAMP token."""
    expected = ['DOUBLEAMP', '&&', 0]
    yield check_token, '&&', expected
|
||||
|
||||
def test_pipe():
    """Check that a single ``|`` lexes to a PIPE token."""
    expected = ['PIPE', '|', 0]
    yield check_token, '|', expected
|
||||
|
||||
def test_doublepipe():
    """Check that ``||`` lexes to a single DOUBLEPIPE token."""
    expected = ['DOUBLEPIPE', '||', 0]
    yield check_token, '||', expected
|
||||
|
||||
def test_single_quote_literal():
    """Check that a single-quoted string literal lexes to a STRING token."""
    expected = ['STRING', "'yo'", 0]
    yield check_token, "'yo'", expected
|
||||
|
||||
|
|
|
@ -32,7 +32,7 @@ _op_map = {
|
|||
'~': 'TILDE', '^': 'XOR', '<<': 'LSHIFT', '>>': 'RSHIFT',
|
||||
'<': 'LT', '<=': 'LE', '>': 'GT', '>=': 'GE', '==': 'EQ',
|
||||
'!=': 'NE', '->': 'RARROW',
|
||||
'&&': 'DOUBLEAMP',
|
||||
'&&': 'DOUBLEAMP', '||': 'DOUBLEPIPE',
|
||||
# assignment operators
|
||||
'=': 'EQUALS', '+=': 'PLUSEQUAL', '-=': 'MINUSEQUAL',
|
||||
'*=': 'TIMESEQUAL', '@=': 'ATEQUAL', '/=': 'DIVEQUAL', '%=': 'MODEQUAL',
|
||||
|
@ -159,6 +159,20 @@ def handle_ampersands(state, token, stream):
|
|||
yield from handle_token(state, n, stream)
|
||||
|
||||
|
||||
def handle_pipes(state, token, stream):
    """Function for generating PLY tokens for single and double pipes.

    Peeks at the next token in *stream*: when it is another ``|`` operator
    immediately adjacent to *token* (no whitespace between them), the pair
    is fused into one DOUBLEPIPE token.  Otherwise a PIPE token is emitted
    and the peeked token, if any, is re-dispatched through handle_token.
    """
    peeked = next(stream, None)
    # Adjacent only when the peeked op starts exactly where this token ends.
    fuse = (peeked is not None
            and peeked.type == tokenize.OP
            and peeked.string == '|'
            and peeked.start == token.end)
    if fuse:
        state['last'] = peeked
        yield _new_token('DOUBLEPIPE', '||', token.start)
    else:
        state['last'] = token
        yield _new_token('PIPE', token.string, token.start)
        if peeked is not None:
            # The consumed lookahead still needs normal processing.
            yield from handle_token(state, peeked, stream)
|
||||
|
||||
|
||||
def handle_dollar(state, token, stream):
|
||||
"""
|
||||
Function for generating PLY tokens associated with ``$``.
|
||||
|
@ -369,6 +383,8 @@ special_handlers = {
|
|||
tokenize.NAME: handle_name,
|
||||
tokenize.NUMBER: handle_number,
|
||||
tokenize.ERRORTOKEN: handle_error_token,
|
||||
(tokenize.OP, '|'): handle_pipes,
|
||||
(tokenize.OP, '||'): handle_pipes,
|
||||
(tokenize.OP, '&'): handle_ampersands,
|
||||
(tokenize.OP, '&&'): handle_ampersands,
|
||||
(tokenize.OP, '@'): handle_at,
|
||||
|
@ -519,7 +535,6 @@ class Lexer(object):
|
|||
'NUMBER', # numbers
|
||||
'WS', # whitespace in subprocess mode
|
||||
'AMPERSAND', # &
|
||||
'DOUBLEAMP', # &&
|
||||
'REGEXPATH', # regex escaped with backticks
|
||||
'IOREDIRECT', # subprocess io redirection token
|
||||
'LPAREN', 'RPAREN', # ( )
|
||||
|
|
Loading…
Add table
Reference in a new issue