Mirror of https://github.com/xonsh/xonsh.git (synced 2025-03-06 17:30:59 +01:00)

Commit 0430524ee6 (parent acce6ed89a): "raw attempt"
3 changed files with 26 additions and 5 deletions
@@ -15,7 +15,7 @@ from xonsh.lazyasd import lazyobject
 from xonsh.platform import PYTHON_VERSION_INFO
 from xonsh.tokenize import (OP, IOREDIRECT, STRING, DOLLARNAME, NUMBER,
                             SEARCHPATH, NEWLINE, INDENT, DEDENT, NL, COMMENT, ENCODING,
-                            ENDMARKER, NAME, ERRORTOKEN, tokenize, TokenError)
+                            ENDMARKER, NAME, ERRORTOKEN, tokenize, TokenError, NOCOMMA)
 
 
 @lazyobject

@@ -62,6 +62,7 @@ def token_map():
         from xonsh.tokenize import ASYNC, AWAIT
         tm[ASYNC] = 'ASYNC'
         tm[AWAIT] = 'AWAIT'
+    tm[NOCOMMA] = 'NOCOMMA'
     return tm
 
 
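The two hunks above wire the new NOCOMMA token into what looks like the lexer's token_map, which keys tokenize-level token types to parser token names. A minimal standalone sketch of that mapping, using only names visible in this diff (the dict literal is illustrative and abridged, not xonsh's actual map):

    # Sketch only: NUMBER, STRING, and NOCOMMA are integer token constants
    # exported by xonsh.tokenize (NOCOMMA only exists after this commit).
    from xonsh.tokenize import NUMBER, STRING, NOCOMMA

    tm = {NUMBER: 'NUMBER', STRING: 'STRING'}  # abridged pre-existing entries
    tm[NOCOMMA] = 'NOCOMMA'                    # the entry this commit adds
    assert tm[NOCOMMA] == 'NOCOMMA'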
@@ -233,7 +233,7 @@ class BaseParser(object):
                      'op_factor_list', 'trailer_list', 'testlist_comp',
                      'yield_expr_or_testlist_comp', 'dictorsetmaker',
                      'comma_subscript_list', 'test', 'sliceop', 'comp_iter',
-                     'yield_arg', 'test_comma_list']
+                     'yield_arg', 'test_comma_list', 'comma_nocomma_list', 'macroarglist']
         for rule in opt_rules:
             self._opt_rule(rule)
 

@@ -247,7 +247,7 @@ class BaseParser(object):
                       'pm_term', 'op_factor', 'trailer', 'comma_subscript',
                       'comma_expr_or_star_expr', 'comma_test', 'comma_argument',
                       'comma_item', 'attr_period_name', 'test_comma',
-                      'equals_yield_expr_or_testlist']
+                      'equals_yield_expr_or_testlist', 'comma_nocomma']
         for rule in list_rules:
             self._list_rule(rule)
 

@@ -1831,6 +1831,21 @@ class BaseParser(object):
         """
         p[0] = [p[1]]
 
+    def p_comma_nocomma(self, p):
+        """comma_nocomma : COMMA NOCOMMA"""
+        p[0] = [p[2]]
+
+    def p_macroarglist(self, p):
+        """macroarglist : NOCOMMA comma_nocomma_list_opt comma_opt"""
+        p1, p2 = p[1], p[2]
+        if p2 is None:
+            elts = [p1]
+        else:
+            elts = [p1] + p2
+        p0 = ast.Tuple(elts=elts, ctx=ast.Load(), lineno=p1.lineno,
+                       col_offset=p1.col_offset)
+        p[0] = p0
+
     def p_subscriptlist(self, p):
         """subscriptlist : subscript comma_subscript_list_opt comma_opt"""
         p1, p2 = p[1], p[2]
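A minimal sketch, outside the commit, of what the new p_macroarglist rule produces. It assumes each NOCOMMA chunk has already been converted into an AST node carrying lineno and col_offset, as the rule's use of p1.lineno implies; the Name nodes below are hypothetical stand-ins for those chunks:

    import ast

    # Two stand-in argument nodes, as if produced from NOCOMMA chunks.
    args = [ast.Name(id='x', ctx=ast.Load(), lineno=1, col_offset=0),
            ast.Name(id='y', ctx=ast.Load(), lineno=1, col_offset=3)]

    # Mirror the rule: the first chunk plus the optional comma_nocomma list
    # becomes a single load-context tuple expression.
    p1, p2 = args[0], args[1:] or None
    elts = [p1] if p2 is None else [p1] + p2
    tup = ast.Tuple(elts=elts, ctx=ast.Load(), lineno=p1.lineno,
                    col_offset=p1.col_offset)
    print(ast.dump(tup))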
@@ -51,7 +51,7 @@ import token
 __all__ = token.__all__ + ["COMMENT", "tokenize", "detect_encoding",
                            "NL", "untokenize", "ENCODING", "TokenInfo",
                            "TokenError", 'SEARCHPATH', 'ATDOLLAR', 'ATEQUAL',
-                           'DOLLARNAME', 'IOREDIRECT']
+                           'DOLLARNAME', 'IOREDIRECT', 'NOCOMMA']
 PY35 = PYTHON_VERSION_INFO >= (3, 5, 0)
 if PY35:
     ASYNC = token.ASYNC

@@ -85,6 +85,9 @@ N_TOKENS += 1
 ATEQUAL = N_TOKENS
 tok_name[N_TOKENS] = 'ATEQUAL'
 N_TOKENS += 1
+NOCOMMA = N_TOKENS
+tok_name[N_TOKENS] = 'NOCOMMA'
+N_TOKENS += 1
 _xonsh_tokens = {
     '?': 'QUESTION',
     '@=': 'ATEQUAL',

@@ -241,8 +244,10 @@ Operator = group(r"\*\*=?", r">>=?", r"<<=?", r"!=", r"//=?", r"->",
 Bracket = '[][(){}]'
 Special = group(r'\r?\n', r'\.\.\.', r'[:;.,@]')
 Funny = group(Operator, Bracket, Special)
+NoComma = r"('.*'|\".*\"|'''.*'''|\"\"\".*\"\"\"|\(.*\)|\[.*\]|{.*}|[^,]*)*"
 
-PlainToken = group(IORedirect, Number, Funny, String, Name_RE, SearchPath)
+PlainToken = group(IORedirect, Number, Funny, String, Name_RE, SearchPath,
+                   NoComma)
 Token = Ignore + PlainToken
 
 # First (or only) line of ' or " string.
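The NoComma pattern added above is plain regular-expression syntax, so its intent can be checked outside xonsh. A hedged sketch with made-up inputs, showing that a comma wrapped in parentheses or quotes at the start of a chunk is swallowed, while a bare top-level comma ends the match:

    import re

    # Same pattern as in the diff above.
    NoComma = r"('.*'|\".*\"|'''.*'''|\"\"\".*\"\"\"|\(.*\)|\[.*\]|{.*}|[^,]*)*"

    print(re.match(NoComma, "x + 42").group(0))      # x + 42
    print(re.match(NoComma, "(x, y) + 1").group(0))  # (x, y) + 1
    print(re.match(NoComma, "x, y").group(0))        # x  (stops at the bare comma)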