mirror of https://github.com/xonsh/xonsh.git
synced 2025-03-05 00:41:00 +01:00
Add walrus operator token
parent fbf6cf2410
commit 5c18d599dd
3 changed files with 50 additions and 1 deletion
@@ -34,6 +34,7 @@ from xonsh.tokenize import (
     RIGHTSHIFT,
     tokenize,
     TokenError,
+    HAS_WALRUS,
 )
 
 
@@ -111,6 +112,8 @@ def token_map():
 
     tm[ASYNC] = "ASYNC"
     tm[AWAIT] = "AWAIT"
+    if HAS_WALRUS:
+        tm[(OP, ":=")] = "COLONEQUAL"
     return tm
 
 
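Keying the new entry on the pair (OP, ":=") reflects how the tokenizer reports the walrus: as a generic OP token whose exact type is COLONEQUAL, so the operator string is needed to disambiguate. A stdlib-only illustration (Python 3.8+; this snippet is not part of the commit):

    import io
    import token
    import tokenize

    # Tokenize a walrus assignment and inspect how ":=" is reported.
    src = "y = (x := 3)\n"
    for tok in tokenize.generate_tokens(io.StringIO(src).readline):
        if tok.string == ":=":
            print(token.tok_name[tok.type], token.tok_name[tok.exact_type])
            # -> OP COLONEQUAL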
@@ -7,6 +7,43 @@ from xonsh.parsers.v36 import Parser as ThreeSixParser
 class Parser(ThreeSixParser):
     """A Python v3.8 compliant parser for the xonsh language."""
 
+    def __init__(
+        self,
+        lexer_optimize=True,
+        lexer_table="xonsh.lexer_table",
+        yacc_optimize=True,
+        yacc_table="xonsh.parser_table",
+        yacc_debug=False,
+        outputdir=None,
+    ):
+        """Parameters
+        ----------
+        lexer_optimize : bool, optional
+            Set to false when unstable and true when lexer is stable.
+        lexer_table : str, optional
+            Lexer module used when optimized.
+        yacc_optimize : bool, optional
+            Set to false when unstable and true when parser is stable.
+        yacc_table : str, optional
+            Parser module used when optimized.
+        yacc_debug : bool, optional
+            Dumps extra debug info.
+        outputdir : str or None, optional
+            The directory to place generated tables within.
+        """
+        # Rule creation and modification *must* take place before super()
+        tok_rules = ["colonequal"]
+        for rule in tok_rules:
+            self._tok_rule(rule)
+        super().__init__(
+            lexer_optimize=lexer_optimize,
+            lexer_table=lexer_table,
+            yacc_optimize=yacc_optimize,
+            yacc_table=yacc_table,
+            yacc_debug=yacc_debug,
+            outputdir=outputdir,
+        )
+
     def p_parameters(self, p):
         """parameters : LPAREN typedargslist_opt RPAREN"""
         p2 = p[2]
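The "colonequal" rule must be registered before super().__init__ runs because PLY harvests the p_* productions from the class when it builds its parser tables. A rough, hypothetical sketch of the kind of pass-through production a helper like _tok_rule("colonequal") generates (names and body are illustrative, not xonsh's actual implementation):

    # Hypothetical equivalent of what _tok_rule("colonequal") registers:
    # a production that forwards the raw COLONEQUAL token to parent rules.
    def p_colonequal_tok(self, p):
        """colonequal_tok : COLONEQUAL"""
        p[0] = p.slice[1]  # the LexToken, with value and position intact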
@@ -82,6 +82,10 @@ from token import (
 from xonsh.lazyasd import LazyObject
 from xonsh.platform import PYTHON_VERSION_INFO
 
+HAS_WALRUS = PYTHON_VERSION_INFO > (3, 8)
+if HAS_WALRUS:
+    from token import COLONEQUAL
+
 cookie_re = LazyObject(
     lambda: re.compile(r"^[ \t\f]*#.*coding[:=][ \t]*([-\w.]+)", re.ASCII),
     globals(),
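Note that the strict > still enables the feature on 3.8.0 itself: PYTHON_VERSION_INFO is a longer tuple than (3, 8), and Python compares a longer tuple with an equal prefix as greater. For example:

    print((3, 8, 0) > (3, 8))  # True  -> HAS_WALRUS on Python 3.8.0
    print((3, 7, 9) > (3, 8))  # False -> no walrus support on 3.7.x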
@@ -119,7 +123,10 @@ else:
     ADDSPACE_TOKS = (NAME, NUMBER)
 del token  # must clean up token
 
-AUGASSIGN_OPS = r"[+\-*/%&@|^=<>]=?"
+if HAS_WALRUS:
+    AUGASSIGN_OPS = r"[+\-*/%&@|^=<>:]=?"
+else:
+    AUGASSIGN_OPS = r"[+\-*/%&@|^=<>]=?"
 
 
 COMMENT = N_TOKENS
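Extending the character class with ":" is all it takes for the operator pattern to match ":=" alongside the existing augmented assignments; a quick stdlib check (illustrative, not part of the commit):

    import re

    AUGASSIGN_OPS = r"[+\-*/%&@|^=<>:]=?"      # the HAS_WALRUS variant above
    print(re.fullmatch(AUGASSIGN_OPS, ":="))   # <re.Match ... match=':='>
    print(re.fullmatch(AUGASSIGN_OPS, "+="))   # existing operators still match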
@@ -215,6 +222,8 @@ EXACT_TOKEN_TYPES = {
     "//=": DOUBLESLASHEQUAL,
     "@": AT,
 }
+if HAS_WALRUS:
+    EXACT_TOKEN_TYPES[":="] = COLONEQUAL
 
 EXACT_TOKEN_TYPES.update(_xonsh_tokens)
 
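The guarded entry matches what CPython's own token module exposes on 3.8+, which is also where the conditional "from token import COLONEQUAL" above comes from:

    import token  # Python 3.8+

    print(token.COLONEQUAL)                                   # e.g. 53 on CPython 3.8
    print(token.EXACT_TOKEN_TYPES[":="] == token.COLONEQUAL)  # True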