From 5c18d599dd53d4d3a87f412c5b0ff0fc4a46647b Mon Sep 17 00:00:00 2001 From: David Strobach Date: Tue, 11 Aug 2020 01:41:17 +0200 Subject: [PATCH] Add walrus operator token --- xonsh/lexer.py | 3 +++ xonsh/parsers/v38.py | 37 +++++++++++++++++++++++++++++++++++++ xonsh/tokenize.py | 11 ++++++++++- 3 files changed, 50 insertions(+), 1 deletion(-) diff --git a/xonsh/lexer.py b/xonsh/lexer.py index 4ebcf1f7a..86a7ffa37 100644 --- a/xonsh/lexer.py +++ b/xonsh/lexer.py @@ -34,6 +34,7 @@ from xonsh.tokenize import ( RIGHTSHIFT, tokenize, TokenError, + HAS_WALRUS, ) @@ -111,6 +112,8 @@ def token_map(): tm[ASYNC] = "ASYNC" tm[AWAIT] = "AWAIT" + if HAS_WALRUS: + tm[(OP, ":=")] = "COLONEQUAL" return tm diff --git a/xonsh/parsers/v38.py b/xonsh/parsers/v38.py index 5d6bfffad..4c3e8708e 100644 --- a/xonsh/parsers/v38.py +++ b/xonsh/parsers/v38.py @@ -7,6 +7,43 @@ from xonsh.parsers.v36 import Parser as ThreeSixParser class Parser(ThreeSixParser): """A Python v3.8 compliant parser for the xonsh language.""" + def __init__( + self, + lexer_optimize=True, + lexer_table="xonsh.lexer_table", + yacc_optimize=True, + yacc_table="xonsh.parser_table", + yacc_debug=False, + outputdir=None, + ): + """Parameters + ---------- + lexer_optimize : bool, optional + Set to false when unstable and true when lexer is stable. + lexer_table : str, optional + Lexer module used when optimized. + yacc_optimize : bool, optional + Set to false when unstable and true when parser is stable. + yacc_table : str, optional + Parser module used when optimized. + yacc_debug : bool, optional + Dumps extra debug info. + outputdir : str or None, optional + The directory to place generated tables within. 
+ """ + # Rule creation and modification *must* take place before super() + tok_rules = ["colonequal"] + for rule in tok_rules: + self._tok_rule(rule) + super().__init__( + lexer_optimize=lexer_optimize, + lexer_table=lexer_table, + yacc_optimize=yacc_optimize, + yacc_table=yacc_table, + yacc_debug=yacc_debug, + outputdir=outputdir, + ) + def p_parameters(self, p): """parameters : LPAREN typedargslist_opt RPAREN""" p2 = p[2] diff --git a/xonsh/tokenize.py b/xonsh/tokenize.py index b6d3dfcef..4169ca85b 100644 --- a/xonsh/tokenize.py +++ b/xonsh/tokenize.py @@ -82,6 +82,10 @@ from token import ( from xonsh.lazyasd import LazyObject from xonsh.platform import PYTHON_VERSION_INFO +HAS_WALRUS = PYTHON_VERSION_INFO > (3, 8) +if HAS_WALRUS: + from token import COLONEQUAL + cookie_re = LazyObject( lambda: re.compile(r"^[ \t\f]*#.*coding[:=][ \t]*([-\w.]+)", re.ASCII), globals(), @@ -119,7 +123,10 @@ else: ADDSPACE_TOKS = (NAME, NUMBER) del token # must clean up token -AUGASSIGN_OPS = r"[+\-*/%&@|^=<>]=?" +if HAS_WALRUS: + AUGASSIGN_OPS = r"[+\-*/%&@|^=<>:]=?" +else: + AUGASSIGN_OPS = r"[+\-*/%&@|^=<>]=?" COMMENT = N_TOKENS @@ -215,6 +222,8 @@ EXACT_TOKEN_TYPES = { "//=": DOUBLESLASHEQUAL, "@": AT, } +if HAS_WALRUS: + EXACT_TOKEN_TYPES[":="] = COLONEQUAL EXACT_TOKEN_TYPES.update(_xonsh_tokens)