mirror of
https://github.com/xonsh/xonsh.git
synced 2025-03-04 08:24:40 +01:00
Feat: support for match statement for python >= 3.10 (#4672)
* feat: initial match statement support * refactor assignment exprs for <3.8 compatibility; sort imports * remove match extensions * bump qa-workflow python version, minor alterations for qa-pipeline * add mypy exception * update mypy version * Bump workflow main python version to 3.10 * remove outdated note Co-authored-by: Alexander Firbas <alexander.firbas@gmail.com>
This commit is contained in:
parent
81c8fe6bb9
commit
0ba5bec86f
17 changed files with 947 additions and 76 deletions
7
.github/workflows/genbuilds.py
vendored
7
.github/workflows/genbuilds.py
vendored
|
@ -3,11 +3,12 @@
|
|||
so that we can restart indivual workflow elements without having to restart
|
||||
them all. Rerun this script to regenerate.
|
||||
"""
|
||||
from itertools import product
|
||||
import os
|
||||
import jinja2
|
||||
from itertools import product
|
||||
from pathlib import Path
|
||||
|
||||
import jinja2
|
||||
|
||||
CURR_DIR = Path(__file__).absolute().parent
|
||||
environment = jinja2.Environment(
|
||||
loader=jinja2.FileSystemLoader(CURR_DIR),
|
||||
|
@ -39,7 +40,7 @@ class PY:
|
|||
_310 = "3.10"
|
||||
|
||||
|
||||
PY_MAIN_VERSION = PY._39
|
||||
PY_MAIN_VERSION = PY._310
|
||||
PYTHON_VERSIONS = [val for _, val in get_attrs(PY)]
|
||||
|
||||
ALLOWED_FAILURES = []
|
||||
|
|
11
.github/workflows/pytest-linux-3.10.yml
vendored
11
.github/workflows/pytest-linux-3.10.yml
vendored
|
@ -1,4 +1,4 @@
|
|||
name: pytest linux 3.10
|
||||
name: pytest cov linux 3.10
|
||||
|
||||
on:
|
||||
push:
|
||||
|
@ -55,6 +55,11 @@ jobs:
|
|||
run: |
|
||||
python -m pip --version
|
||||
python -m pip install -r requirements/tests.txt
|
||||
python -m pip install . --no-deps
|
||||
python -m pip install -e . --no-deps
|
||||
- name: Run tests
|
||||
run: python -m xonsh run-tests.xsh test -- --timeout=240
|
||||
run: python -m xonsh run-tests.xsh test --report-coverage --no-amalgam -- --timeout=240
|
||||
- name: Upload coverage to Codecov
|
||||
uses: codecov/codecov-action@v2
|
||||
with:
|
||||
verbose: true
|
||||
flags: "linux"
|
||||
|
|
11
.github/workflows/pytest-linux-3.9.yml
vendored
11
.github/workflows/pytest-linux-3.9.yml
vendored
|
@ -1,4 +1,4 @@
|
|||
name: pytest cov linux 3.9
|
||||
name: pytest linux 3.9
|
||||
|
||||
on:
|
||||
push:
|
||||
|
@ -55,11 +55,6 @@ jobs:
|
|||
run: |
|
||||
python -m pip --version
|
||||
python -m pip install -r requirements/tests.txt
|
||||
python -m pip install -e . --no-deps
|
||||
python -m pip install . --no-deps
|
||||
- name: Run tests
|
||||
run: python -m xonsh run-tests.xsh test --report-coverage --no-amalgam -- --timeout=240
|
||||
- name: Upload coverage to Codecov
|
||||
uses: codecov/codecov-action@v2
|
||||
with:
|
||||
verbose: true
|
||||
flags: "linux"
|
||||
run: python -m xonsh run-tests.xsh test -- --timeout=240
|
||||
|
|
11
.github/workflows/pytest-macos-3.10.yml
vendored
11
.github/workflows/pytest-macos-3.10.yml
vendored
|
@ -1,4 +1,4 @@
|
|||
name: pytest macos 3.10
|
||||
name: pytest cov macos 3.10
|
||||
|
||||
on:
|
||||
push:
|
||||
|
@ -55,6 +55,11 @@ jobs:
|
|||
run: |
|
||||
python -m pip --version
|
||||
python -m pip install -r requirements/tests.txt
|
||||
python -m pip install . --no-deps
|
||||
python -m pip install -e . --no-deps
|
||||
- name: Run tests
|
||||
run: python -m xonsh run-tests.xsh test -- --timeout=240
|
||||
run: python -m xonsh run-tests.xsh test --report-coverage --no-amalgam -- --timeout=240
|
||||
- name: Upload coverage to Codecov
|
||||
uses: codecov/codecov-action@v2
|
||||
with:
|
||||
verbose: true
|
||||
flags: "macos"
|
||||
|
|
11
.github/workflows/pytest-macos-3.9.yml
vendored
11
.github/workflows/pytest-macos-3.9.yml
vendored
|
@ -1,4 +1,4 @@
|
|||
name: pytest cov macos 3.9
|
||||
name: pytest macos 3.9
|
||||
|
||||
on:
|
||||
push:
|
||||
|
@ -55,11 +55,6 @@ jobs:
|
|||
run: |
|
||||
python -m pip --version
|
||||
python -m pip install -r requirements/tests.txt
|
||||
python -m pip install -e . --no-deps
|
||||
python -m pip install . --no-deps
|
||||
- name: Run tests
|
||||
run: python -m xonsh run-tests.xsh test --report-coverage --no-amalgam -- --timeout=240
|
||||
- name: Upload coverage to Codecov
|
||||
uses: codecov/codecov-action@v2
|
||||
with:
|
||||
verbose: true
|
||||
flags: "macos"
|
||||
run: python -m xonsh run-tests.xsh test -- --timeout=240
|
||||
|
|
11
.github/workflows/pytest-windows-3.10.yml
vendored
11
.github/workflows/pytest-windows-3.10.yml
vendored
|
@ -1,4 +1,4 @@
|
|||
name: pytest windows 3.10
|
||||
name: pytest cov windows 3.10
|
||||
|
||||
on:
|
||||
push:
|
||||
|
@ -55,6 +55,11 @@ jobs:
|
|||
run: |
|
||||
python -m pip --version
|
||||
python -m pip install -r requirements/tests.txt
|
||||
python -m pip install . --no-deps
|
||||
python -m pip install -e . --no-deps
|
||||
- name: Run tests
|
||||
run: python -m xonsh run-tests.xsh test -- --timeout=240
|
||||
run: python -m xonsh run-tests.xsh test --report-coverage --no-amalgam -- --timeout=240
|
||||
- name: Upload coverage to Codecov
|
||||
uses: codecov/codecov-action@v2
|
||||
with:
|
||||
verbose: true
|
||||
flags: "windows"
|
||||
|
|
11
.github/workflows/pytest-windows-3.9.yml
vendored
11
.github/workflows/pytest-windows-3.9.yml
vendored
|
@ -1,4 +1,4 @@
|
|||
name: pytest cov windows 3.9
|
||||
name: pytest windows 3.9
|
||||
|
||||
on:
|
||||
push:
|
||||
|
@ -55,11 +55,6 @@ jobs:
|
|||
run: |
|
||||
python -m pip --version
|
||||
python -m pip install -r requirements/tests.txt
|
||||
python -m pip install -e . --no-deps
|
||||
python -m pip install . --no-deps
|
||||
- name: Run tests
|
||||
run: python -m xonsh run-tests.xsh test --report-coverage --no-amalgam -- --timeout=240
|
||||
- name: Upload coverage to Codecov
|
||||
uses: codecov/codecov-action@v2
|
||||
with:
|
||||
verbose: true
|
||||
flags: "windows"
|
||||
run: python -m xonsh run-tests.xsh test -- --timeout=240
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
name: qa linux 3.9
|
||||
name: qa linux 3.10
|
||||
|
||||
on:
|
||||
push:
|
||||
|
@ -15,7 +15,7 @@ jobs:
|
|||
strategy:
|
||||
matrix:
|
||||
os: [ubuntu-latest]
|
||||
python-version: [ "3.9" ]
|
||||
python-version: [ "3.10" ]
|
||||
defaults:
|
||||
run:
|
||||
shell: bash -l {0}
|
23
news/feat-match-stmt.rst
Normal file
23
news/feat-match-stmt.rst
Normal file
|
@ -0,0 +1,23 @@
|
|||
**Added:**
|
||||
|
||||
* Added python's match statement for python >=3.10.
|
||||
|
||||
**Changed:**
|
||||
|
||||
* <news item>
|
||||
|
||||
**Deprecated:**
|
||||
|
||||
* <news item>
|
||||
|
||||
**Removed:**
|
||||
|
||||
* <news item>
|
||||
|
||||
**Fixed:**
|
||||
|
||||
* SyntaxErrors thrown during compilation (i.e. not during parsing) now include the offending source line.
|
||||
|
||||
**Security:**
|
||||
|
||||
* <news item>
|
|
@ -18,7 +18,9 @@ pre-commit
|
|||
pyte>=0.8.0
|
||||
|
||||
# types related
|
||||
mypy==0.931
|
||||
# mypy==0.931
|
||||
git+git://github.com/python/mypy.git@9b3147701f054bf8ef42bd96e33153b05976a5e1
|
||||
# TODO: replace above with mypy==0.940 once its released
|
||||
types-ujson
|
||||
|
||||
# ensure tests run with the amalgamated (==production) xonsh
|
||||
|
|
|
@ -5,7 +5,7 @@ import textwrap
|
|||
|
||||
import pytest
|
||||
|
||||
from tools import VER_MAJOR_MINOR, nodes_equal, skip_if_pre_3_8
|
||||
from tools import VER_MAJOR_MINOR, nodes_equal, skip_if_pre_3_8, skip_if_pre_3_10
|
||||
from xonsh.ast import AST, Call, Pass, Str, With
|
||||
from xonsh.parser import Parser
|
||||
from xonsh.parsers.fstring_adaptor import FStringAdaptor
|
||||
|
@ -52,14 +52,23 @@ def check_stmts(check_ast):
|
|||
|
||||
@pytest.fixture
|
||||
def check_xonsh_ast(xsh, parser):
|
||||
def factory(xenv, inp, run=True, mode="eval", debug_level=0, return_obs=False):
|
||||
def factory(
|
||||
xenv,
|
||||
inp,
|
||||
run=True,
|
||||
mode="eval",
|
||||
debug_level=0,
|
||||
return_obs=False,
|
||||
globals=None,
|
||||
locals=None,
|
||||
):
|
||||
xsh.env.update(xenv)
|
||||
obs = parser.parse(inp, debug_level=debug_level)
|
||||
if obs is None:
|
||||
return # comment only
|
||||
bytecode = compile(obs, "<test-xonsh-ast>", mode)
|
||||
if run:
|
||||
exec(bytecode)
|
||||
exec(bytecode, globals, locals)
|
||||
return obs if return_obs else True
|
||||
|
||||
return factory
|
||||
|
@ -3271,3 +3280,251 @@ def test_get_repo_url(parser):
|
|||
" raw = $(git remote get-url --push origin).rstrip()\n"
|
||||
" return raw.replace('https://github.com/', '')\n"
|
||||
)
|
||||
|
||||
|
||||
# match statement
|
||||
# (tests asserting that pure python match statements produce the same ast with the xonsh parser as they do with the python parser)
|
||||
|
||||
|
||||
def test_match_and_case_are_not_keywords(check_stmts):
|
||||
check_stmts(
|
||||
"""
|
||||
match = 1
|
||||
case = 2
|
||||
def match():
|
||||
pass
|
||||
class case():
|
||||
pass
|
||||
"""
|
||||
)
|
||||
|
||||
|
||||
@skip_if_pre_3_10
|
||||
def test_match_literal_pattern(check_stmts):
|
||||
check_stmts(
|
||||
"""match 1:
|
||||
case 1j:
|
||||
pass
|
||||
case 2.718+3.141j:
|
||||
pass
|
||||
case -2.718-3.141j:
|
||||
pass
|
||||
case 2:
|
||||
pass
|
||||
case -2:
|
||||
pass
|
||||
case "One" 'Two':
|
||||
pass
|
||||
case None:
|
||||
pass
|
||||
case True:
|
||||
pass
|
||||
case False:
|
||||
pass
|
||||
""",
|
||||
run=False,
|
||||
)
|
||||
|
||||
|
||||
@skip_if_pre_3_10
|
||||
def test_match_or_pattern(check_stmts):
|
||||
check_stmts(
|
||||
"""match 1:
|
||||
case 1j | 2 | "One" | 'Two' | None | True | False:
|
||||
pass
|
||||
""",
|
||||
run=False,
|
||||
)
|
||||
|
||||
|
||||
@skip_if_pre_3_10
|
||||
def test_match_as_pattern(check_stmts):
|
||||
check_stmts(
|
||||
"""match 1:
|
||||
case 1j | 2 | "One" | 'Two' | None | True | False as target:
|
||||
pass
|
||||
case 2 as target:
|
||||
pass
|
||||
""",
|
||||
run=False,
|
||||
)
|
||||
|
||||
|
||||
@skip_if_pre_3_10
|
||||
def test_match_group_pattern(check_stmts):
|
||||
check_stmts(
|
||||
"""match 1:
|
||||
case (None):
|
||||
pass
|
||||
case ((None)):
|
||||
pass
|
||||
case (1 | 2 as x) as x:
|
||||
pass
|
||||
""",
|
||||
run=False,
|
||||
)
|
||||
|
||||
|
||||
@skip_if_pre_3_10
|
||||
def test_match_capture_and_wildcard_pattern(check_stmts):
|
||||
check_stmts(
|
||||
"""match 1:
|
||||
case _:
|
||||
pass
|
||||
case x:
|
||||
pass
|
||||
""",
|
||||
run=False,
|
||||
)
|
||||
|
||||
|
||||
@skip_if_pre_3_10
|
||||
def test_match_value_pattern(check_stmts):
|
||||
check_stmts(
|
||||
"""match 1:
|
||||
case math.pi:
|
||||
pass
|
||||
case a.b.c.d:
|
||||
pass
|
||||
""",
|
||||
run=False,
|
||||
)
|
||||
|
||||
|
||||
@skip_if_pre_3_10
|
||||
def test_match_mapping_pattern(check_stmts):
|
||||
check_stmts(
|
||||
"""match _:
|
||||
case {}:
|
||||
pass
|
||||
case {x.y:y}:
|
||||
pass
|
||||
case {x.y:y,}:
|
||||
pass
|
||||
case {x.y:y,"a":a}:
|
||||
pass
|
||||
case {x.y:y,"a":a,}:
|
||||
pass
|
||||
case {x.y:y,"a":a,**end}:
|
||||
pass
|
||||
case {x.y:y,"a":a,**end,}:
|
||||
pass
|
||||
case {**end}:
|
||||
pass
|
||||
case {**end,}:
|
||||
pass
|
||||
case {1:1, "two":two, three.three: {}, 4:None, **end}:
|
||||
pass
|
||||
""",
|
||||
run=False,
|
||||
)
|
||||
|
||||
|
||||
@skip_if_pre_3_10
|
||||
def test_match_class_pattern(check_stmts):
|
||||
check_stmts(
|
||||
"""match _:
|
||||
case classs():
|
||||
pass
|
||||
case x.classs():
|
||||
pass
|
||||
case classs("subpattern"):
|
||||
pass
|
||||
case classs("subpattern",):
|
||||
pass
|
||||
case classs("subpattern",2):
|
||||
pass
|
||||
case classs("subpattern",2,):
|
||||
pass
|
||||
case classs(a = b):
|
||||
pass
|
||||
case classs(a = b,):
|
||||
pass
|
||||
case classs(a = b, b = c):
|
||||
pass
|
||||
case classs(a = b, b = c,):
|
||||
pass
|
||||
case classs(1,2,3,a = b):
|
||||
pass
|
||||
case classs(1,2,3,a = b,):
|
||||
pass
|
||||
case classs(1,2,3,a = b, b = c):
|
||||
pass
|
||||
case classs(1,2,3,a = b, b = c,):
|
||||
pass
|
||||
""",
|
||||
run=False,
|
||||
)
|
||||
|
||||
|
||||
@skip_if_pre_3_10
|
||||
def test_match_sequence_pattern(check_stmts):
|
||||
check_stmts(
|
||||
"""match 1:
|
||||
case (): # empty sequence pattern
|
||||
pass
|
||||
case (1): # group pattern
|
||||
pass
|
||||
case (1,): # length one sequence
|
||||
pass
|
||||
case (1,2):
|
||||
pass
|
||||
case (1,2,):
|
||||
pass
|
||||
case (1,2,3):
|
||||
pass
|
||||
case (1,2,3,):
|
||||
pass
|
||||
case []:
|
||||
pass
|
||||
case [1]:
|
||||
pass
|
||||
case [1,]:
|
||||
pass
|
||||
case [1,2]:
|
||||
pass
|
||||
case [1,2,3]:
|
||||
pass
|
||||
case [1,2,3,]:
|
||||
pass
|
||||
case [*x, *_]: # star patterns
|
||||
pass
|
||||
case 1,: # top level sequence patterns
|
||||
pass
|
||||
case *x,:
|
||||
pass
|
||||
case *_,*_:
|
||||
pass
|
||||
""",
|
||||
run=False,
|
||||
)
|
||||
|
||||
|
||||
@skip_if_pre_3_10
|
||||
def test_match_subject(check_stmts):
|
||||
check_stmts(
|
||||
"""
|
||||
match 1:
|
||||
case 1:
|
||||
pass
|
||||
match 1,:
|
||||
case 1:
|
||||
pass
|
||||
match 1,2:
|
||||
case 1:
|
||||
pass
|
||||
match 1,2,:
|
||||
case 1:
|
||||
pass
|
||||
match (1,2):
|
||||
case 1:
|
||||
pass
|
||||
match *x,:
|
||||
case 1:
|
||||
pass
|
||||
match (...[...][...]):
|
||||
case 1:
|
||||
pass
|
||||
""",
|
||||
run=False,
|
||||
)
|
||||
|
|
|
@ -45,6 +45,10 @@ skip_if_on_travis = pytest.mark.skipif(ON_TRAVIS, reason="not Travis CI friendly
|
|||
|
||||
skip_if_pre_3_8 = pytest.mark.skipif(VER_FULL < (3, 8), reason="Python >= 3.8 feature")
|
||||
|
||||
skip_if_pre_3_10 = pytest.mark.skipif(
|
||||
VER_FULL < (3, 10), reason="Python >= 3.10 feature"
|
||||
)
|
||||
|
||||
|
||||
def skip_if_not_has(exe: str):
|
||||
has_exe = shutil.which(exe)
|
||||
|
|
|
@ -129,7 +129,20 @@ class Execer:
|
|||
tree = self.parse(input, ctx, mode=mode, filename=filename, transform=transform)
|
||||
if tree is None:
|
||||
return compile("pass", filename, mode) # handles comment only input
|
||||
code = compile(tree, filename, mode)
|
||||
try:
|
||||
code = compile(tree, filename, mode)
|
||||
except SyntaxError as e:
|
||||
# Some syntax errors do not occur during parsing, but only later during compiling,
|
||||
# such as a "'return' outside function", or some validations regarding the match statement.
|
||||
# In such a case, the offending line of source code (e.text) is not attached to the exception.
|
||||
if e.text is None:
|
||||
lines = input.splitlines()
|
||||
i = max(
|
||||
0, min(e.lineno - 1, len(lines) - 1)
|
||||
) # clamp so no invalid access due to invalid lineno can occur
|
||||
e.text = lines[i]
|
||||
raise e
|
||||
|
||||
return code
|
||||
|
||||
def eval(
|
||||
|
|
|
@ -13,6 +13,7 @@ from xonsh.lazyasd import lazyobject
|
|||
from xonsh.platform import PYTHON_VERSION_INFO
|
||||
from xonsh.ply.ply.lex import LexToken
|
||||
from xonsh.tokenize import (
|
||||
CASE,
|
||||
COMMENT,
|
||||
DEDENT,
|
||||
DOLLARNAME,
|
||||
|
@ -24,6 +25,7 @@ from xonsh.tokenize import (
|
|||
INDENT,
|
||||
IOREDIRECT,
|
||||
LESS,
|
||||
MATCH,
|
||||
NAME,
|
||||
NEWLINE,
|
||||
NL,
|
||||
|
@ -113,6 +115,9 @@ def token_map():
|
|||
tm[AWAIT] = "AWAIT"
|
||||
if HAS_WALRUS:
|
||||
tm[(OP, ":=")] = "COLONEQUAL"
|
||||
# python 3.10 (backwards and name token compatible) tokens
|
||||
tm[MATCH] = "MATCH"
|
||||
tm[CASE] = "CASE"
|
||||
return tm
|
||||
|
||||
|
||||
|
@ -136,7 +141,7 @@ def handle_name(state, token):
|
|||
if state["pymode"][-1][0]:
|
||||
if needs_whitespace and not has_whitespace:
|
||||
pass
|
||||
elif token.string in kwmod.kwlist:
|
||||
elif token.string in kwmod.kwlist + ["match", "case"]:
|
||||
typ = token.string.upper()
|
||||
yield _new_token(typ, token.string, token.start)
|
||||
else:
|
||||
|
|
|
@ -218,6 +218,7 @@ def raise_parse_error(
|
|||
code: tp.Optional[str] = None,
|
||||
lines: tp.Optional[tp.List[str]] = None,
|
||||
):
|
||||
err_line = None
|
||||
if loc is None or code is None or lines is None:
|
||||
err_line_pointer = ""
|
||||
else:
|
||||
|
@ -468,6 +469,8 @@ class BaseParser:
|
|||
"timesequal",
|
||||
"while",
|
||||
"xorequal",
|
||||
"match",
|
||||
"case",
|
||||
]
|
||||
for rule in tok_rules:
|
||||
self._tok_rule(rule)
|
||||
|
@ -731,18 +734,39 @@ class BaseParser:
|
|||
p[0] = p[2]
|
||||
|
||||
def p_attr_period_name(self, p):
|
||||
"""attr_period_name : PERIOD NAME"""
|
||||
"""attr_period_name : PERIOD name_str"""
|
||||
p[0] = [p[2]]
|
||||
|
||||
def p_name_str(self, p):
|
||||
"""
|
||||
name_str : name
|
||||
"""
|
||||
p[0] = p[1].value
|
||||
|
||||
def p_name(self, p):
|
||||
"""
|
||||
name : name_tok
|
||||
| match_tok
|
||||
| case_tok
|
||||
"""
|
||||
p[0] = p[1]
|
||||
|
||||
def p_attr_name(self, p):
|
||||
"""
|
||||
attr_name : attr_name_alone
|
||||
| attr_name_with
|
||||
"""
|
||||
p[0] = p[1]
|
||||
|
||||
def p_attr_name_alone(self, p):
|
||||
"""attr_name : name_tok"""
|
||||
"""attr_name_alone : name"""
|
||||
p1 = p[1]
|
||||
p[0] = ast.Name(
|
||||
id=p1.value, ctx=ast.Load(), lineno=p1.lineno, col_offset=p1.lexpos
|
||||
)
|
||||
|
||||
def p_attr_name_with(self, p):
|
||||
"""attr_name : name_tok attr_period_name_list"""
|
||||
"""attr_name_with : name attr_period_name_list"""
|
||||
p1 = p[1]
|
||||
name = ast.Name(
|
||||
id=p1.value, ctx=ast.Load(), lineno=p1.lineno, col_offset=p1.lexpos
|
||||
|
@ -818,7 +842,7 @@ class BaseParser:
|
|||
p[0] = p[2]
|
||||
|
||||
def p_funcdef(self, p):
|
||||
"""funcdef : def_tok NAME parameters rarrow_test_opt COLON suite"""
|
||||
"""funcdef : def_tok name_str parameters rarrow_test_opt COLON suite"""
|
||||
f = ast.FunctionDef(
|
||||
name=p[2],
|
||||
args=p[3],
|
||||
|
@ -946,7 +970,7 @@ class BaseParser:
|
|||
p[0] = p[2]
|
||||
|
||||
def p_tfpdef(self, p):
|
||||
"""tfpdef : name_tok colon_test_opt"""
|
||||
"""tfpdef : name colon_test_opt"""
|
||||
p1 = p[1]
|
||||
kwargs = {
|
||||
"arg": p1.value,
|
||||
|
@ -1092,7 +1116,7 @@ class BaseParser:
|
|||
p[0] = p0
|
||||
|
||||
def p_vfpdef(self, p):
|
||||
"""vfpdef : name_tok"""
|
||||
"""vfpdef : name"""
|
||||
p1 = p[1]
|
||||
kwargs = {
|
||||
"arg": p1.value,
|
||||
|
@ -1448,11 +1472,11 @@ class BaseParser:
|
|||
p[0] = p[1]
|
||||
|
||||
def p_as_name(self, p):
|
||||
"""as_name : AS NAME"""
|
||||
"""as_name : AS name_str"""
|
||||
p[0] = p[2]
|
||||
|
||||
def p_import_as_name(self, p):
|
||||
"""import_as_name : NAME as_name_opt"""
|
||||
"""import_as_name : name_str as_name_opt"""
|
||||
p[0] = ast.alias(name=p[1], asname=p[2])
|
||||
|
||||
def p_comma_import_as_name(self, p):
|
||||
|
@ -1489,22 +1513,22 @@ class BaseParser:
|
|||
p[0] = p0
|
||||
|
||||
def p_period_name(self, p):
|
||||
"""period_name : PERIOD NAME"""
|
||||
"""period_name : PERIOD name_str"""
|
||||
p[0] = p[1] + p[2]
|
||||
|
||||
def p_dotted_name(self, p):
|
||||
"""
|
||||
dotted_name : NAME
|
||||
| NAME period_name_list
|
||||
dotted_name : name_str
|
||||
| name_str period_name_list
|
||||
"""
|
||||
p[0] = p[1] if len(p) == 2 else p[1] + p[2]
|
||||
|
||||
def p_comma_name(self, p):
|
||||
"""comma_name : COMMA NAME"""
|
||||
"""comma_name : COMMA name_str"""
|
||||
p[0] = [p[2]]
|
||||
|
||||
def p_global_stmt(self, p):
|
||||
"""global_stmt : global_tok NAME comma_name_list_opt"""
|
||||
"""global_stmt : global_tok name_str comma_name_list_opt"""
|
||||
p1, p2, p3 = p[1], p[2], p[3]
|
||||
names = [p2]
|
||||
if p3 is not None:
|
||||
|
@ -1512,7 +1536,7 @@ class BaseParser:
|
|||
p[0] = ast.Global(names=names, lineno=p1.lineno, col_offset=p1.lexpos)
|
||||
|
||||
def p_nonlocal_stmt(self, p):
|
||||
"""nonlocal_stmt : nonlocal_tok NAME comma_name_list_opt"""
|
||||
"""nonlocal_stmt : nonlocal_tok name_str comma_name_list_opt"""
|
||||
p1, p2, p3 = p[1], p[2], p[3]
|
||||
names = [p2]
|
||||
if p3 is not None:
|
||||
|
@ -2361,7 +2385,7 @@ class BaseParser:
|
|||
p[0] = p[1]
|
||||
|
||||
def p_atom_name(self, p):
|
||||
"""atom : name_tok"""
|
||||
"""atom : name"""
|
||||
p1 = p[1]
|
||||
p[0] = ast.Name(
|
||||
id=p1.value, ctx=ast.Load(), lineno=p1.lineno, col_offset=p1.lexpos
|
||||
|
@ -2391,36 +2415,47 @@ class BaseParser:
|
|||
"""atom : SEARCHPATH"""
|
||||
p[0] = xonsh_pathsearch(p[1], pymode=True, lineno=self.lineno, col=self.col)
|
||||
|
||||
# introduce seemingly superfluous symbol 'atom_dname' to enable reuse it in other places
|
||||
def p_atom_dname_indirection(self, p):
|
||||
"""atom : atom_dname"""
|
||||
p[0] = p[1]
|
||||
|
||||
def p_atom_dname(self, p):
|
||||
"""atom : DOLLAR_NAME"""
|
||||
"""atom_dname : DOLLAR_NAME"""
|
||||
p[0] = self._envvar_by_name(p[1][1:], lineno=self.lineno, col=self.col)
|
||||
|
||||
def p_atom_dollar_rule_atom(self, p):
|
||||
"""
|
||||
atom : dollar_rule_atom
|
||||
"""
|
||||
p[0] = p[1]
|
||||
|
||||
def p_atom_fistful_of_dollars(self, p):
|
||||
"""
|
||||
atom : dollar_lbrace_tok test RBRACE
|
||||
| bang_lparen_tok subproc RPAREN
|
||||
| dollar_lparen_tok subproc RPAREN
|
||||
| bang_lbracket_tok subproc RBRACKET
|
||||
| dollar_lbracket_tok subproc RBRACKET
|
||||
dollar_rule_atom : dollar_lbrace_tok test RBRACE
|
||||
| bang_lparen_tok subproc RPAREN
|
||||
| dollar_lparen_tok subproc RPAREN
|
||||
| bang_lbracket_tok subproc RBRACKET
|
||||
| dollar_lbracket_tok subproc RBRACKET
|
||||
"""
|
||||
p[0] = self._dollar_rules(p)
|
||||
|
||||
def p_atom_bang_empty_fistful_of_dollars(self, p):
|
||||
"""
|
||||
atom : bang_lparen_tok subproc bang_tok RPAREN
|
||||
| dollar_lparen_tok subproc bang_tok RPAREN
|
||||
| bang_lbracket_tok subproc bang_tok RBRACKET
|
||||
| dollar_lbracket_tok subproc bang_tok RBRACKET
|
||||
dollar_rule_atom : bang_lparen_tok subproc bang_tok RPAREN
|
||||
| dollar_lparen_tok subproc bang_tok RPAREN
|
||||
| bang_lbracket_tok subproc bang_tok RBRACKET
|
||||
| dollar_lbracket_tok subproc bang_tok RBRACKET
|
||||
"""
|
||||
self._append_subproc_bang_empty(p)
|
||||
p[0] = self._dollar_rules(p)
|
||||
|
||||
def p_atom_bang_fistful_of_dollars(self, p):
|
||||
"""
|
||||
atom : bang_lparen_tok subproc bang_tok nocloser rparen_tok
|
||||
| dollar_lparen_tok subproc bang_tok nocloser rparen_tok
|
||||
| bang_lbracket_tok subproc bang_tok nocloser rbracket_tok
|
||||
| dollar_lbracket_tok subproc bang_tok nocloser rbracket_tok
|
||||
dollar_rule_atom : bang_lparen_tok subproc bang_tok nocloser rparen_tok
|
||||
| dollar_lparen_tok subproc bang_tok nocloser rparen_tok
|
||||
| bang_lbracket_tok subproc bang_tok nocloser rbracket_tok
|
||||
| dollar_lbracket_tok subproc bang_tok nocloser rbracket_tok
|
||||
"""
|
||||
self._append_subproc_bang(p)
|
||||
p[0] = self._dollar_rules(p)
|
||||
|
@ -2596,7 +2631,7 @@ class BaseParser:
|
|||
def p_trailer_p3(self, p):
|
||||
"""
|
||||
trailer : LBRACKET subscriptlist RBRACKET
|
||||
| PERIOD NAME
|
||||
| PERIOD name_str
|
||||
"""
|
||||
p[0] = [p[2]]
|
||||
|
||||
|
@ -2893,7 +2928,7 @@ class BaseParser:
|
|||
)
|
||||
|
||||
def p_classdef(self, p):
|
||||
"""classdef : class_tok NAME func_call_opt COLON suite"""
|
||||
"""classdef : class_tok name_str func_call_opt COLON suite"""
|
||||
p1, p3 = p[1], p[3]
|
||||
b, kw = ([], []) if p3 is None else (p3["args"], p3["keywords"])
|
||||
c = ast.ClassDef(
|
||||
|
|
|
@ -1,7 +1,11 @@
|
|||
# type: ignore
|
||||
# TODO: remove line above once mypy understands the match statement
|
||||
|
||||
"""Handles changes since PY310
|
||||
|
||||
handle
|
||||
- import-alias requiring lineno
|
||||
- match statement
|
||||
"""
|
||||
|
||||
import ast
|
||||
|
@ -16,7 +20,7 @@ class Parser(ThreeNineParser):
|
|||
p[0] = [ast.alias(name=p[1], asname=None, **self.get_line_cols(p, 1))]
|
||||
|
||||
def p_import_as_name(self, p):
|
||||
"""import_as_name : NAME as_name_opt"""
|
||||
"""import_as_name : name_str as_name_opt"""
|
||||
self.p_dotted_as_name(p)
|
||||
|
||||
def p_dotted_as_name(self, p: yacc.YaccProduction):
|
||||
|
@ -36,3 +40,520 @@ class Parser(ThreeNineParser):
|
|||
col_offset=col_offset,
|
||||
end_col_offset=end_col_offset,
|
||||
)
|
||||
|
||||
def _set_error_at_production_index(self, msg, p, i):
|
||||
error_loc = self.get_line_cols(p, i)
|
||||
err_lineno = error_loc["lineno"]
|
||||
err_column = error_loc["col_offset"] + 1
|
||||
self._set_error(msg, self.currloc(lineno=err_lineno, column=err_column))
|
||||
|
||||
def p_compound_stmt_match(self, p):
|
||||
"""
|
||||
compound_stmt : match_stmt
|
||||
"""
|
||||
p[0] = p[1]
|
||||
|
||||
def p_match_stmt(self, p):
|
||||
"""
|
||||
match_stmt : match_tok subject_expr COLON NEWLINE INDENT case_block_list_nonempty DEDENT
|
||||
"""
|
||||
|
||||
_, _, subject_expr, _, _, _, case_block_list_nonempty, _ = p
|
||||
|
||||
p[0] = [
|
||||
ast.Match(
|
||||
**self.get_line_cols(p, 1),
|
||||
subject=subject_expr,
|
||||
cases=case_block_list_nonempty,
|
||||
)
|
||||
]
|
||||
|
||||
# case blocks
|
||||
def p_case_block(self, p):
|
||||
"""
|
||||
case_block : case_tok patterns COLON suite
|
||||
| case_tok patterns IF test COLON suite
|
||||
"""
|
||||
|
||||
loc = self.get_line_cols(p, 1)
|
||||
match list(p):
|
||||
case [_, _, pattern, _, suite]:
|
||||
p[0] = ast.match_case(pattern=pattern, body=suite, **loc)
|
||||
case [_, _, pattern, _, guard, _, suite]:
|
||||
p[0] = ast.match_case(pattern=pattern, body=suite, guard=guard, **loc)
|
||||
case _:
|
||||
raise AssertionError()
|
||||
|
||||
def p_case_block_list_nonempty(self, p):
|
||||
"""
|
||||
case_block_list_nonempty : case_block
|
||||
| case_block case_block_list_nonempty
|
||||
"""
|
||||
match list(p):
|
||||
case [_, case_block]:
|
||||
p[0] = [case_block]
|
||||
case [_, case_block, case_block_list_nonempty]:
|
||||
p[0] = [case_block] + case_block_list_nonempty
|
||||
case _:
|
||||
raise AssertionError()
|
||||
|
||||
# subject expression
|
||||
def p_subject_expr_single_value(self, p):
|
||||
"""
|
||||
subject_expr : test_or_star_expr comma_opt
|
||||
"""
|
||||
|
||||
match list(p):
|
||||
case [_, test_or_star_expr, None]:
|
||||
# single value
|
||||
p[0] = test_or_star_expr
|
||||
case [_, test_or_star_expr, ","]:
|
||||
# tuple with one element
|
||||
p[0] = ast.Tuple(
|
||||
elts=[test_or_star_expr], ctx=ast.Load(), **self.get_line_cols(p, 1)
|
||||
)
|
||||
case _:
|
||||
raise AssertionError()
|
||||
|
||||
def p_subject_expr_multiple_values(self, p):
|
||||
"""
|
||||
subject_expr : test_or_star_expr comma_test_or_star_expr_list comma_opt
|
||||
"""
|
||||
|
||||
match list(p):
|
||||
case [_, test_or_star_expr, comma_test_or_star_expr_list, "," | None]:
|
||||
# tuple with more than one element
|
||||
p[0] = ast.Tuple(
|
||||
elts=[test_or_star_expr] + comma_test_or_star_expr_list,
|
||||
ctx=ast.Load(),
|
||||
**self.get_line_cols(p, 1),
|
||||
)
|
||||
case _:
|
||||
raise AssertionError()
|
||||
|
||||
# patterns
|
||||
def p_closed_pattern(self, p):
|
||||
"""
|
||||
closed_pattern : literal_pattern
|
||||
| capture_and_wildcard_pattern
|
||||
| group_pattern
|
||||
| sequence_pattern
|
||||
| value_pattern
|
||||
| class_pattern
|
||||
| mapping_pattern
|
||||
"""
|
||||
# productions from closed_pattern to regex_pattern and safe_transform_pattern are located below
|
||||
|
||||
p[0] = p[1]
|
||||
|
||||
def p_patterns(self, p):
|
||||
"""
|
||||
patterns : pattern
|
||||
| open_sequence_pattern
|
||||
"""
|
||||
p[0] = p[1]
|
||||
|
||||
def p_pattern(self, p):
|
||||
"""
|
||||
pattern : or_pattern
|
||||
| as_pattern
|
||||
"""
|
||||
p[0] = p[1]
|
||||
|
||||
def p_or_pattern(self, p):
|
||||
"""
|
||||
or_pattern : or_pattern_list
|
||||
"""
|
||||
|
||||
_, or_pattern_list = p
|
||||
|
||||
match or_pattern_list:
|
||||
case [single_value]:
|
||||
p[0] = single_value
|
||||
case multiple_values:
|
||||
p[0] = ast.MatchOr(patterns=multiple_values, **self.get_line_cols(p, 1))
|
||||
|
||||
def p_or_pattern_list(self, p):
|
||||
"""
|
||||
or_pattern_list : closed_pattern
|
||||
| closed_pattern PIPE or_pattern_list
|
||||
"""
|
||||
match list(p):
|
||||
case [_, closed_pattern]:
|
||||
p[0] = [closed_pattern]
|
||||
case [_, closed_pattern, "|", or_pattern_list]:
|
||||
p[0] = [closed_pattern] + or_pattern_list
|
||||
|
||||
# group pattern
|
||||
def p_group_pattern(self, p):
|
||||
"""
|
||||
group_pattern : LPAREN pattern RPAREN
|
||||
"""
|
||||
_, _, pattern, _ = p
|
||||
p[0] = pattern
|
||||
|
||||
# literal pattern
|
||||
def p_literal_pattern(self, p):
|
||||
"""
|
||||
literal_pattern : literal_expr
|
||||
"""
|
||||
|
||||
match p[1]:
|
||||
case None | True | False:
|
||||
p[0] = ast.MatchSingleton(value=p[1], **self.get_line_cols(p, 1))
|
||||
case _:
|
||||
p[0] = ast.MatchValue(value=p[1], **self.get_line_cols(p, 1))
|
||||
|
||||
def p_literal_expr_number_or_string_literal_list(self, p):
|
||||
"""
|
||||
literal_expr : complex_number
|
||||
| string_literal_list
|
||||
"""
|
||||
|
||||
p[0] = p[1]
|
||||
|
||||
match p[1]:
|
||||
case ast.JoinedStr():
|
||||
|
||||
raise AssertionError("patterns may not match formatted string literals")
|
||||
# TODO: raise SyntaxError instead
|
||||
# (doing so currently somehow causes an IndexError in tools.py:get_logical_line)
|
||||
|
||||
# TODO: f"hi" "hi" does not parse in xonsh
|
||||
|
||||
def p_literal_expr_none_or_true_or_false(self, p):
|
||||
"""
|
||||
literal_expr : none_tok
|
||||
| true_tok
|
||||
| false_tok
|
||||
"""
|
||||
|
||||
match p[1].value:
|
||||
case "None":
|
||||
value = None
|
||||
case "True":
|
||||
value = True
|
||||
case "False":
|
||||
value = False
|
||||
case _:
|
||||
raise AssertionError()
|
||||
|
||||
p[0] = value
|
||||
|
||||
def p_complex_number(self, p):
|
||||
"""
|
||||
complex_number : number
|
||||
| MINUS number
|
||||
| number PLUS number
|
||||
| number MINUS number
|
||||
| MINUS number PLUS number
|
||||
| MINUS number MINUS number
|
||||
"""
|
||||
|
||||
ops = {"+": ast.Add(), "-": ast.Sub()}
|
||||
build_complex = False
|
||||
loc = self.get_line_cols(p, 1)
|
||||
|
||||
match list(p):
|
||||
case [_, x]:
|
||||
p[0] = x
|
||||
case [_, "-", x]:
|
||||
p[0] = ast.UnaryOp(op=ast.USub(), operand=x, **loc)
|
||||
case [_, left, ("+" | "-") as op_char, right]:
|
||||
build_complex = True
|
||||
negate_left_side = False
|
||||
case [_, "-", left, ("+" | "-") as op_char, right]:
|
||||
build_complex = True
|
||||
negate_left_side = True
|
||||
case _:
|
||||
raise AssertionError()
|
||||
|
||||
if build_complex:
|
||||
# TODO raise syntax error instead (see reason in p_literal_expr_number_or_string_literal_list)
|
||||
assert isinstance(
|
||||
right.value, complex
|
||||
), "right part of complex literal must be imaginary"
|
||||
|
||||
if negate_left_side:
|
||||
left = ast.UnaryOp(op=ast.USub(), operand=left, **loc)
|
||||
|
||||
p[0] = ast.BinOp(left=left, op=ops[op_char], right=right, **loc)
|
||||
|
||||
# capture- and wildcard-pattern
|
||||
def p_as_pattern(self, p):
|
||||
"""
|
||||
as_pattern : or_pattern AS capture_target_name
|
||||
"""
|
||||
|
||||
_, or_pattern, _, name = p
|
||||
|
||||
p[0] = ast.MatchAs(pattern=or_pattern, name=name, **self.get_line_cols(p, 1))
|
||||
|
||||
def p_capture_target_name(self, p):
|
||||
"""
|
||||
capture_target_name : name_str
|
||||
"""
|
||||
name = p[1]
|
||||
if name == "_":
|
||||
self._set_error_at_production_index(
|
||||
"can't capture name '_' in patterns", p, 1
|
||||
)
|
||||
p[0] = name
|
||||
|
||||
def p_capture_and_wildcard_pattern(self, p):
|
||||
"""
|
||||
capture_and_wildcard_pattern : name_str
|
||||
"""
|
||||
# TODO: according to the spec we would need the negative lookahead !('.' | '(' | '=')
|
||||
# (also in p_star_pattern, p_value_pattern)
|
||||
# but parsing seems to work just fine
|
||||
|
||||
_, name = p
|
||||
|
||||
target = name if name != "_" else None
|
||||
|
||||
p[0] = ast.MatchAs(name=target, **self.get_line_cols(p, 1))
|
||||
|
||||
# sequence pattern
|
||||
def p_sequence_pattern_square_brackets(self, p):
|
||||
"""
|
||||
sequence_pattern : LBRACKET maybe_sequence_pattern RBRACKET
|
||||
| LBRACKET RBRACKET
|
||||
| LPAREN open_sequence_pattern RPAREN
|
||||
| LPAREN RPAREN
|
||||
"""
|
||||
|
||||
match list(p):
|
||||
case [_, _, ast.MatchSequence() as seq, _]:
|
||||
p[0] = seq
|
||||
case [_, _, single_item, _]:
|
||||
p[0] = ast.MatchSequence(
|
||||
patterns=[single_item], **self.get_line_cols(p, 1)
|
||||
)
|
||||
case [_, _, _]:
|
||||
p[0] = ast.MatchSequence(patterns=[], **self.get_line_cols(p, 1))
|
||||
case _:
|
||||
raise AssertionError()
|
||||
|
||||
def p_maybe_sequence_pattern(self, p):
|
||||
"""
|
||||
maybe_sequence_pattern : maybe_star_pattern comma_opt
|
||||
| maybe_star_pattern COMMA maybe_sequence_pattern
|
||||
"""
|
||||
|
||||
match list(p):
|
||||
case [_, maybe_star_pattern, ","]:
|
||||
p[0] = ast.MatchSequence(
|
||||
patterns=[maybe_star_pattern], **self.get_line_cols(p, 1)
|
||||
)
|
||||
case [_, maybe_star_pattern, None]:
|
||||
p[0] = maybe_star_pattern
|
||||
case [
|
||||
_,
|
||||
maybe_star_pattern,
|
||||
",",
|
||||
ast.MatchSequence(patterns=list(maybe_sequence_pattern)),
|
||||
]:
|
||||
p[0] = ast.MatchSequence(
|
||||
patterns=[maybe_star_pattern] + maybe_sequence_pattern,
|
||||
**self.get_line_cols(p, 1),
|
||||
)
|
||||
case [_, maybe_star_pattern, ",", maybe_sequence_pattern]:
|
||||
p[0] = ast.MatchSequence(
|
||||
patterns=[maybe_star_pattern, maybe_sequence_pattern],
|
||||
**self.get_line_cols(p, 1),
|
||||
)
|
||||
case _:
|
||||
raise AssertionError()
|
||||
|
||||
def p_open_sequence_pattern(self, p):
    """
    open_sequence_pattern : maybe_star_pattern COMMA
                          | maybe_star_pattern COMMA maybe_sequence_pattern
    """
    # Open (unparenthesized) sequences are assembled by exactly the same
    # logic as bracketed ones, so delegate wholesale.
    build = self.p_maybe_sequence_pattern
    build(p)
|
||||
|
||||
def p_maybe_star_pattern(self, p):
    """
    maybe_star_pattern : pattern
                       | star_pattern
    """
    # Both alternatives are already complete pattern nodes; pass through.
    result = p[1]
    p[0] = result
|
||||
|
||||
def p_star_pattern(self, p):
|
||||
"""
|
||||
star_pattern : TIMES name_str
|
||||
"""
|
||||
|
||||
_, _, name = p
|
||||
target = name if name != "_" else None
|
||||
|
||||
p[0] = ast.MatchStar(name=target, **self.get_line_cols(p, 1))
|
||||
|
||||
def p_value_pattern(self, p):
|
||||
"""
|
||||
value_pattern : attr_name_with
|
||||
"""
|
||||
|
||||
p[0] = ast.MatchValue(value=p[1], **self.get_line_cols(p, 1))
|
||||
|
||||
# This is implemented via this 'chain' grammer since implementing the grammar from the spec verbatim leads to bad parser states (regarding comma tokens)
|
||||
def p_class_pattern(self, p):
|
||||
"""
|
||||
class_pattern : attr_name LPAREN class_pattern_positional_part_start RPAREN
|
||||
"""
|
||||
|
||||
positional_patterns, keyword_patterns_key_value_tuple_list = p[3]
|
||||
|
||||
if keyword_patterns_key_value_tuple_list:
|
||||
# transpose, e.g. [ (a, 1), (b, 2) ] to [a, b], [1, 2]
|
||||
kwd_attrs, kwd_patterns = list(zip(*keyword_patterns_key_value_tuple_list))
|
||||
else:
|
||||
kwd_attrs, kwd_patterns = [], []
|
||||
|
||||
p[0] = ast.MatchClass(
|
||||
cls=p[1],
|
||||
patterns=positional_patterns,
|
||||
kwd_attrs=list(kwd_attrs),
|
||||
kwd_patterns=list(kwd_patterns),
|
||||
**self.get_line_cols(p, 1),
|
||||
)
|
||||
|
||||
# returns ( [pattern], [ (name, pattern) ] )
def p_class_pattern_positional_part_start(self, p):
    """
    class_pattern_positional_part_start :
                                        | pattern
                                        | pattern COMMA class_pattern_positional_part
                                        | name_str EQUALS pattern
                                        | name_str EQUALS pattern COMMA class_pattern_keyword_part
    """
    # Dispatch on production length; the two length-4 forms are told apart
    # by the separator token ("," continues positionals, "=" starts keywords).
    n = len(p)
    if n == 1:
        # Empty argument list.
        p[0] = ([], [])
    elif n == 2:
        # A single positional pattern.
        p[0] = ([p[1]], [])
    elif n == 4 and p[2] == ",":
        # Positional pattern followed by the rest of the chain.
        more_positional, keywords = p[3]
        p[0] = ([p[1]] + more_positional, keywords)
    elif n == 4 and p[2] == "=":
        # First keyword argument; no positionals at all.
        p[0] = ([], [(p[1], p[3])])
    elif n == 6 and p[2] == "=" and p[4] == ",":
        # Keyword argument followed by further keyword arguments.
        p[0] = ([], [(p[1], p[3])] + p[5])
    else:
        raise AssertionError()
|
||||
|
||||
# returns ( [pattern], [ (name, pattern) ] )
def p_class_pattern_positional_part_skip(self, p):
    """
    class_pattern_positional_part : class_pattern_keyword_part
    """
    # No positional patterns remain; everything from here on is keywords.
    keywords = p[1]
    p[0] = ([], keywords)
|
||||
|
||||
# returns ( [pattern], [ (name, pattern) ] )
def p_class_pattern_positional_part(self, p):
    """
    class_pattern_positional_part : pattern
                                  | pattern COMMA class_pattern_positional_part
    """
    if len(p) == 2:
        # Last positional pattern in the chain.
        p[0] = ([p[1]], [])
    elif len(p) == 4 and p[2] == ",":
        # Prepend this pattern onto the recursively-built tail.
        more_positional, keywords = p[3]
        p[0] = ([p[1]] + more_positional, keywords)
    else:
        raise AssertionError()
|
||||
|
||||
# returns [ (name, pattern) ]
def p_class_pattern_keyword_part(self, p):
    """
    class_pattern_keyword_part :
                               | COMMA
                               | name_str EQUALS pattern
                               | name_str EQUALS pattern COMMA class_pattern_keyword_part
    """
    n = len(p)
    if n == 1 or (n == 2 and p[1] == ","):
        # Empty tail -- possibly just a trailing comma.
        p[0] = []
    elif n == 4 and p[2] == "=":
        # A single keyword argument.
        p[0] = [(p[1], p[3])]
    elif n == 6 and p[2] == "=" and p[4] == ",":
        # Keyword argument followed by the rest of the keyword chain.
        p[0] = [(p[1], p[3])] + p[5]
    else:
        raise AssertionError()
|
||||
|
||||
# Mapping pattern
|
||||
|
||||
def p_mapping_pattern(self, p):
|
||||
"""
|
||||
mapping_pattern : LBRACE mapping_pattern_args_start RBRACE
|
||||
"""
|
||||
|
||||
_, _, (keys, patterns, rest), _ = p
|
||||
|
||||
p[0] = ast.MatchMapping(
|
||||
keys=keys, patterns=patterns, rest=rest, **self.get_line_cols(p, 1)
|
||||
)
|
||||
|
||||
# see p_class_pattern for rationale
def p_mapping_pattern_args_start(self, p):
    """
    mapping_pattern_args_start :
                               | key_value_pattern
                               | key_value_pattern COMMA mapping_pattern_args_item_part
                               | double_star_pattern
    """
    # Result triple: (keys, patterns, rest-capture name or None).
    n = len(p)
    if n == 1:
        # Empty mapping pattern.
        p[0] = [], [], None
    elif n == 2 and isinstance(p[1], str):
        # A lone "**rest": double_star_pattern yields the bare name string.
        p[0] = [], [], p[1]
    elif n == 2:
        # A single key/value pair.
        key, value = p[1]
        p[0] = [key], [value], None
    elif n == 4 and p[2] == ",":
        # Key/value pair followed by the rest of the item chain.
        key, value = p[1]
        keys, values, rest = p[3]
        p[0] = [key] + keys, [value] + values, rest
    else:
        raise AssertionError()
|
||||
|
||||
def p_mapping_pattern_args_item_part_skip(self, p):
    """
    mapping_pattern_args_item_part :
                                   | double_star_pattern
    """
    # Result triple: (keys, patterns, rest-capture name or None).
    if len(p) == 1:
        # Empty tail.
        p[0] = [], [], None
    elif len(p) == 2:
        # The tail is just the "**rest" capture name.
        p[0] = [], [], p[1]
    else:
        raise AssertionError()
|
||||
|
||||
def p_mapping_pattern_args_item_part(self, p):
    """
    mapping_pattern_args_item_part : key_value_pattern
                                   | key_value_pattern COMMA mapping_pattern_args_item_part
    """
    # Result triple: (keys, patterns, rest-capture name or None).
    if len(p) == 2:
        key, value = p[1]
        p[0] = [key], [value], None
    elif len(p) == 4 and p[2] == ",":
        # Prepend this pair onto the recursively-built tail.
        key, value = p[1]
        keys, values, rest = p[3]
        p[0] = [key] + keys, [value] + values, rest
    else:
        raise AssertionError()
|
||||
|
||||
def p_double_star_pattern(self, p):
    """
    double_star_pattern : POW capture_target_name comma_opt
    """
    # The production's value is just the capture name; the optional
    # trailing comma is discarded.
    name = p[2]
    p[0] = name
|
||||
|
||||
def p_key_value_pattern(self, p):
    """
    key_value_pattern : literal_expr COLON pattern
                      | attr_name_with COLON pattern
    """
    # Pair the key expression with its pattern; consumed by the
    # mapping-pattern builders above.
    p[0] = (p[1], p[3])
|
||||
|
|
|
@ -114,6 +114,8 @@ __all__ = token.__all__ + [ # type:ignore
|
|||
"ATEQUAL",
|
||||
"DOLLARNAME",
|
||||
"IOREDIRECT",
|
||||
"MATCH",
|
||||
"CASE",
|
||||
]
|
||||
HAS_ASYNC = PYTHON_VERSION_INFO < (3, 7, 0)
|
||||
if HAS_ASYNC:
|
||||
|
@ -151,6 +153,12 @@ N_TOKENS += 1
|
|||
ATEQUAL = N_TOKENS
|
||||
tok_name[N_TOKENS] = "ATEQUAL"
|
||||
N_TOKENS += 1
|
||||
MATCH = N_TOKENS
|
||||
tok_name[N_TOKENS] = "MATCH"
|
||||
N_TOKENS += 1
|
||||
CASE = N_TOKENS
|
||||
tok_name[N_TOKENS] = "CASE"
|
||||
N_TOKENS += 1
|
||||
_xonsh_tokens = {
|
||||
"?": "QUESTION",
|
||||
"@=": "ATEQUAL",
|
||||
|
@ -165,6 +173,8 @@ _xonsh_tokens = {
|
|||
"${": "DOLLARLBRACE",
|
||||
"??": "DOUBLEQUESTION",
|
||||
"@$(": "ATDOLLARLPAREN",
|
||||
"match": "MATCH",
|
||||
"case": "CASE",
|
||||
}
|
||||
|
||||
additional_parenlevs = frozenset({"@(", "!(", "![", "$(", "$[", "${", "@$("})
|
||||
|
|
Loading…
Add table
Reference in a new issue