Py 3.12 preliminary support (#5156)

* refactor: project requires 3.9+

changed as per NEP-29
https://numpy.org/neps/nep-0029-deprecation_policy.html

* test: nose-style tests are deprecated in pytest now

* fix: deprecation of ast.Str and ast.Bytes and .s attribute access

* fix: deprecation of ast.Num,ast.NameConstant,ast.Ellipsis

* refactor: upgrade code to be py39+ using ruff

the changes are auto-generated

* refactor: remove typing.Annotated compatibility code

* fix: temporarily disable having xonsh syntax inside f-strings

* test: skip failing tests

There is no workaround for this version; it might get solved in the
final release, though.

* refactor: make XonshSession.completer lazily populated

this speeds up the tests, as cmd_cache would not be actively populated
when used in the default_completers function

* refactor: make presence of walrus operator default
This commit is contained in:
Noorhteen Raja NJ 2023-07-04 22:18:37 +05:30 committed by GitHub
parent 041bfb2d29
commit 769dfbe6aa
Failed to generate hash of commit
45 changed files with 279 additions and 236 deletions

View file

@ -17,10 +17,10 @@ jobs:
fail-fast: false
matrix:
python-version:
- "3.8"
- "3.9"
- "3.10"
- "3.11"
- "3.12-dev"
steps:
- uses: actions/checkout@v3
- name: setup-python
@ -32,7 +32,7 @@ jobs:
- name: install pypa/build
run: pip install build
- name: build sdist(tarball) to dist/
if: ${{ startsWith(matrix.python-version, '3.10') }}
if: ${{ startsWith(matrix.python-version, '3.11') }}
run: python -m build . --sdist
- name: build bdist(wheel) to dist/
run: python -m build . --wheel

View file

@ -32,10 +32,10 @@ jobs:
- macOS-latest
- windows-latest
python-version:
- "3.8"
- "3.9"
- "3.10"
- "3.11"
- "3.12-dev"
name: Test Python ${{ matrix.python-version }} ${{ matrix.os }}
steps:
- uses: actions/checkout@v3
@ -54,20 +54,20 @@ jobs:
if: ${{ github.event_name == 'workflow_dispatch' && github.event.inputs.tmate_debug_enabled }}
timeout-minutes: 15
- name: Run QA Checks
if: ${{ startsWith(matrix.os, 'ubuntu') && startsWith(matrix.python-version, '3.10') }}
if: ${{ startsWith(matrix.os, 'ubuntu') && startsWith(matrix.python-version, '3.11') }}
run: python -m xonsh run-tests.xsh validate-news-items
- name: Run tests
if: ${{ !startsWith(matrix.python-version, '3.10') }}
if: ${{ !startsWith(matrix.python-version, '3.11') }}
run: |
python -m pip install . --no-deps
python -m xonsh run-tests.xsh test -- --timeout=240
- name: Run tests with coverage
if: ${{ startsWith(matrix.python-version, '3.10') }}
if: ${{ startsWith(matrix.python-version, '3.11') }}
run: |
python -m pip install -e . --no-deps
python -m xonsh run-tests.xsh test --report-coverage -- --timeout=240
- name: Upload coverage to Codecov
if: ${{ startsWith(matrix.python-version, '3.10') }}
if: ${{ startsWith(matrix.python-version, '3.11') }}
uses: codecov/codecov-action@v3
with:
verbose: true

23
news/py39+.rst Normal file
View file

@ -0,0 +1,23 @@
**Added:**
* Added preliminary support for Python v3.12
**Changed:**
* Dropped support for Python v3.8
**Deprecated:**
* <news item>
**Removed:**
* <news item>
**Fixed:**
* <news item>
**Security:**
* <news item>

View file

@ -14,7 +14,7 @@ description = "Python-powered, cross-platform, Unix-gazing shell"
authors = [{ name = "Anthony Scopatz" }, { email = "scopatz@gmail.com" }]
maintainers = [{ name = "Anthony Scopatz" }, { email = "scopatz@gmail.com" }]
license = { text = "BSD 2-Clause License" }
requires-python = ">=3.8"
requires-python = ">=3.9"
[tool.setuptools.dynamic]
version = {attr = "xonsh.__version__"}

View file

@ -40,7 +40,7 @@ def test_skipper_command(completion_context_parse):
def test_skipper_arg(completion_context_parse, xession, monkeypatch):
monkeypatch.setattr(xession.shell.shell, "completer", Completer(), raising=False)
bash_completer_mock = Mock()
monkeypatch.setattr(xession, "completers", {"bash": bash_completer_mock})
monkeypatch.setattr(xession, "_completers", {"bash": bash_completer_mock})
bash_completer_mock.return_value = {"--count "}

View file

@ -10,7 +10,7 @@ def mock_completer(monkeypatch, xsh_with_aliases):
xsh = xsh_with_aliases
monkeypatch.setattr(
xsh, "completers", {"one": dummy_completer, "two": complete_aliases}
xsh, "_completers", {"one": dummy_completer, "two": complete_aliases}
)
monkeypatch.setattr(xsh, "ctx", {"three": lambda: 1, "four": lambda: 2})
return xsh

View file

@ -2,7 +2,6 @@ import os
import subprocess as sp
import textwrap
from pathlib import Path
from typing import Dict, List
from unittest.mock import Mock
import pytest
@ -15,7 +14,7 @@ VC_BRANCH = {
"hg": {"default"},
"fossil": {"trunk"},
}
VC_INIT: Dict[str, List[List[str]]] = {
VC_INIT: dict[str, list[list[str]]] = {
# A sequence of commands required to initialize a repository
"git": [["init"]],
"hg": [["init"]],

View file

@ -19,7 +19,7 @@ def completer():
@pytest.fixture
def completers_mock(xession, monkeypatch):
completers = {}
monkeypatch.setattr(xession, "completers", completers)
monkeypatch.setattr(xession, "_completers", completers)
return completers

View file

@ -5,7 +5,7 @@ import textwrap
import pytest
from xonsh.ast import AST, Call, Pass, Str, With
from xonsh.ast import AST, Call, Pass, With, is_const_str
from xonsh.parser import Parser
from xonsh.parsers.fstring_adaptor import FStringAdaptor
from xonsh.pytest.tools import (
@ -168,6 +168,8 @@ def test_string_literal_concat(first_prefix, second_prefix, check_ast):
def test_f_env_var(check_xonsh_ast):
if VER_MAJOR_MINOR > (3, 11):
pytest.xfail("f-string with special syntax are not supported yet")
check_xonsh_ast({}, 'f"{$HOME}"', run=False)
check_xonsh_ast({}, "f'{$XONSH_DEBUG}'", run=False)
check_xonsh_ast({}, 'F"{$PATH} and {$XONSH_DEBUG}"', run=False)
@ -201,13 +203,14 @@ bar"""''',
bar"""''',
"foo\n_/foo/bar_\nbar",
),
("f'{$HOME=}'", "$HOME='/foo/bar'"),
]
if VER_MAJOR_MINOR >= (3, 8):
fstring_adaptor_parameters.append(("f'{$HOME=}'", "$HOME='/foo/bar'"))
@pytest.mark.parametrize("inp, exp", fstring_adaptor_parameters)
def test_fstring_adaptor(inp, exp, xsh, monkeypatch):
if VER_MAJOR_MINOR > (3, 11):
pytest.xfail("f-string with special syntax are not supported yet")
joined_str_node = FStringAdaptor(inp, "f").run()
assert isinstance(joined_str_node, ast.JoinedStr)
node = ast.Expression(body=joined_str_node)
@ -2795,7 +2798,7 @@ def test_macro_call_one_arg(check_xonsh_ast, s):
assert isinstance(tree, AST)
args = tree.body.args[1].elts
assert len(args) == 1
assert args[0].s == s.strip()
assert args[0].value == s.strip()
@pytest.mark.parametrize("s,t", itertools.product(MACRO_ARGS[::2], MACRO_ARGS[1::2]))
@ -2805,8 +2808,8 @@ def test_macro_call_two_args(check_xonsh_ast, s, t):
assert isinstance(tree, AST)
args = tree.body.args[1].elts
assert len(args) == 2
assert args[0].s == s.strip()
assert args[1].s == t.strip()
assert args[0].value == s.strip()
assert args[1].value == t.strip()
@pytest.mark.parametrize(
@ -2818,9 +2821,9 @@ def test_macro_call_three_args(check_xonsh_ast, s, t, u):
assert isinstance(tree, AST)
args = tree.body.args[1].elts
assert len(args) == 3
assert args[0].s == s.strip()
assert args[1].s == t.strip()
assert args[2].s == u.strip()
assert args[0].value == s.strip()
assert args[1].value == t.strip()
assert args[2].value == u.strip()
@pytest.mark.parametrize("s", MACRO_ARGS)
@ -2830,7 +2833,7 @@ def test_macro_call_one_trailing(check_xonsh_ast, s):
assert isinstance(tree, AST)
args = tree.body.args[1].elts
assert len(args) == 1
assert args[0].s == s.strip()
assert args[0].value == s.strip()
@pytest.mark.parametrize("s", MACRO_ARGS)
@ -2840,7 +2843,7 @@ def test_macro_call_one_trailing_space(check_xonsh_ast, s):
assert isinstance(tree, AST)
args = tree.body.args[1].elts
assert len(args) == 1
assert args[0].s == s.strip()
assert args[0].value == s.strip()
SUBPROC_MACRO_OC = [("!(", ")"), ("$(", ")"), ("![", "]"), ("$[", "]")]
@ -2853,7 +2856,7 @@ def test_empty_subprocbang(opener, closer, body, check_xonsh_ast):
assert isinstance(tree, AST)
cmd = tree.body.args[0].elts
assert len(cmd) == 2
assert cmd[1].s == ""
assert cmd[1].value == ""
@pytest.mark.parametrize("opener, closer", SUBPROC_MACRO_OC)
@ -2863,7 +2866,7 @@ def test_single_subprocbang(opener, closer, body, check_xonsh_ast):
assert isinstance(tree, AST)
cmd = tree.body.args[0].elts
assert len(cmd) == 2
assert cmd[1].s == "x"
assert cmd[1].value == "x"
@pytest.mark.parametrize("opener, closer", SUBPROC_MACRO_OC)
@ -2875,7 +2878,7 @@ def test_arg_single_subprocbang(opener, closer, body, check_xonsh_ast):
assert isinstance(tree, AST)
cmd = tree.body.args[0].elts
assert len(cmd) == 3
assert cmd[2].s == "x"
assert cmd[2].value == "x"
@pytest.mark.parametrize("opener, closer", SUBPROC_MACRO_OC)
@ -2890,7 +2893,7 @@ def test_arg_single_subprocbang_nested(
assert isinstance(tree, AST)
cmd = tree.body.args[0].elts
assert len(cmd) == 3
assert cmd[2].s == "x"
assert cmd[2].value == "x"
@pytest.mark.parametrize("opener, closer", SUBPROC_MACRO_OC)
@ -2921,7 +2924,7 @@ def test_many_subprocbang(opener, closer, body, check_xonsh_ast):
assert isinstance(tree, AST)
cmd = tree.body.args[0].elts
assert len(cmd) == 2
assert cmd[1].s == body.partition("!")[-1].strip()
assert cmd[1].value == body.partition("!")[-1].strip()
WITH_BANG_RAWSUITES = [
@ -2955,7 +2958,7 @@ def test_withbang_single_suite(body, check_xonsh_ast):
assert isinstance(wither.body[0], Pass)
assert len(wither.items) == 1
item = wither.items[0]
s = item.context_expr.args[1].s
s = item.context_expr.args[1].value
assert s == body
@ -2971,7 +2974,7 @@ def test_withbang_as_single_suite(body, check_xonsh_ast):
assert len(wither.items) == 1
item = wither.items[0]
assert item.optional_vars.id == "y"
s = item.context_expr.args[1].s
s = item.context_expr.args[1].value
assert s == body
@ -2993,7 +2996,7 @@ def test_withbang_single_suite_trailing(body, check_xonsh_ast):
assert isinstance(wither.body[0], Pass)
assert len(wither.items) == 1
item = wither.items[0]
s = item.context_expr.args[1].s
s = item.context_expr.args[1].value
assert s == body + "\n"
@ -3016,7 +3019,7 @@ def test_withbang_single_simple(body, check_xonsh_ast):
assert isinstance(wither.body[0], Pass)
assert len(wither.items) == 1
item = wither.items[0]
s = item.context_expr.args[1].s
s = item.context_expr.args[1].value
assert s == body
@ -3032,7 +3035,7 @@ def test_withbang_single_simple_opt(body, check_xonsh_ast):
assert len(wither.items) == 1
item = wither.items[0]
assert item.optional_vars.id == "y"
s = item.context_expr.args[1].s
s = item.context_expr.args[1].value
assert s == body
@ -3050,7 +3053,7 @@ def test_withbang_as_many_suite(body, check_xonsh_ast):
for i, targ in enumerate("abc"):
item = wither.items[i]
assert item.optional_vars.id == targ
s = item.context_expr.args[1].s
s = item.context_expr.args[1].value
assert s == body
@ -3064,8 +3067,8 @@ def test_subproc_raw_str_literal(check_xonsh_ast):
tree = check_xonsh_ast({}, "!(echo r'$foo')", run=False, return_obs=True)
assert isinstance(tree, AST)
subproc = tree.body
assert isinstance(subproc.args[0].elts[1], Str)
assert subproc.args[0].elts[1].s == "$foo"
assert is_const_str(subproc.args[0].elts[1])
assert subproc.args[0].elts[1].value == "$foo"
# test invalid expressions

View file

@ -10,7 +10,7 @@ import pytest
from xonsh import __version__
from xonsh.lexer import Lexer
from xonsh.platform import HAS_PYGMENTS, ON_WINDOWS
from xonsh.platform import HAS_PYGMENTS, ON_WINDOWS, PYTHON_VERSION_INFO
from xonsh.pytest.tools import skip_if_on_windows
from xonsh.tools import (
EnvPath,
@ -899,7 +899,19 @@ def test_str_to_env_path(inp, exp):
assert exp == obs.paths
@pytest.mark.parametrize("inp, exp", [(pathlib.Path("///tmp"), "/tmp")])
@pytest.mark.parametrize(
"inp, exp",
[
pytest.param(
pathlib.Path("///tmp"),
"/tmp",
marks=pytest.mark.skipif(
ON_WINDOWS and PYTHON_VERSION_INFO > (3, 11),
reason="Python 3.12 on windows changed its behavior of resolving additional slashes in paths",
),
),
],
)
def test_path_to_str(inp, exp):
obs = path_to_str(inp)
if ON_WINDOWS:

View file

@ -1,14 +1,15 @@
import os
import tempfile
from unittest import TestCase
from xonsh.tools import ON_WINDOWS
from xonsh.xoreutils import _which
class TestWhich:
class TestWhich(TestCase):
# Tests for the _whichgen function which is the only thing we
# use from the _which.py module.
def setup(self):
def setUp(self):
# Setup two folders with some test files.
self.testdirs = [tempfile.TemporaryDirectory(), tempfile.TemporaryDirectory()]
if ON_WINDOWS:
@ -24,7 +25,7 @@ class TestWhich:
f.write(b"")
os.chmod(path, 0o755)
def teardown_module(self):
def tearDown(self):
for d in self.testdirs:
d.cleanup()

View file

@ -409,7 +409,7 @@ def xonsh_reset(args, stdin=None):
def source_foreign_fn(
shell: str,
files_or_code: Annotated[tp.List[str], Arg(nargs="+")],
files_or_code: Annotated[list[str], Arg(nargs="+")],
interactive=True,
login=False,
envcmd=None,
@ -481,7 +481,7 @@ def source_foreign_fn(
if not suppress_skip_message
else suppress_skip_message
)
files: tp.Tuple[str, ...] = ()
files: tuple[str, ...] = ()
if prevcmd:
pass # don't change prevcmd if given explicitly
elif os.path.isfile(files_or_code[0]):
@ -607,7 +607,7 @@ def source_alias(args, stdin=None):
def source_cmd_fn(
files: Annotated[tp.List[str], Arg(nargs="+")],
files: Annotated[list[str], Arg(nargs="+")],
login=False,
aliascmd=None,
extra_args="",
@ -690,7 +690,7 @@ source_cmd = ArgParserAlias(func=source_cmd_fn, has_args=True, prog="source-cmd"
def xexec_fn(
command: Annotated[tp.List[str], Arg(nargs=argparse.REMAINDER)],
command: Annotated[list[str], Arg(nargs=argparse.REMAINDER)],
login=False,
clean=False,
name="",

View file

@ -25,7 +25,6 @@ from ast import (
BitXor,
BoolOp,
Break,
Bytes,
Call,
ClassDef,
Compare,
@ -72,13 +71,12 @@ from ast import (
Module,
Mult,
Name,
NameConstant,
NamedExpr,
NodeTransformer,
Nonlocal,
Not,
NotEq,
NotIn,
Num,
Or,
Pass,
Pow,
@ -90,7 +88,6 @@ from ast import (
Slice,
Starred,
Store,
Str,
Sub,
Subscript,
Try,
@ -113,15 +110,10 @@ from ast import (
walk,
withitem,
)
from ast import Ellipsis as EllipsisNode
from xonsh.built_ins import XSH
from xonsh.platform import PYTHON_VERSION_INFO
from xonsh.tools import find_next_break, get_logical_line, subproc_toks
if PYTHON_VERSION_INFO > (3, 8):
from ast import NamedExpr # type:ignore
STATEMENTS = (
FunctionDef,
ClassDef,
@ -148,6 +140,38 @@ STATEMENTS = (
)
def const_str(s: str, **kwargs):
    """Build a string ``Constant`` node (replacement for the deprecated ``ast.Str``).

    The node is tagged with ``kind="str"`` so the matching ``is_const_*``
    predicate can distinguish it from the other constant flavors.
    """
    return Constant(s, "str", **kwargs)


def is_const_str(node):
    """Return True if *node* is a ``Constant`` tagged as a string."""
    if not isinstance(node, Constant):
        return False
    return node.kind == "str"


def const_bytes(s: str, **kwargs):
    """Build a bytes ``Constant`` node (replacement for the deprecated ``ast.Bytes``)."""
    return Constant(s, "bytes", **kwargs)


def is_const_bytes(node):
    """Return True if *node* is a ``Constant`` tagged as bytes."""
    if not isinstance(node, Constant):
        return False
    return node.kind == "bytes"


def const_num(n, **kwargs):
    """Build a numeric ``Constant`` node (replacement for the deprecated ``ast.Num``)."""
    return Constant(n, "num", **kwargs)


def is_const_num(node):
    """Return True if *node* is a ``Constant`` tagged as a number."""
    if not isinstance(node, Constant):
        return False
    return node.kind == "num"


def const_name(value, **kwargs):
    """Build a name-like ``Constant`` node (replacement for ``ast.NameConstant``)."""
    return Constant(value, "name", **kwargs)


def is_const_name(node):
    """Return True if *node* is a ``Constant`` tagged as a name constant."""
    if not isinstance(node, Constant):
        return False
    return node.kind == "name"
def leftmostname(node):
"""Attempts to find the first name in the tree."""
if isinstance(node, Name):
@ -166,7 +190,7 @@ def leftmostname(node):
rtn = leftmostname(node.targets[0])
elif isinstance(node, AnnAssign):
rtn = leftmostname(node.target)
elif isinstance(node, (Str, Bytes, JoinedStr)):
elif isinstance(node, JoinedStr) or is_const_str(node) or is_const_bytes(node):
# handles case of "./my executable"
rtn = leftmostname(node.s)
elif isinstance(node, Tuple) and len(node.elts) > 0:
@ -670,8 +694,8 @@ def _getblockattr(name, lineno, col):
"getattr",
args=[
Name(id=name, ctx=Load(), lineno=lineno, col_offset=col),
Str(s="__xonsh_block__", lineno=lineno, col_offset=col),
NameConstant(value=False, lineno=lineno, col_offset=col),
const_str(s="__xonsh_block__", lineno=lineno, col_offset=col),
const_name(value=False, lineno=lineno, col_offset=col),
],
lineno=lineno,
col=col,

View file

@ -557,7 +557,7 @@ class XonshSession:
self.commands_cache = None
self.modules_cache = None
self.all_jobs = None
self.completers = None
self._completers = None
self.builtins = None
self._initial_builtin_names = None
@ -567,6 +567,15 @@ class XonshSession:
return
return self.commands_cache.aliases
@property
def completers(self):
"""Returns a list of all available completers. Init when first accessing the attribute"""
if self._completers is None:
from xonsh.completers.init import default_completers
self._completers = default_completers(self.commands_cache)
return self._completers
def _disable_python_exit(self):
# Disable Python interactive quit/exit
if hasattr(builtins, "exit"):
@ -594,7 +603,6 @@ class XonshSession:
Context to start xonsh session with.
"""
from xonsh.commands_cache import CommandsCache
from xonsh.completers.init import default_completers
from xonsh.environ import Env, default_env
if not hasattr(builtins, "__xonsh__"):
@ -628,7 +636,6 @@ class XonshSession:
)
self.link_builtins()
self.builtins_loaded = True
self.completers = default_completers(self.commands_cache)
def flush_on_exit(s=None, f=None):
if self.history is not None:
@ -672,6 +679,7 @@ class XonshSession:
self.unlink_builtins()
delattr(builtins, "__xonsh__")
self.builtins_loaded = False
self._completers = None
def get_default_builtins(execer=None):

View file

@ -12,36 +12,11 @@ import os
import sys
import typing as tp
from collections import defaultdict
from typing import Annotated
from xonsh.built_ins import XSH
from xonsh.completers.tools import RichCompletion
TYPING_ANNOTATED_AVAILABLE = False
"""One can import ``Annotated`` from this module
which adds a stub when it is not available in ``typing``/``typing_extensions`` modules."""
try:
from typing import Annotated # noqa
TYPING_ANNOTATED_AVAILABLE = True
except ImportError:
try:
from typing_extensions import Annotated # type: ignore
TYPING_ANNOTATED_AVAILABLE = True
except ImportError:
T = tp.TypeVar("T") # Declare type variable
class _AnnotatedMeta(type):
def __getitem__(self, item: tp.Tuple[T, tp.Any]) -> T:
if tp.TYPE_CHECKING:
return item[0]
return item[1]
class Annotated(metaclass=_AnnotatedMeta): # type: ignore
pass
class ArgCompleter:
"""Gives a structure to the argparse completers"""
@ -114,8 +89,8 @@ class NumpyDoc:
return NumpyDoc.join(desc), lines[idx + 2 :]
@staticmethod
def get_param_doc(lines: tp.List[str]):
docs: tp.Dict[str, tp.List[str]] = defaultdict(list)
def get_param_doc(lines: list[str]):
docs: dict[str, list[str]] = defaultdict(list)
name = None
while lines:
@ -139,11 +114,11 @@ class NumpyDoc:
_FUNC_NAME = "_func_"
def _get_args_kwargs(annot: tp.Any) -> tp.Tuple[tp.Sequence[str], tp.Dict[str, tp.Any]]:
def _get_args_kwargs(annot: tp.Any) -> tuple[tp.Sequence[str], dict[str, tp.Any]]:
args, kwargs = [], {}
if isinstance(annot, tuple):
args, kwargs = annot
elif TYPING_ANNOTATED_AVAILABLE and "Annotated[" in str(annot):
elif "Annotated[" in str(annot):
if hasattr(annot, "__metadata__"):
args, kwargs = annot.__metadata__[0]
else:
@ -383,7 +358,7 @@ class ArgParser(ap.ArgumentParser):
return parser
def _dispatch_func(func: tp.Callable, ns: tp.Dict[str, tp.Any]):
def _dispatch_func(func: tp.Callable, ns: dict[str, tp.Any]):
"""Final dispatch to the function based on signature."""
sign = inspect.signature(func)
kwargs = {}
@ -440,7 +415,7 @@ class ArgparseCompleter:
self.kwargs = kwargs
@staticmethod
def get_parser(parser, args) -> tp.Tuple[ap.ArgumentParser, tp.Tuple[str, ...]]:
def get_parser(parser, args) -> tuple[ap.ArgumentParser, tuple[str, ...]]:
"""Check for sub-parsers"""
sub_parsers = {}
for act in parser._get_positional_actions():

View file

@ -102,9 +102,9 @@ class CommandsCache(cabc.Mapping):
name on Windows as a list, conserving the ordering in `PATHEXT`.
Returns a list as `name` being the only item in it on other platforms."""
if ON_WINDOWS:
pathext = self.env.get("PATHEXT", [])
pathext = [""] + self.env.get("PATHEXT", [])
name = name.upper()
return [name + ext for ext in ([""] + pathext)]
return [name + ext for ext in pathext]
else:
return [name]
@ -117,7 +117,7 @@ class CommandsCache(cabc.Mapping):
if os.path.isdir(p):
yield p
def _check_changes(self, paths: tp.Tuple[str, ...]):
def _check_changes(self, paths: tuple[str, ...]):
# did PATH change?
yield self._update_paths_cache(paths)
@ -174,7 +174,7 @@ class CommandsCache(cabc.Mapping):
updated = False
for path in paths:
modified_time = os.stat(path).st_mtime
modified_time = os.path.getmtime(path)
if (
(not self.env.get("ENABLE_COMMANDS_CACHE", True))
or (path not in self._paths_cache)
@ -198,7 +198,7 @@ class CommandsCache(cabc.Mapping):
"""Returns the name that would appear in the cache, if it exists."""
if name is None:
return None
cached = pathbasename(name)
cached = pathbasename(name) if os.pathsep in name else name
if ON_WINDOWS:
keys = self.get_possible_names(cached)
cached = next((k for k in keys if k in self._cmds_cache), None)

View file

@ -130,7 +130,7 @@ class Completer:
completing_contextual_command: bool,
lprefix: int,
custom_lprefix: bool,
) -> tp.Tuple[Completion, int]:
) -> tuple[Completion, int]:
if (
completing_contextual_command
and completion_context.command.is_after_closing_quote
@ -171,7 +171,7 @@ class Completer:
@staticmethod
def generate_completions(
completion_context, old_completer_args, trace: bool
) -> tp.Iterator[tp.Tuple[Completion, int]]:
) -> tp.Iterator[tuple[Completion, int]]:
filter_func = get_filter_function()
for name, func in XSH.completers.items():

View file

@ -100,7 +100,7 @@ def _bash_completion_paths_default():
return bcd
_BASH_COMPLETIONS_PATHS_DEFAULT: tp.Tuple[str, ...] = ()
_BASH_COMPLETIONS_PATHS_DEFAULT: tuple[str, ...] = ()
def _get_bash_completions_source(paths=None):

View file

@ -106,8 +106,8 @@ class ModuleReMatcher(ModuleFinder):
def __init__(self, *names: str):
# list of pre-defined patterns. More can be added using the public method ``.wrap``
self._patterns: tp.Dict[str, str] = {}
self._compiled: tp.Dict[str, tp.Pattern] = {}
self._patterns: dict[str, str] = {}
self._compiled: dict[str, tp.Pattern] = {}
super().__init__(*names)
def search_completer(self, cmd: str, cleaned=False):

View file

@ -10,7 +10,6 @@ import inspect
import os
import re
import sys
import typing as tp
from importlib import import_module
from importlib.machinery import all_suffixes
from time import time
@ -127,7 +126,7 @@ def is_possible_submodule(module, attr):
return inspect.ismodule(obj)
def try_import(mod: str, only_modules=False) -> tp.List[str]:
def try_import(mod: str, only_modules=False) -> list[str]:
"""
Try to import given module and return list of potential completions.
"""

View file

@ -11,7 +11,7 @@ from xonsh.completers.tools import RichCompletion, contextual_command_completer
from xonsh.parsers.completion_context import CommandContext
@functools.lru_cache(maxsize=None)
@functools.cache
def get_man_completions_path() -> Path:
env = XSH.env or {}
datadir = Path(env["XONSH_DATA_DIR"]) / "generated_completions" / "man"
@ -32,7 +32,7 @@ def _get_man_page(cmd: str):
return subprocess.check_output(["col", "-b"], stdin=manpage.stdout, env=env)
@functools.lru_cache(maxsize=None)
@functools.cache
def _man_option_string_regex():
return re.compile(
r"(?:(,\s?)|^|(\sor\s))(?P<option>-[\w]|--[\w-]+)(?=\[?(\s|,|=\w+|$))"

View file

@ -136,7 +136,7 @@ def RICH_COMPLETION_DEFAULTS():
Completion = tp.Union[RichCompletion, str]
CompleterResult = tp.Union[tp.Set[Completion], tp.Tuple[tp.Set[Completion], int], None]
CompleterResult = tp.Union[set[Completion], tuple[set[Completion], int], None]
ContextualCompleter = tp.Callable[[CompletionContext], CompleterResult]

View file

@ -13,9 +13,9 @@ from xonsh.events import events
from xonsh.platform import ON_WINDOWS
from xonsh.tools import get_sep
DIRSTACK: tp.List[str] = []
DIRSTACK: list[str] = []
"""A list containing the currently remembered directories."""
_unc_tempDrives: tp.Dict[str, str] = {}
_unc_tempDrives: dict[str, str] = {}
""" drive: sharePath for temp drive letters we create for UNC mapping"""

View file

@ -797,14 +797,14 @@ class Xettings:
"""
@classmethod
def get_settings(cls) -> tp.Iterator[tp.Tuple[VarKeyType, Var]]:
def get_settings(cls) -> tp.Iterator[tuple[VarKeyType, Var]]:
for var_name, var in vars(cls).items():
if not var_name.startswith("__") and var_name.isupper():
yield var.get_key(var_name), var
@staticmethod
def _get_groups(
cls, _seen: tp.Optional[tp.Set["Xettings"]] = None, *bases: "Xettings"
cls, _seen: tp.Optional[set["Xettings"]] = None, *bases: "Xettings"
):
if _seen is None:
_seen = set()
@ -819,9 +819,7 @@ class Xettings:
@classmethod
def get_groups(
cls,
) -> tp.Iterator[
tp.Tuple[tp.Tuple["Xettings", ...], tp.Tuple[tp.Tuple[VarKeyType, Var], ...]]
]:
) -> tp.Iterator[tuple[tuple["Xettings", ...], tuple[tuple[VarKeyType, Var], ...]]]:
yield from Xettings._get_groups(cls)
@classmethod

View file

@ -191,7 +191,7 @@ class HistoryAlias(xcli.ArgParserAlias):
def show(
self,
session: xcli.Annotated[str, xcli.Arg(nargs="?")] = "session",
slices: xcli.Annotated[tp.List[int], xcli.Arg(nargs="*")] = None,
slices: xcli.Annotated[list[int], xcli.Arg(nargs="*")] = None,
datetime_format: tp.Optional[str] = None,
start_time: tp.Optional[str] = None,
end_time: tp.Optional[str] = None,
@ -371,7 +371,7 @@ class HistoryAlias(xcli.ArgParserAlias):
@staticmethod
def gc(
size: xcli.Annotated[tp.Tuple[int, str], xcli.Arg(nargs=2)] = None,
size: xcli.Annotated[tuple[int, str], xcli.Arg(nargs=2)] = None,
force=False,
blocking=True,
):

View file

@ -35,7 +35,7 @@ _jobs_thread_local = threading.local()
# Task queue for the main thread
# The use_main_jobs context manager uses this variable to access the tasks on
# the main thread.
_tasks_main: tp.Deque[int] = collections.deque()
_tasks_main: collections.deque[int] = collections.deque()
@contextlib.contextmanager
@ -57,7 +57,7 @@ def use_main_jobs():
_jobs_thread_local.jobs = old_jobs
def get_tasks() -> tp.Deque[int]:
def get_tasks() -> collections.deque[int]:
try:
return _jobs_thread_local.tasks
except AttributeError:
@ -68,7 +68,7 @@ def get_tasks() -> tp.Deque[int]:
return _jobs_thread_local.tasks
def get_jobs() -> tp.Dict[int, tp.Dict]:
def get_jobs() -> dict[int, dict]:
try:
return _jobs_thread_local.jobs
except AttributeError:

View file

@ -21,7 +21,6 @@ from xonsh.tokenize import (
ENDMARKER,
ERRORTOKEN,
GREATER,
HAS_WALRUS,
INDENT,
IOREDIRECT,
LESS,
@ -97,6 +96,7 @@ def token_map():
"??": "DOUBLE_QUESTION",
"@$": "ATDOLLAR",
"&": "AMPERSAND",
":=": "COLONEQUAL",
}
for op, typ in _op_map.items():
tm[(OP, op)] = typ
@ -108,8 +108,6 @@ def token_map():
tm[NEWLINE] = "NEWLINE"
tm[INDENT] = "INDENT"
tm[DEDENT] = "DEDENT"
if HAS_WALRUS:
tm[(OP, ":=")] = "COLONEQUAL"
# python 3.10 (backwards and name token compatible) tokens
tm[MATCH] = "MATCH"
tm[CASE] = "CASE"
@ -409,7 +407,7 @@ def _new_token(type, value, pos):
class Lexer:
"""Implements a lexer for the xonsh language."""
_tokens: tp.Optional[tp.Tuple[str, ...]] = None
_tokens: tp.Optional[tuple[str, ...]] = None
def __init__(self, tolerant=False):
"""

View file

@ -634,5 +634,6 @@ def setup(
XSH.aliases.update(aliases)
if xontribs:
xontribs_load(xontribs)
tp = XSH.commands_cache.threadable_predictors
tp.update(threadable_predictors)
if threadable_predictors:
XSH.commands_cache.threadable_predictors.update(threadable_predictors)

View file

@ -175,8 +175,8 @@ def lopen_loc(x):
def hasglobstar(x):
"""Returns True if a node has literal '*' for globbing."""
if isinstance(x, ast.Str):
return "*" in x.s
if ast.is_const_str(x):
return "*" in x.value
elif isinstance(x, list):
for e in x:
if hasglobstar(e):
@ -188,10 +188,10 @@ def hasglobstar(x):
def raise_parse_error(
msg: tp.Union[str, tp.Tuple[str]],
msg: tp.Union[str, tuple[str]],
loc: tp.Optional[Location] = None,
code: tp.Optional[str] = None,
lines: tp.Optional[tp.List[str]] = None,
lines: tp.Optional[list[str]] = None,
):
err_line = None
if loc is None or code is None or lines is None:
@ -636,7 +636,7 @@ class BaseParser:
def xonsh_pathsearch(self, pattern, pymode=False, lineno=None, col=None):
"""Creates the AST node for calling the __xonsh__.pathsearch() function.
The pymode argument indicate if it is called from subproc or python mode"""
pymode = ast.NameConstant(value=pymode, lineno=lineno, col_offset=col)
pymode = ast.const_name(value=pymode, lineno=lineno, col_offset=col)
searchfunc, pattern = RE_SEARCHPATH.match(pattern).groups()
if not searchfunc.startswith("@") and "f" in searchfunc:
pattern_as_str = f"f'''{pattern}'''"
@ -652,7 +652,7 @@ class BaseParser:
except SyntaxError as e:
self._set_error(str(e), self.currloc(lineno=lineno, column=col))
else:
pattern = ast.Str(s=pattern, lineno=lineno, col_offset=col)
pattern = ast.const_str(s=pattern, lineno=lineno, col_offset=col)
pathobj = False
if searchfunc.startswith("@"):
func = searchfunc[1:]
@ -663,7 +663,7 @@ class BaseParser:
func = "__xonsh__.regexsearch"
pathobj = "p" in searchfunc
func = load_attribute_chain(func, lineno=lineno, col=col)
pathobj = ast.NameConstant(value=pathobj, lineno=lineno, col_offset=col)
pathobj = ast.const_name(value=pathobj, lineno=lineno, col_offset=col)
return xonsh_call(
"__xonsh__.pathsearch",
args=[func, pattern, pymode, pathobj],
@ -1804,7 +1804,7 @@ class BaseParser:
end = (p5.lineno, p5.lexpos)
s = self._source_slice(beg, end)
s = textwrap.dedent(s)
p[0] = ast.Str(s=s, lineno=beg[0], col_offset=beg[1])
p[0] = ast.const_str(s=s, lineno=beg[0], col_offset=beg[1])
def p_rawsuite_simple_stmt(self, p):
"""rawsuite : colon_tok nonewline newline_tok"""
@ -1812,7 +1812,7 @@ class BaseParser:
beg = (p1.lineno, p1.lexpos + 1)
end = (p3.lineno, p3.lexpos)
s = self._source_slice(beg, end).strip()
p[0] = ast.Str(s=s, lineno=beg[0], col_offset=beg[1])
p[0] = ast.const_str(s=s, lineno=beg[0], col_offset=beg[1])
def _attach_nodedent_base_rules(self):
toks = set(self.tokens)
@ -2407,22 +2407,22 @@ class BaseParser:
def p_atom_ellip(self, p):
"""atom : ellipsis_tok"""
p1 = p[1]
p[0] = ast.EllipsisNode(lineno=p1.lineno, col_offset=p1.lexpos)
p[0] = ast.Constant(value=..., lineno=p1.lineno, col_offset=p1.lexpos)
def p_atom_none(self, p):
"""atom : none_tok"""
p1 = p[1]
p[0] = ast.NameConstant(value=None, lineno=p1.lineno, col_offset=p1.lexpos)
p[0] = ast.const_name(value=None, lineno=p1.lineno, col_offset=p1.lexpos)
def p_atom_true(self, p):
"""atom : true_tok"""
p1 = p[1]
p[0] = ast.NameConstant(value=True, lineno=p1.lineno, col_offset=p1.lexpos)
p[0] = ast.const_name(value=True, lineno=p1.lineno, col_offset=p1.lexpos)
def p_atom_false(self, p):
"""atom : false_tok"""
p1 = p[1]
p[0] = ast.NameConstant(value=False, lineno=p1.lineno, col_offset=p1.lexpos)
p[0] = ast.const_name(value=False, lineno=p1.lineno, col_offset=p1.lexpos)
def p_atom_pathsearch(self, p):
"""atom : SEARCHPATH"""
@ -2534,7 +2534,7 @@ class BaseParser:
)
elif "p" in prefix:
value_without_p = prefix.replace("p", "") + p1.value[len(prefix) :]
s = ast.Str(
s = ast.const_str(
s=ast.literal_eval(value_without_p),
lineno=p1.lineno,
col_offset=p1.lexpos,
@ -2564,7 +2564,7 @@ class BaseParser:
s = ast.literal_eval(p1.value)
is_bytes = "b" in prefix
is_raw = "r" in prefix
cls = ast.Bytes if is_bytes else ast.Str
cls = ast.const_bytes if is_bytes else ast.const_str
p[0] = cls(s=s, lineno=p1.lineno, col_offset=p1.lexpos, is_raw=is_raw)
def p_string_literal_list(self, p):
@ -2654,7 +2654,7 @@ class BaseParser:
def p_number(self, p):
"""number : number_tok"""
p1 = p[1]
p[0] = ast.Num(
p[0] = ast.const_num(
n=ast.literal_eval(p1.value.replace("_", "")),
lineno=p1.lineno,
col_offset=p1.lexpos,
@ -2711,7 +2711,7 @@ class BaseParser:
else:
msg = "empty macro arguments not allowed"
self._set_error(msg, self.currloc(*beg))
node = ast.Str(s=s, lineno=beg[0], col_offset=beg[1])
node = ast.const_str(s=s, lineno=beg[0], col_offset=beg[1])
elts.append(node)
p0 = ast.Tuple(
elts=elts, ctx=ast.Load(), lineno=p1.lineno, col_offset=p1.lexpos
@ -3165,8 +3165,8 @@ class BaseParser:
return ast.Call(
func=func,
args=[
ast.Str(s=var, lineno=lineno, col_offset=col),
ast.Str(s="", lineno=lineno, col_offset=col),
ast.const_str(s=var, lineno=lineno, col_offset=col),
ast.const_str(s="", lineno=lineno, col_offset=col),
],
keywords=[],
starargs=None,
@ -3178,7 +3178,7 @@ class BaseParser:
def _envvar_by_name(self, var, lineno=None, col=None):
    """Looks up a xonsh variable by name.

    Builds an AST equivalent of ``__xonsh__.env[var]``.

    Parameters
    ----------
    var : str
        Name of the environment variable to look up.
    lineno, col : int, optional
        Source location attached to the generated nodes.
    """
    xenv = load_attribute_chain("__xonsh__.env", lineno=lineno, col=col)
    # Removed the duplicated stale assignment that used ast.Str — it was a
    # dead store, and ast.Str was removed in Python 3.12; ast.const_str is
    # xonsh's compatibility wrapper.
    idx = ast.Index(value=ast.const_str(s=var, lineno=lineno, col_offset=col))
    return ast.Subscript(
        value=xenv, slice=idx, ctx=ast.Load(), lineno=lineno, col_offset=col
    )
@ -3220,7 +3220,7 @@ class BaseParser:
| PIPE WS
| WS PIPE WS
"""
p[0] = ast.Str(s="|", lineno=self.lineno, col_offset=self.col)
p[0] = ast.const_str(s="|", lineno=self.lineno, col_offset=self.col)
def p_amper(self, p):
"""
@ -3229,7 +3229,7 @@ class BaseParser:
| AMPERSAND WS
| WS AMPERSAND WS
"""
p[0] = ast.Str(s="&", lineno=self.lineno, col_offset=self.col)
p[0] = ast.const_str(s="&", lineno=self.lineno, col_offset=self.col)
def p_subproc_s2(self, p):
"""
@ -3254,7 +3254,7 @@ class BaseParser:
| subproc pipe subproc_atoms WS
"""
p1 = p[1]
if len(p1) > 1 and hasattr(p1[-2], "s") and p1[-2].s != "|":
if len(p1) > 1 and hasattr(p1[-2], "value") and p1[-2].value != "|":
self._set_error("additional redirect following non-pipe redirect")
cliargs = self._subproc_cliargs(p[3], lineno=self.lineno, col=self.col)
p[0] = p1 + [p[2], cliargs]
@ -3281,9 +3281,9 @@ class BaseParser:
subcmd = self._source_slice((l, c), (p3.lineno, p3.lexpos))
subcmd = subcmd.strip() + "\n"
p0 = [
ast.Str(s="xonsh", lineno=l, col_offset=c),
ast.Str(s="-c", lineno=l, col_offset=c),
ast.Str(s=subcmd, lineno=l, col_offset=c),
ast.const_str(s="xonsh", lineno=l, col_offset=c),
ast.const_str(s="-c", lineno=l, col_offset=c),
ast.const_str(s=subcmd, lineno=l, col_offset=c),
]
for arg in p0:
arg._cliarg_action = "append"
@ -3308,7 +3308,7 @@ class BaseParser:
def _append_subproc_bang_empty(self, p):
    """Appends an empty string in subprocess mode to the argument list."""
    tok = p[3]
    # col_offset is one past the token's lexpos — presumably to land just
    # after the bang character; confirm against the grammar rule using this.
    empty = ast.const_str(s="", lineno=tok.lineno, col_offset=tok.lexpos + 1)
    p[2][-1].elts.append(empty)
def _append_subproc_bang(self, p):
@ -3319,7 +3319,7 @@ class BaseParser:
beg = (p3.lineno, p3.lexpos + 1)
end = (p5.lineno, p5.lexpos)
s = self._source_slice(beg, end).strip()
node = ast.Str(s=s, lineno=beg[0], col_offset=beg[1])
node = ast.const_str(s=s, lineno=beg[0], col_offset=beg[1])
p[2][-1].elts.append(node)
def p_subproc_atom_uncaptured(self, p):
@ -3373,7 +3373,7 @@ class BaseParser:
)
p0 = ast.Call(
func=func,
args=[p[2], ast.Str(s="", lineno=lineno, col_offset=col)],
args=[p[2], ast.const_str(s="", lineno=lineno, col_offset=col)],
keywords=[],
starargs=None,
kwargs=None,
@ -3436,7 +3436,7 @@ class BaseParser:
| RSHIFT
| IOREDIRECT
"""
p0 = ast.Str(s=p[1], lineno=self.lineno, col_offset=self.col)
p0 = ast.const_str(s=p[1], lineno=self.lineno, col_offset=self.col)
p0._cliarg_action = "append"
p[0] = p0
@ -3492,8 +3492,10 @@ class BaseParser:
# This glues the string together after parsing
p1 = p[1]
p2 = p[2]
if isinstance(p1, ast.Str) and isinstance(p2, ast.Str):
p0 = ast.Str(p1.s + p2.s, lineno=p1.lineno, col_offset=p1.col_offset)
if ast.is_const_str(p1) and ast.is_const_str(p2):
p0 = ast.const_str(
p1.value + p2.value, lineno=p1.lineno, col_offset=p1.col_offset
)
elif isinstance(p1, list):
if isinstance(p2, list):
p1.extend(p2)
@ -3547,7 +3549,7 @@ class BaseParser:
# Many tokens cannot be part of this rule, such as $, ', ", ()
# Use a string atom instead. See above attachment functions
p1 = p[1]
p[0] = ast.Str(s=p1.value, lineno=p1.lineno, col_offset=p1.lexpos)
p[0] = ast.const_str(s=p1.value, lineno=p1.lineno, col_offset=p1.lexpos)
def p_envvar_assign_left(self, p):
"""envvar_assign_left : dollar_name_tok EQUALS"""

View file

@ -7,12 +7,9 @@ import re
from collections import defaultdict
from typing import (
Any,
Dict,
Generic,
List,
NamedTuple,
Optional,
Tuple,
TypeVar,
Union,
cast,
@ -49,7 +46,7 @@ class CommandContext(NamedTuple):
The object containing the current command's completion context.
"""
args: Tuple[CommandArg, ...]
args: tuple[CommandArg, ...]
"""The arguments in the command"""
arg_index: int # ``-1`` if the cursor isn't in the command.
"""The current argument's index"""
@ -116,7 +113,7 @@ class PythonContext(NamedTuple):
"""The cursor's index in the multiline code"""
is_sub_expression: bool = False
"""Whether this is a sub expression (``@(...)``)"""
ctx: Optional[Dict[str, Any]] = None
ctx: Optional[dict[str, Any]] = None
"""Objects in the current execution context"""
def __repr__(self):
@ -145,7 +142,7 @@ class CompletionContext(NamedTuple):
This will be ``None`` when we can't be completing python, e.g. ``echo $(<TAB>``.
"""
def with_ctx(self, ctx: Dict[str, Any]) -> "CompletionContext":
def with_ctx(self, ctx: dict[str, Any]) -> "CompletionContext":
    """Return this context with *ctx* attached to the inner python context.

    If there is no python context, the instance is returned unchanged.
    """
    if self.python is None:
        return self
    return self._replace(python=self.python._replace(ctx=ctx))
@ -256,7 +253,7 @@ class Spanned(Generic[T]):
)
Commands = Spanned[List[Spanned[CommandContext]]]
Commands = Spanned[list[Spanned[CommandContext]]]
ArgContext = Union[Spanned[CommandContext], Commands, Spanned[PythonContext]]
ExpandableObject = Union[Spanned[CommandArg], ArgContext]
@ -376,7 +373,7 @@ class CompletionContextParser:
self,
multiline_text: str,
cursor_index: int,
ctx: Optional[Dict[str, Any]] = None,
ctx: Optional[dict[str, Any]] = None,
) -> Optional[CompletionContext]:
"""Returns a CompletionContext from a command line.
@ -535,7 +532,7 @@ class CompletionContextParser:
|
"""
if len(p) == 2:
spanned_args: List[Spanned[CommandArg]] = p[1]
spanned_args: list[Spanned[CommandArg]] = p[1]
span = slice(spanned_args[0].span.start, spanned_args[-1].span.stop)
else:
# empty command
@ -722,7 +719,7 @@ class CompletionContextParser:
def p_args_many(self, p):
"""args : args arg"""
args: List[Spanned[CommandArg]] = p[1]
args: list[Spanned[CommandArg]] = p[1]
new_arg: Spanned[CommandArg] = p[2]
last_arg: Spanned[CommandArg] = args[-1]
@ -990,7 +987,7 @@ class CompletionContextParser:
def handle_command_arg(
self, arg: Spanned[CommandArg]
) -> Tuple[CommandContext, Optional[Union[CommandContext, PythonContext]]]:
) -> tuple[CommandContext, Optional[Union[CommandContext, PythonContext]]]:
"""Create a command context from an arg which contains the cursor.
Also return the internal cursor context if it exists.
`args`, `arg_index`, and `subcmd_opening` aren't set by this function
@ -1105,7 +1102,7 @@ class CompletionContextParser:
def process_string_segment(
self, string: str, span: slice
) -> Tuple[str, Optional[int]]:
) -> tuple[str, Optional[int]]:
"""Process a string segment:
1. Return a relative_cursor if it's inside the span (for ``Spanned.cursor_context``).
2. Handle line continuations in the string.

View file

@ -2,6 +2,8 @@ import ast
import collections
import keyword
from xonsh import ast as xast
_all_keywords = frozenset(keyword.kwlist)
@ -20,7 +22,14 @@ def _not_assignable(x, augassign=False):
res = _not_assignable(i)
if res is not None:
return res
elif isinstance(x, (ast.Set, ast.Dict, ast.Num, ast.Str, ast.Bytes)):
elif any(
[
isinstance(x, (ast.Set, ast.Dict)),
xast.is_const_num(x),
xast.is_const_str(x),
xast.is_const_bytes(x),
]
):
return "literal"
elif isinstance(x, ast.Call):
return "function call"
@ -42,7 +51,7 @@ def _not_assignable(x, augassign=False):
return "comparison"
elif isinstance(x, ast.Name) and x.id in _all_keywords:
return "keyword"
elif isinstance(x, ast.NameConstant):
elif xast.is_const_name(x):
return "keyword"

View file

@ -9,17 +9,12 @@ from xonsh.platform import PYTHON_VERSION_INFO
@lazyobject
def RE_FSTR_FIELD_WRAPPER():
if PYTHON_VERSION_INFO > (3, 8):
return re.compile(r"(__xonsh__\.eval_fstring_field\((\d+)\))\s*[^=]")
else:
return re.compile(r"(__xonsh__\.eval_fstring_field\((\d+)\))")
return re.compile(r"(__xonsh__\.eval_fstring_field\((\d+)\))\s*[^=]")
if PYTHON_VERSION_INFO > (3, 8):
@lazyobject
def RE_FSTR_SELF_DOC_FIELD_WRAPPER():
return re.compile(r"(__xonsh__\.eval_fstring_field\((\d+)\)\s*)=")
@lazyobject
def RE_FSTR_SELF_DOC_FIELD_WRAPPER():
    """Regex for an ``eval_fstring_field`` call followed by ``=`` —
    presumably matching f-string self-documenting fields (``f"{expr=}"``);
    confirm against the call sites.
    """
    pattern = r"(__xonsh__\.eval_fstring_field\((\d+)\)\s*)="
    return re.compile(pattern)
class FStringAdaptor:
@ -64,12 +59,19 @@ class FStringAdaptor:
except SyntaxError as e:
# The e.text attribute is expected to contain the failing
# expression, e.g. "($HOME)" for f"{$HOME}" string.
if e.text is None or e.text[0] != "(":
raise
error_expr = e.text.strip()[1:-1]
epos = template.find(error_expr)
if epos < 0:
raise
if PYTHON_VERSION_INFO < (3, 12):
if (e.text is None) or (e.text[0] != "("):
raise
error_expr = e.text.strip()[1:-1]
epos = template.find(error_expr)
if epos < 0:
raise
else:
# Python 3.12+ reports the error differently.
# todo: implement a better way to get the error expression
raise RuntimeError("Unsupported fstring syntax") from e
# We can only get here in the case of handled SyntaxError.
# Patch the last error and start over.
xonsh_field = (error_expr, self.filename if self.filename else None)
@ -87,8 +89,8 @@ class FStringAdaptor:
for node in ast.walk(self.res):
if isinstance(node, ast.Constant) and isinstance(node.value, str):
value = node.value
elif isinstance(node, ast.Str):
value = node.s
elif ast.is_const_str(node):
value = node.value
else:
continue
@ -110,8 +112,8 @@ class FStringAdaptor:
for node in ast.walk(self.res):
if isinstance(node, ast.Constant) and isinstance(node.value, str):
value = node.value
elif isinstance(node, ast.Str):
value = node.s
elif ast.is_const_str(node):
value = node.value
else:
continue
@ -122,8 +124,9 @@ class FStringAdaptor:
if field is None:
continue
value = value.replace(match.group(1), field[0], 1)
if isinstance(node, ast.Str):
node.s = value
if ast.is_const_str(node):
node.value = value
else:
node.value = value
@ -162,22 +165,20 @@ class FStringAdaptor:
col_offset=col_offset,
)
node.args[0] = field_node
elif isinstance(node.args[0], ast.Num):
field = self.fields.pop(node.args[0].n, None)
elif ast.is_const_num(node.args[0]):
field = self.fields.pop(node.args[0].value, None)
if field is None:
continue
lineno = node.args[0].lineno
col_offset = node.args[0].col_offset
elts = [ast.Str(s=field[0], lineno=lineno, col_offset=col_offset)]
elts = [ast.const_str(s=field[0], lineno=lineno, col_offset=col_offset)]
if field[1] is not None:
elts.append(
ast.Str(s=field[1], lineno=lineno, col_offset=col_offset)
ast.const_str(s=field[1], lineno=lineno, col_offset=col_offset)
)
else:
elts.append(
ast.NameConstant(
value=None, lineno=lineno, col_offset=col_offset
)
ast.const_name(value=None, lineno=lineno, col_offset=col_offset)
)
field_node = ast.Tuple(
elts=elts, ctx=ast.Load(), lineno=lineno, col_offset=col_offset

View file

@ -30,7 +30,7 @@ class _ParsedToken(tp.NamedTuple):
class ParsedTokens(tp.NamedTuple):
tokens: tp.List[_ParsedToken]
tokens: list[_ParsedToken]
template: tp.Union[str, tp.Callable]
def process(self) -> str:

View file

@ -70,9 +70,9 @@ class AsyncPrompt:
self.executor = executor
# (Key: the future object) that is created for the (value: index/field_name) in the tokens list
self.futures: tp.Dict[
self.futures: dict[
concurrent.futures.Future,
tp.Tuple[str, tp.Optional[int], tp.Optional[str], tp.Optional[str]],
tuple[str, tp.Optional[int], tp.Optional[str], tp.Optional[str]],
] = {}
def start_update(self, on_complete):
@ -160,7 +160,7 @@ class PromptUpdator:
def __init__(self, shell):
from xonsh.ptk_shell.shell import PromptToolkitShell
self.prompts: tp.Dict[str, AsyncPrompt] = {}
self.prompts: dict[str, AsyncPrompt] = {}
self.shell: PromptToolkitShell = shell
self.executor = Executor()
self.futures = {}

View file

@ -3,7 +3,6 @@ import os
import re
import stat
import sys
import typing as tp
from collections import ChainMap
from collections.abc import MutableMapping
from keyword import iskeyword
@ -66,7 +65,7 @@ from xonsh.tools import (
Color = Token.Color # alias to new color token namespace
# style rules that are not supported by pygments are stored here
NON_PYGMENTS_RULES: tp.Dict[str, tp.Dict[str, str]] = {}
NON_PYGMENTS_RULES: dict[str, dict[str, str]] = {}
# style modifiers not handled by pygments (but supported by ptk)
PTK_SPECIFIC_VALUES = frozenset(
@ -1490,7 +1489,7 @@ def on_lscolors_change(key, oldvalue, newvalue, **kwargs):
events.on_lscolors_change(on_lscolors_change)
def color_file(file_path: str, path_stat: os.stat_result) -> tp.Tuple[_TokenType, str]:
def color_file(file_path: str, path_stat: os.stat_result) -> tuple[_TokenType, str]:
"""Determine color to use for file *approximately* as ls --color would,
given lstat() results and its path.
@ -1750,7 +1749,7 @@ class XonshConsoleLexer(XonshLexer):
name = "Xonsh console lexer"
aliases = ["xonshcon"]
filenames: tp.List[str] = []
filenames: list[str] = []
tokens = {
"root": [

View file

@ -7,7 +7,6 @@ import importlib
import os
import sys
import types
import typing as tp
from pathlib import Path
from traceback import extract_tb, format_list
from unittest.mock import MagicMock
@ -129,7 +128,7 @@ def xonsh_execer_parse(xonsh_execer):
@pytest.fixture
def mock_executables_in(xession, tmp_path, monkeypatch):
def _factory(binaries: tp.List[str]):
def _factory(binaries: list[str]):
xession.env["PATH"] = [tmp_path]
exec_mock = MagicMock(return_value=binaries)
monkeypatch.setattr(commands_cache, "executables_in", exec_mock)

View file

@ -129,12 +129,14 @@ def nodes_equal(x, y):
for (xname, xval), (yname, yval) in zip(ast.iter_fields(x), ast.iter_fields(y)):
assert (
xname == yname
), "Ast nodes fields differ : {} (of type {}) != {} (of type {})".format(
), "Ast nodes field names differ : {} (of type {}) != {} (of type {})".format(
xname,
type(xval),
yname,
type(yval),
)
if isinstance(x, ast.Constant) and xname == "kind":
continue
assert type(xval) == type(
yval
), "Ast nodes fields differ : {} (of type {}) != {} (of type {})".format(

View file

@ -30,6 +30,7 @@ from token import (
CIRCUMFLEX,
CIRCUMFLEXEQUAL,
COLON,
COLONEQUAL,
COMMA,
DEDENT,
DOT,
@ -83,10 +84,6 @@ from token import (
from xonsh.lazyasd import LazyObject
from xonsh.platform import PYTHON_VERSION_INFO
HAS_WALRUS = PYTHON_VERSION_INFO > (3, 8)
if HAS_WALRUS:
from token import COLONEQUAL # type:ignore
cookie_re = LazyObject(
lambda: re.compile(r"^[ \t\f]*#.*coding[:=][ \t]*([-\w.]+)", re.ASCII),
globals(),
@ -126,10 +123,7 @@ else:
ADDSPACE_TOKS = (NAME, NUMBER) # type:ignore
del token # must clean up token
if HAS_WALRUS:
AUGASSIGN_OPS = r"[+\-*/%&@|^=<>:]=?"
else:
AUGASSIGN_OPS = r"[+\-*/%&@|^=<>]=?"
AUGASSIGN_OPS = r"[+\-*/%&@|^=<>:]=?"
COMMENT = N_TOKENS
tok_name[COMMENT] = "COMMENT"
@ -187,7 +181,7 @@ for v in _xonsh_tokens.values():
__all__.append(v)
del _glbs, v
EXACT_TOKEN_TYPES: tp.Dict[str, tp.Union[str, int]] = {
EXACT_TOKEN_TYPES: dict[str, tp.Union[str, int]] = {
"(": LPAR,
")": RPAR,
"[": LSQB,
@ -231,9 +225,8 @@ EXACT_TOKEN_TYPES: tp.Dict[str, tp.Union[str, int]] = {
"//": DOUBLESLASH,
"//=": DOUBLESLASHEQUAL,
"@": AT,
":=": COLONEQUAL,
}
if HAS_WALRUS:
EXACT_TOKEN_TYPES[":="] = COLONEQUAL
EXACT_TOKEN_TYPES.update(_xonsh_tokens)

View file

@ -3,7 +3,7 @@ import os
import re
import typing as tp
RENDERERS: tp.List[tp.Callable] = []
RENDERERS: list[tp.Callable] = []
def renderer(f):

View file

@ -18,7 +18,7 @@ from xonsh.webconfig import tags as t
from xonsh.webconfig.file_writes import insert_into_xonshrc
from xonsh.webconfig.routes import Routes
RENDERERS: tp.List[tp.Callable] = []
RENDERERS: list[tp.Callable] = []
class XonshConfigHTTPRequestHandler(server.SimpleHTTPRequestHandler):

View file

@ -8,7 +8,7 @@ from ..built_ins import XonshSession
from .file_writes import insert_into_xonshrc
if TYPE_CHECKING:
from typing import Type
pass
import logging
from urllib import parse
@ -19,7 +19,7 @@ from . import xonsh_data
class Routes:
path: str
registry: "dict[str, Type[Routes]]" = {}
registry: "dict[str, type[Routes]]" = {}
navbar = False
nav_title: "str|None" = None
err_msgs: "list" = []

View file

@ -2,7 +2,7 @@ import logging
from typing import TYPE_CHECKING
if TYPE_CHECKING:
from typing import Iterable
from collections.abc import Iterable
import xml.etree.ElementTree as etree
from functools import partial

View file

@ -21,7 +21,7 @@ from xonsh.tools import backup_file, print_color, to_bool, to_bool_or_break
class Node:
"""Base type of all nodes."""
attrs: tp.Union[tp.Tuple[str, ...], str] = ()
attrs: tp.Union[tuple[str, ...], str] = ()
def __str__(self):
return PrettyFormatter(self).visit()
@ -81,7 +81,7 @@ class Question(Node):
class Input(Node):
"""Gets input from the user."""
attrs: tp.Tuple[str, ...] = (
attrs: tuple[str, ...] = (
"prompt",
"converter",
"show_conversion",
@ -257,7 +257,7 @@ class StateFile(Input):
given file name. This node type is likely not useful on its own.
"""
attrs: tp.Tuple[str, ...] = ("default_file", "check", "ask_filename")
attrs: tuple[str, ...] = ("default_file", "check", "ask_filename")
def __init__(self, default_file=None, check=True, ask_filename=True):
"""

View file

@ -138,7 +138,7 @@ WIZARD_TAIL = """
Thanks for using the xonsh configuration wizard!"""
_XONFIG_SOURCE_FOREIGN_SHELL_COMMAND: tp.Dict[str, str] = collections.defaultdict(
_XONFIG_SOURCE_FOREIGN_SHELL_COMMAND: dict[str, str] = collections.defaultdict(
lambda: "source-foreign", bash="source-bash", cmd="source-cmd", zsh="source-zsh"
)
@ -363,7 +363,7 @@ def _xontrib_path(visitor=None, node=None, val=None):
return ("xontribs", len(visitor.state.get("xontribs", ())))
def make_xontrib(xon_item: tp.Tuple[str, Xontrib]):
def make_xontrib(xon_item: tuple[str, Xontrib]):
"""Makes a message and StoreNonEmpty node for a xontrib."""
name, xontrib = xon_item
name = name or "<unknown-xontrib-name>"
@ -526,7 +526,7 @@ def _info(
reports results as json
"""
env = XSH.env
data: tp.List[tp.Any] = [("xonsh", XONSH_VERSION)]
data: list[tp.Any] = [("xonsh", XONSH_VERSION)]
hash_, date_ = githash()
if hash_:
data.append(("Git SHA", hash_))

View file

@ -76,7 +76,7 @@ def get_module_docstring(module: str) -> str:
return ""
def get_xontribs() -> tp.Dict[str, Xontrib]:
def get_xontribs() -> dict[str, Xontrib]:
    """Build and return the mapping of installed xontrib definitions.

    Discovery is delegated to ``_get_installed_xontribs`` so the registry is
    only materialized when requested.
    """
    installed = _get_installed_xontribs()
    return dict(installed)
@ -160,7 +160,7 @@ def xontrib_context(name, full_module=False):
return ctx
def prompt_xontrib_install(names: tp.List[str]):
def prompt_xontrib_install(names: list[str]):
"""Returns a formatted string with name of xontrib package to prompt user"""
return (
"The following xontribs are enabled but not installed: \n"