Merge remote-tracking branch 'origin/main'

commit 03499cb263
Author: a
Date:   2024-07-12 14:00:21 +02:00
6 changed files with 6 additions and 6 deletions
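
The hunks below all make the same mechanical change: imports of xonsh's
tokenize module are updated from its old location, xonsh.lib.tokenize, to
its new one, xonsh.parsers.tokenize. In before/after form, using
detect_encoding (one of the names imported below):

    # before this merge
    from xonsh.lib.tokenize import detect_encoding

    # after this merge
    from xonsh.parsers.tokenize import detect_encoding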


@@ -226,7 +226,7 @@ convention = "numpy"
 "xonsh/history.py" = ["F821"]
 "xonsh/parsers/lexer.py" = ["E741"]
 "xonsh/parsers/completion_context.py" = ["B018"]
-"xonsh/lib/tokenize.py" = [
+"xonsh/parsers/tokenize.py" = [
     "F821",
     "F841",
     "B904" # Within an `except` clause, raise exceptions with `raise ... from err`


@@ -18,7 +18,7 @@ import types
 from xonsh.lib.lazyasd import LazyObject
 from xonsh.lib.lazyimps import pyghooks, pygments
 from xonsh.lib.openpy import read_py_file
-from xonsh.lib.tokenize import detect_encoding
+from xonsh.parsers.tokenize import detect_encoding
 from xonsh.platform import HAS_PYGMENTS
 from xonsh.style_tools import partial_color_tokenize
 from xonsh.tools import cast_unicode, format_color, indent, print_color, safe_hasattr
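
detect_encoding, re-imported above from its new location, follows the
stdlib tokenize.detect_encoding contract: given a bytes-returning readline
callable, it returns the detected source encoding plus the lines it
consumed. A sketch, assuming the xonsh fork keeps that stdlib-compatible
signature:

    import io

    from xonsh.parsers.tokenize import detect_encoding

    source = b"# -*- coding: latin-1 -*-\nx = 1\n"
    encoding, consumed = detect_encoding(io.BytesIO(source).readline)
    print(encoding)  # normalized cookie name, e.g. "iso-8859-1"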


@@ -16,7 +16,7 @@ import io
 import re

 from xonsh.lib.lazyasd import LazyObject
-from xonsh.lib.tokenize import detect_encoding, tokopen
+from xonsh.parsers.tokenize import detect_encoding, tokopen

 cookie_comment_re = LazyObject(
     lambda: re.compile(r"^\s*#.*coding[:=]\s*([-\w.]+)", re.UNICODE),
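
cookie_comment_re above lazily compiles the PEP 263 coding-cookie pattern;
its single capture group is the declared encoding name. A quick standalone
check of the same regex:

    import re

    cookie = re.compile(r"^\s*#.*coding[:=]\s*([-\w.]+)", re.UNICODE)
    match = cookie.match("# -*- coding: utf-8 -*-")
    print(match.group(1))  # "utf-8"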


@@ -11,13 +11,13 @@ from collections.abc import Iterable, Mapping, Sequence
 from threading import Thread

 from xonsh.lib.lazyasd import LazyObject
-from xonsh.lib.tokenize import SearchPath, StringPrefix
 from xonsh.parsers import ast
 from xonsh.parsers.ast import has_elts, load_attribute_chain, xonsh_call
 from xonsh.parsers.context_check import check_contexts
 from xonsh.parsers.fstring_adaptor import FStringAdaptor
 from xonsh.parsers.lexer import Lexer, LexToken
 from xonsh.parsers.ply import yacc
+from xonsh.parsers.tokenize import SearchPath, StringPrefix
 from xonsh.platform import PYTHON_VERSION_INFO

 RE_SEARCHPATH = LazyObject(lambda: re.compile(SearchPath), globals(), "RE_SEARCHPATH")
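
RE_SEARCHPATH above uses the LazyObject idiom found throughout xonsh: the
loader lambda runs only on first access, and its result then replaces the
placeholder bound to the given name in globals(). A sketch of the same
pattern with a stand-in pattern string:

    import re

    from xonsh.lib.lazyasd import LazyObject

    # Nothing is compiled yet; RE_WORD is a lazy placeholder.
    RE_WORD = LazyObject(lambda: re.compile(r"\w+"), globals(), "RE_WORD")

    # First use triggers compilation and rebinds RE_WORD in globals().
    print(RE_WORD.findall("deferred until used"))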


@@ -11,7 +11,8 @@ import re
 import typing as tp

 from xonsh.lib.lazyasd import lazyobject
-from xonsh.lib.tokenize import (
+from xonsh.parsers.ply.lex import LexToken
+from xonsh.parsers.tokenize import (
     CASE,
     COMMENT,
     DEDENT,
@@ -36,7 +37,6 @@ from xonsh.lib.tokenize import (
     TokenError,
     tokenize,
 )
-from xonsh.parsers.ply.lex import LexToken
 from xonsh.platform import PYTHON_VERSION_INFO
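
The lexer pulls its token constants and the tokenize() generator from the
relocated module, a fork of the stdlib tokenizer extended with
xonsh-specific tokens. A sketch of driving it directly, assuming it keeps
the stdlib readline-based interface and TokenInfo-style results:

    import io

    from xonsh.parsers.tokenize import COMMENT, TokenError, tokenize

    source = b"x = 1  # a comment\n"
    try:
        for tok in tokenize(io.BytesIO(source).readline):
            if tok.type == COMMENT:
                print("comment token:", tok.string)
    except TokenError as err:
        print("incomplete input:", err)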