diff --git a/pyproject.toml b/pyproject.toml
index 45bd72c9a..0384cb1f4 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -226,7 +226,7 @@ convention = "numpy"
 "xonsh/history.py" = ["F821"]
 "xonsh/parsers/lexer.py" = ["E741"]
 "xonsh/parsers/completion_context.py" = ["B018"]
-"xonsh/lib/tokenize.py" = [
+"xonsh/parsers/tokenize.py" = [
     "F821",
     "F841",
     "B904" # Within an `except` clause, raise exceptions with `raise ... from err`
diff --git a/xonsh/lib/inspectors.py b/xonsh/lib/inspectors.py
index aeb21b7fe..0de3e1ea6 100644
--- a/xonsh/lib/inspectors.py
+++ b/xonsh/lib/inspectors.py
@@ -18,7 +18,7 @@ import types
 from xonsh.lib.lazyasd import LazyObject
 from xonsh.lib.lazyimps import pyghooks, pygments
 from xonsh.lib.openpy import read_py_file
-from xonsh.lib.tokenize import detect_encoding
+from xonsh.parsers.tokenize import detect_encoding
 from xonsh.platform import HAS_PYGMENTS
 from xonsh.style_tools import partial_color_tokenize
 from xonsh.tools import cast_unicode, format_color, indent, print_color, safe_hasattr
diff --git a/xonsh/lib/openpy.py b/xonsh/lib/openpy.py
index dab87a7a1..e16d35404 100644
--- a/xonsh/lib/openpy.py
+++ b/xonsh/lib/openpy.py
@@ -16,7 +16,7 @@ import io
 import re
 
 from xonsh.lib.lazyasd import LazyObject
-from xonsh.lib.tokenize import detect_encoding, tokopen
+from xonsh.parsers.tokenize import detect_encoding, tokopen
 
 cookie_comment_re = LazyObject(
     lambda: re.compile(r"^\s*#.*coding[:=]\s*([-\w.]+)", re.UNICODE),
diff --git a/xonsh/parsers/base.py b/xonsh/parsers/base.py
index f519b0d90..c1223cf83 100644
--- a/xonsh/parsers/base.py
+++ b/xonsh/parsers/base.py
@@ -11,13 +11,13 @@ from collections.abc import Iterable, Mapping, Sequence
 from threading import Thread
 
 from xonsh.lib.lazyasd import LazyObject
-from xonsh.lib.tokenize import SearchPath, StringPrefix
 from xonsh.parsers import ast
 from xonsh.parsers.ast import has_elts, load_attribute_chain, xonsh_call
 from xonsh.parsers.context_check import check_contexts
 from xonsh.parsers.fstring_adaptor import FStringAdaptor
 from xonsh.parsers.lexer import Lexer, LexToken
 from xonsh.parsers.ply import yacc
+from xonsh.parsers.tokenize import SearchPath, StringPrefix
 from xonsh.platform import PYTHON_VERSION_INFO
 
 RE_SEARCHPATH = LazyObject(lambda: re.compile(SearchPath), globals(), "RE_SEARCHPATH")
diff --git a/xonsh/parsers/lexer.py b/xonsh/parsers/lexer.py
index 3b3ed1fda..4e0d9dd2c 100644
--- a/xonsh/parsers/lexer.py
+++ b/xonsh/parsers/lexer.py
@@ -11,7 +11,8 @@ import re
 import typing as tp
 
 from xonsh.lib.lazyasd import lazyobject
-from xonsh.lib.tokenize import (
+from xonsh.parsers.ply.lex import LexToken
+from xonsh.parsers.tokenize import (
     CASE,
     COMMENT,
     DEDENT,
@@ -36,7 +37,6 @@ from xonsh.lib.tokenize import (
     TokenError,
     tokenize,
 )
-from xonsh.parsers.ply.lex import LexToken
 from xonsh.platform import PYTHON_VERSION_INFO
 
 
diff --git a/xonsh/lib/tokenize.py b/xonsh/parsers/tokenize.py
similarity index 100%
rename from xonsh/lib/tokenize.py
rename to xonsh/parsers/tokenize.py