mirror of https://github.com/xonsh/xonsh.git, synced 2025-03-04 08:24:40 +01:00
refactoring: moving tokenize to parsers (#5596)
* moving tokenize to parsers
* [pre-commit.ci] auto fixes from pre-commit.com hooks; for more information, see https://pre-commit.ci

Co-authored-by: a <1@1.1>
Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
parent: d08248f6aa
commit: bdfa67c637

6 changed files with 6 additions and 6 deletions
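Note: the loaded hunks below only update import sites and the lint config; they do not show whether the old module path keeps a re-export shim. Purely as a hypothetical illustration of the usual back-compat pattern for such moves (not something this diff confirms), a shim at the old location would be a one-liner:

# xonsh/lib/tokenize.py -- HYPOTHETICAL shim, not shown in this diff;
# illustrates the common compatibility pattern when a module moves.
from xonsh.parsers.tokenize import *  # noqa: F401,F403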
pyproject.toml
@@ -226,7 +226,7 @@ convention = "numpy"
 "xonsh/history.py" = ["F821"]
 "xonsh/parsers/lexer.py" = ["E741"]
 "xonsh/parsers/completion_context.py" = ["B018"]
-"xonsh/lib/tokenize.py" = [
+"xonsh/parsers/tokenize.py" = [
     "F821",
     "F841",
     "B904" # Within an `except` clause, raise exceptions with `raise ... from err`
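The B904 ignore that moves with the file is ruff's flake8-bugbear rule requiring explicit exception chaining inside except clauses, exactly as the inline comment says. A minimal sketch of the pattern the rule enforces (the function and names are illustrative only):

def parse_port(text: str) -> int:
    try:
        return int(text)
    except ValueError as err:
        # B904-compliant: `raise ... from err` preserves the original traceback
        raise RuntimeError(f"invalid port: {text!r}") from err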
xonsh/inspectors.py
@@ -18,7 +18,7 @@ import types
 from xonsh.lib.lazyasd import LazyObject
 from xonsh.lib.lazyimps import pyghooks, pygments
 from xonsh.lib.openpy import read_py_file
-from xonsh.lib.tokenize import detect_encoding
+from xonsh.parsers.tokenize import detect_encoding
 from xonsh.platform import HAS_PYGMENTS
 from xonsh.style_tools import partial_color_tokenize
 from xonsh.tools import cast_unicode, format_color, indent, print_color, safe_hasattr
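xonsh's tokenize module is a fork of the standard library's, so detect_encoding is assumed here to keep the stdlib signature: it takes a bytes readline callable and returns the detected encoding together with the lines it consumed. A hedged sketch; the file name is a placeholder:

from xonsh.parsers.tokenize import detect_encoding

# Assumption: stdlib-compatible API; the file must be opened in binary mode.
with open("script.xsh", "rb") as f:
    encoding, consumed = detect_encoding(f.readline)
print(encoding)  # e.g. 'utf-8'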
xonsh/lib/openpy.py
@@ -16,7 +16,7 @@ import io
 import re

 from xonsh.lib.lazyasd import LazyObject
-from xonsh.lib.tokenize import detect_encoding, tokopen
+from xonsh.parsers.tokenize import detect_encoding, tokopen

 cookie_comment_re = LazyObject(
     lambda: re.compile(r"^\s*#.*coding[:=]\s*([-\w.]+)", re.UNICODE),
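The trailing context lines show this module's lazy-compile idiom: cookie_comment_re is a LazyObject whose loader runs only on first use, and the RE_SEARCHPATH context line in the next hunk confirms the LazyObject(load, ctx, name) call signature. A small self-contained sketch of the same idiom:

import re

from xonsh.lib.lazyasd import LazyObject

# The lambda runs on first attribute access; the compiled regex then
# replaces the placeholder bound to "COOKIE_RE" in globals().
COOKIE_RE = LazyObject(
    lambda: re.compile(r"^\s*#.*coding[:=]\s*([-\w.]+)", re.UNICODE),
    globals(),
    "COOKIE_RE",
)

m = COOKIE_RE.match("# -*- coding: utf-8 -*-")  # triggers compilation
print(m.group(1) if m else None)  # 'utf-8'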
xonsh/parsers/base.py
@@ -11,13 +11,13 @@ from collections.abc import Iterable, Mapping, Sequence
 from threading import Thread

 from xonsh.lib.lazyasd import LazyObject
-from xonsh.lib.tokenize import SearchPath, StringPrefix
 from xonsh.parsers import ast
 from xonsh.parsers.ast import has_elts, load_attribute_chain, xonsh_call
 from xonsh.parsers.context_check import check_contexts
 from xonsh.parsers.fstring_adaptor import FStringAdaptor
 from xonsh.parsers.lexer import Lexer, LexToken
 from xonsh.parsers.ply import yacc
+from xonsh.parsers.tokenize import SearchPath, StringPrefix
 from xonsh.platform import PYTHON_VERSION_INFO

 RE_SEARCHPATH = LazyObject(lambda: re.compile(SearchPath), globals(), "RE_SEARCHPATH")
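SearchPath and StringPrefix are regex source strings exported by the tokenizer, and the RE_SEARCHPATH context line shows how the parser compiles SearchPath lazily. A hedged usage sketch; that a backtick literal like `.*\.py` matches is an assumption based on xonsh's search-path syntax, with the authoritative grammar living in xonsh/parsers/tokenize.py:

import re

from xonsh.lib.lazyasd import LazyObject
from xonsh.parsers.tokenize import SearchPath

# Same lazy-compile idiom as the RE_SEARCHPATH line in the diff above.
RE_SEARCHPATH = LazyObject(lambda: re.compile(SearchPath), globals(), "RE_SEARCHPATH")

# Assumption: backtick search-path literals are what this pattern tokenizes.
print(bool(RE_SEARCHPATH.match(r"`.*\.py`")))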
xonsh/parsers/lexer.py
@@ -11,7 +11,8 @@ import re
 import typing as tp

 from xonsh.lib.lazyasd import lazyobject
-from xonsh.lib.tokenize import (
+from xonsh.parsers.ply.lex import LexToken
+from xonsh.parsers.tokenize import (
     CASE,
     COMMENT,
     DEDENT,
@@ -36,7 +37,6 @@ from xonsh.lib.tokenize import (
     TokenError,
     tokenize,
 )
-from xonsh.parsers.ply.lex import LexToken
 from xonsh.platform import PYTHON_VERSION_INFO
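The lexer drives the tokenize generator using the token-kind constants imported above (NAME and NUMBER sit in the elided middle of that import list). Assuming the fork keeps the stdlib generator interface, bytes readline in and TokenInfo-style records out, a minimal sketch:

import io

from xonsh.parsers.tokenize import NAME, NUMBER, tokenize

# Assumption: stdlib-style tokenize(readline) yielding
# (type, string, start, end, line) records.
src = b"x = 42\n"
for tok in tokenize(io.BytesIO(src).readline):
    if tok[0] in (NAME, NUMBER):
        print(tok[1])  # -> 'x' then '42'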