mirror of
https://github.com/xonsh/xonsh.git
synced 2025-03-04 08:24:40 +01:00
Merge branch 'master' into is_3622
This commit is contained in:
commit
f1d703d096
23 changed files with 679 additions and 128 deletions
|
@ -219,7 +219,7 @@
|
|||
</li>
|
||||
</ul>
|
||||
<div class="tab-content" id="myTabContent">
|
||||
<div class="tab-pane fade" id="home" role="tabpanel" aria-labelledby="home-tab">
|
||||
<div class="tab-pane fade show active" id="home" role="tabpanel" aria-labelledby="home-tab">
|
||||
<div class="row recent_update_text">
|
||||
<div class="col-lg-6">
|
||||
<div class="chart_img" style="height:600px;">
|
||||
|
@ -278,7 +278,7 @@
|
|||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="tab-pane fade show active" id="contact" role="tabpanel" aria-labelledby="contact-tab">
|
||||
<div class="tab-pane fade" id="contact" role="tabpanel" aria-labelledby="contact-tab">
|
||||
<div class="row recent_update_text">
|
||||
<div class="col-lg-6">
|
||||
<div class="chart_img" style="height:600px;">
|
||||
|
|
29
news/ansi_prompt.rst
Normal file
29
news/ansi_prompt.rst
Normal file
|
@ -0,0 +1,29 @@
|
|||
**Added:**
|
||||
|
||||
* Support for ANSI escape codes in ``$PROMPT``/``$RIGHT_PROMPT``. In this way 3rd party prompt generators like ``powerline`` or ``starship`` can be used to set the prompt. ANSI escape codes might be mixed with the normal formatting (like ``{BOLD_GREEN}``) and *prompt variables* (like ``{user}``) should work as well.
|
||||
For example:
|
||||
::
|
||||
|
||||
$PROMPT=lambda: $(starship prompt)
|
||||
$RIGHT_PROMPT="\x1b[33m{hostname} {GREEN}> "
|
||||
|
||||
|
||||
**Changed:**
|
||||
|
||||
* <news item>
|
||||
|
||||
**Deprecated:**
|
||||
|
||||
* <news item>
|
||||
|
||||
**Removed:**
|
||||
|
||||
* <news item>
|
||||
|
||||
**Fixed:**
|
||||
|
||||
* <news item>
|
||||
|
||||
**Security:**
|
||||
|
||||
* <news item>
|
24
news/enviter.rst
Normal file
24
news/enviter.rst
Normal file
|
@ -0,0 +1,24 @@
|
|||
**Added:**
|
||||
|
||||
* New ``Env.rawkeys()`` iterator for iterating over all keys in an environment,
|
||||
not just the string keys like with ``__iter__()``.
|
||||
|
||||
**Changed:**
|
||||
|
||||
* <news item>
|
||||
|
||||
**Deprecated:**
|
||||
|
||||
* <news item>
|
||||
|
||||
**Removed:**
|
||||
|
||||
* <news item>
|
||||
|
||||
**Fixed:**
|
||||
|
||||
* <news item>
|
||||
|
||||
**Security:**
|
||||
|
||||
* <news item>
|
25
news/fstrings.rst
Normal file
25
news/fstrings.rst
Normal file
|
@ -0,0 +1,25 @@
|
|||
**Added:**
|
||||
|
||||
* <news item>
|
||||
|
||||
**Changed:**
|
||||
|
||||
* <news item>
|
||||
|
||||
**Deprecated:**
|
||||
|
||||
* <news item>
|
||||
|
||||
**Removed:**
|
||||
|
||||
* <news item>
|
||||
|
||||
**Fixed:**
|
||||
|
||||
* Xonsh can now fully handle special Xonsh syntax within f-strings, including
|
||||
environmnent variables within ``${}`` operator and captured subprocess
|
||||
expansion within f-string expressions.
|
||||
|
||||
**Security:**
|
||||
|
||||
* <news item>
|
28
news/nothreadrc.rst
Normal file
28
news/nothreadrc.rst
Normal file
|
@ -0,0 +1,28 @@
|
|||
**Added:**
|
||||
|
||||
* <news item>
|
||||
|
||||
**Changed:**
|
||||
|
||||
* <news item>
|
||||
|
||||
**Deprecated:**
|
||||
|
||||
* <news item>
|
||||
|
||||
**Removed:**
|
||||
|
||||
* <news item>
|
||||
|
||||
**Fixed:**
|
||||
|
||||
* Run control files are now read in with ``$THREAD_SUBPROCS`` off.
|
||||
This prevents a weird error when starting xonsh from Bash (and
|
||||
possibly other shells) where the top-level xonsh process would
|
||||
be stopped and placed into the background during startup. It
|
||||
may be necessary to set ``$THREAD_SUBPROCS=False`` in downstream
|
||||
xonsh scripts and modules.
|
||||
|
||||
**Security:**
|
||||
|
||||
* <news item>
|
24
news/pip-files.rst
Normal file
24
news/pip-files.rst
Normal file
|
@ -0,0 +1,24 @@
|
|||
**Added:**
|
||||
|
||||
* <news item>
|
||||
|
||||
**Changed:**
|
||||
|
||||
* <news item>
|
||||
|
||||
**Deprecated:**
|
||||
|
||||
* <news item>
|
||||
|
||||
**Removed:**
|
||||
|
||||
* <news item>
|
||||
|
||||
**Fixed:**
|
||||
|
||||
* Fixed installation issues where generated files (like the parser table and
|
||||
amalgamated modules) were not installed.
|
||||
|
||||
**Security:**
|
||||
|
||||
* <news item>
|
24
news/unlocked.rst
Normal file
24
news/unlocked.rst
Normal file
|
@ -0,0 +1,24 @@
|
|||
**Added:**
|
||||
|
||||
* <news item>
|
||||
|
||||
**Changed:**
|
||||
|
||||
* <news item>
|
||||
|
||||
**Deprecated:**
|
||||
|
||||
* <news item>
|
||||
|
||||
**Removed:**
|
||||
|
||||
* <news item>
|
||||
|
||||
**Fixed:**
|
||||
|
||||
* Addressed robustness issue with ``"locked"`` history key not
|
||||
being present at startup.
|
||||
|
||||
**Security:**
|
||||
|
||||
* <news item>
|
23
news/vox-env-fix.rst
Normal file
23
news/vox-env-fix.rst
Normal file
|
@ -0,0 +1,23 @@
|
|||
**Added:**
|
||||
|
||||
* <news item>
|
||||
|
||||
**Changed:**
|
||||
|
||||
* <news item>
|
||||
|
||||
**Deprecated:**
|
||||
|
||||
* <news item>
|
||||
|
||||
**Removed:**
|
||||
|
||||
* <news item>
|
||||
|
||||
**Fixed:**
|
||||
|
||||
* ``vox`` xontrib works again with the new environment defaults.
|
||||
|
||||
**Security:**
|
||||
|
||||
* <news item>
|
34
setup.py
34
setup.py
|
@ -17,6 +17,7 @@ from setuptools import setup, find_packages
|
|||
from setuptools.command.sdist import sdist
|
||||
from setuptools.command.install import install
|
||||
from setuptools.command.develop import develop
|
||||
from setuptools.command.build_py import build_py
|
||||
from setuptools.command.install_scripts import install_scripts
|
||||
|
||||
try:
|
||||
|
@ -195,6 +196,20 @@ def restore_version():
|
|||
f.write(upd)
|
||||
|
||||
|
||||
class xbuild_py(build_py):
|
||||
"""Xonsh specialization of setuptools build_py class."""
|
||||
|
||||
def run(self):
|
||||
clean_tables()
|
||||
build_tables()
|
||||
amalgamate_source()
|
||||
# add dirty version number
|
||||
dirty = dirty_version()
|
||||
super().run()
|
||||
if dirty:
|
||||
restore_version()
|
||||
|
||||
|
||||
class xinstall(install):
|
||||
"""Xonsh specialization of setuptools install class.
|
||||
For production, let setuptools generate the
|
||||
|
@ -217,6 +232,7 @@ class xinstall(install):
|
|||
|
||||
traceback.print_exc()
|
||||
print("Installing Jupyter hook failed.")
|
||||
|
||||
super().run()
|
||||
if dirty:
|
||||
restore_version()
|
||||
|
@ -230,6 +246,7 @@ class xsdist(sdist):
|
|||
build_tables()
|
||||
amalgamate_source()
|
||||
dirty = dirty_version()
|
||||
files.extend(TABLES)
|
||||
super().make_release_tree(basedir, files)
|
||||
if dirty:
|
||||
restore_version()
|
||||
|
@ -279,18 +296,15 @@ class install_scripts_rewrite(install_scripts):
|
|||
|
||||
|
||||
# The custom install needs to be used on Windows machines
|
||||
cmdclass = {
|
||||
"install": xinstall,
|
||||
"sdist": xsdist,
|
||||
"build_py": xbuild_py,
|
||||
}
|
||||
if os.name == "nt":
|
||||
cmdclass = {
|
||||
"install": xinstall,
|
||||
"sdist": xsdist,
|
||||
"install_scripts": install_scripts_quoted_shebang,
|
||||
}
|
||||
cmdclass["install_scripts"] = install_scripts_quoted_shebang
|
||||
else:
|
||||
cmdclass = {
|
||||
"install": xinstall,
|
||||
"sdist": xsdist,
|
||||
"install_scripts": install_scripts_rewrite,
|
||||
}
|
||||
cmdclass["install_scripts"] = install_scripts_rewrite
|
||||
|
||||
|
||||
class xdevelop(develop):
|
||||
|
|
|
@ -17,6 +17,7 @@ from xonsh.built_ins import (
|
|||
enter_macro,
|
||||
path_literal,
|
||||
_BuiltIns,
|
||||
eval_fstring_field,
|
||||
)
|
||||
from xonsh.execer import Execer
|
||||
from xonsh.jobs import tasks
|
||||
|
@ -132,6 +133,7 @@ def xonsh_builtins(monkeypatch, xonsh_events):
|
|||
builtins.__xonsh__.list_of_list_of_strs_outer_product = (
|
||||
list_of_list_of_strs_outer_product
|
||||
)
|
||||
builtins.__xonsh__.eval_fstring_field = eval_fstring_field
|
||||
builtins.__xonsh__.history = DummyHistory()
|
||||
builtins.__xonsh__.subproc_captured_stdout = sp
|
||||
builtins.__xonsh__.subproc_captured_inject = sp
|
||||
|
@ -160,5 +162,5 @@ def xonsh_builtins(monkeypatch, xonsh_events):
|
|||
|
||||
def pytest_configure(config):
|
||||
"""Abort test run if --flake8 requested, since it would hang on parser_test.py"""
|
||||
if config.getoption('--flake8', ''):
|
||||
if config.getoption("--flake8", ""):
|
||||
pytest.exit("pytest-flake8 no longer supported, use flake8 instead.")
|
||||
|
|
|
@ -463,3 +463,16 @@ def test_env_iterate():
|
|||
env.register(re.compile("re"))
|
||||
for key in env:
|
||||
assert isinstance(key, str)
|
||||
|
||||
|
||||
def test_env_iterate_rawkeys():
|
||||
env = Env(TEST=0)
|
||||
r = re.compile("re")
|
||||
env.register(r)
|
||||
saw_regex = False
|
||||
for key in env.rawkeys():
|
||||
if isinstance(key, str):
|
||||
continue
|
||||
elif isinstance(key, type(r)) and key.pattern == "re":
|
||||
saw_regex = True
|
||||
assert saw_regex
|
||||
|
|
|
@ -9,9 +9,9 @@ import pytest
|
|||
|
||||
from xonsh.ast import AST, With, Pass, Str, Call
|
||||
from xonsh.parser import Parser
|
||||
from xonsh.parsers.base import eval_fstr_fields
|
||||
from xonsh.parsers.fstring_adaptor import FStringAdaptor
|
||||
|
||||
from tools import nodes_equal, skip_if_no_walrus
|
||||
from tools import nodes_equal, skip_if_no_walrus, VER_MAJOR_MINOR
|
||||
|
||||
|
||||
@pytest.fixture(autouse=True)
|
||||
|
@ -123,28 +123,47 @@ def test_f_env_var():
|
|||
check_xonsh_ast({}, 'F"{$PATH} and {$XONSH_DEBUG}"', run=False)
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"inp, exp",
|
||||
[
|
||||
('f"{}"', 'f"{}"'),
|
||||
('f"$HOME"', 'f"$HOME"'),
|
||||
('f"{0} - {1}"', 'f"{0} - {1}"'),
|
||||
fstring_adaptor_parameters = [
|
||||
('f"$HOME"', "$HOME"),
|
||||
('f"{0} - {1}"', "0 - 1"),
|
||||
('f"{$HOME}"', "/foo/bar"),
|
||||
('f"{ $HOME }"', "/foo/bar"),
|
||||
("f\"{'$HOME'}\"", "$HOME"),
|
||||
("f\"$HOME = {$HOME}\"", "$HOME = /foo/bar"),
|
||||
("f\"{${'HOME'}}\"", "/foo/bar"),
|
||||
("f'{${$FOO+$BAR}}'", "/foo/bar"),
|
||||
("f\"${$FOO}{$BAR}={f'{$HOME}'}\"", "$HOME=/foo/bar"),
|
||||
(
|
||||
'f"{$HOME}"',
|
||||
"f\"{__xonsh__.execer.eval(r'$HOME', glbs=globals(), locs=locals())}\"",
|
||||
'''f"""foo
|
||||
{f"_{$HOME}_"}
|
||||
bar"""''',
|
||||
"foo\n_/foo/bar_\nbar",
|
||||
),
|
||||
(
|
||||
'f"{ $HOME }"',
|
||||
"f\"{__xonsh__.execer.eval(r'$HOME ', glbs=globals(), locs=locals())}\"",
|
||||
'''f"""foo
|
||||
{f"_{${'HOME'}}_"}
|
||||
bar"""''',
|
||||
"foo\n_/foo/bar_\nbar",
|
||||
),
|
||||
(
|
||||
"f\"{'$HOME'}\"",
|
||||
"f\"{__xonsh__.execer.eval(r'\\'$HOME\\'', glbs=globals(), locs=locals())}\"",
|
||||
'''f"""foo
|
||||
{f"_{${ $FOO + $BAR }}_"}
|
||||
bar"""''',
|
||||
"foo\n_/foo/bar_\nbar",
|
||||
),
|
||||
],
|
||||
)
|
||||
def test_eval_fstr_fields(inp, exp):
|
||||
obs = eval_fstr_fields(inp, 'f"')
|
||||
]
|
||||
if VER_MAJOR_MINOR >= (3, 8):
|
||||
fstring_adaptor_parameters.append(("f'{$HOME=}'", "$HOME='/foo/bar'"))
|
||||
|
||||
|
||||
@pytest.mark.parametrize("inp, exp", fstring_adaptor_parameters)
|
||||
def test_fstring_adaptor(inp, exp):
|
||||
joined_str_node = FStringAdaptor(inp, "f").run()
|
||||
assert isinstance(joined_str_node, ast.JoinedStr)
|
||||
node = ast.Expression(body=joined_str_node)
|
||||
code = compile(node, "<test_fstring_adaptor>", mode="eval")
|
||||
builtins.__xonsh__.env = {"HOME": "/foo/bar", "FOO": "HO", "BAR": "ME"}
|
||||
obs = eval(code)
|
||||
assert exp == obs
|
||||
|
||||
|
||||
|
|
|
@ -8,6 +8,7 @@ from xonsh.platform import minimum_required_ptk_version
|
|||
|
||||
# verify error if ptk not installed or below min
|
||||
|
||||
from xonsh.ptk_shell.shell import tokenize_ansi
|
||||
from xonsh.shell import Shell
|
||||
|
||||
|
||||
|
@ -26,7 +27,15 @@ from xonsh.shell import Shell
|
|||
((4, 0, 0), "prompt_toolkit", "prompt_toolkit", None, False),
|
||||
],
|
||||
)
|
||||
def test_prompt_toolkit_version_checks(ptk_ver, ini_shell_type, exp_shell_type, warn_snip, using_vended_ptk, monkeypatch, xonsh_builtins):
|
||||
def test_prompt_toolkit_version_checks(
|
||||
ptk_ver,
|
||||
ini_shell_type,
|
||||
exp_shell_type,
|
||||
warn_snip,
|
||||
using_vended_ptk,
|
||||
monkeypatch,
|
||||
xonsh_builtins,
|
||||
):
|
||||
|
||||
mocked_warn = ""
|
||||
|
||||
|
@ -43,9 +52,15 @@ def test_prompt_toolkit_version_checks(ptk_ver, ini_shell_type, exp_shell_type,
|
|||
nonlocal ptk_ver
|
||||
return ptk_ver is not None
|
||||
|
||||
monkeypatch.setattr("xonsh.shell.warnings.warn", mock_warning) # hardwon: patch the caller!
|
||||
monkeypatch.setattr("xonsh.shell.ptk_above_min_supported", mock_ptk_above_min_supported) # have to patch both callers
|
||||
monkeypatch.setattr("xonsh.platform.ptk_above_min_supported", mock_ptk_above_min_supported)
|
||||
monkeypatch.setattr(
|
||||
"xonsh.shell.warnings.warn", mock_warning
|
||||
) # hardwon: patch the caller!
|
||||
monkeypatch.setattr(
|
||||
"xonsh.shell.ptk_above_min_supported", mock_ptk_above_min_supported
|
||||
) # have to patch both callers
|
||||
monkeypatch.setattr(
|
||||
"xonsh.platform.ptk_above_min_supported", mock_ptk_above_min_supported
|
||||
)
|
||||
monkeypatch.setattr("xonsh.platform.has_prompt_toolkit", mock_has_prompt_toolkit)
|
||||
|
||||
old_syspath = sys.path.copy()
|
||||
|
@ -60,7 +75,6 @@ def test_prompt_toolkit_version_checks(ptk_ver, ini_shell_type, exp_shell_type,
|
|||
|
||||
sys.path = old_syspath
|
||||
|
||||
|
||||
assert act_shell_type == exp_shell_type
|
||||
|
||||
if warn_snip:
|
||||
|
@ -68,4 +82,27 @@ def test_prompt_toolkit_version_checks(ptk_ver, ini_shell_type, exp_shell_type,
|
|||
|
||||
pass
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"prompt_tokens, ansi_string_parts",
|
||||
[
|
||||
# no ansi, single token
|
||||
([("fake style", "no ansi here")], ["no ansi here"]),
|
||||
# no ansi, multiple tokens
|
||||
([("s1", "no"), ("s2", "ansi here")], ["no", "ansi here"]),
|
||||
# ansi only, multiple
|
||||
([("s1", "\x1b[33mansi \x1b[1monly")], ["", "ansi ", "only"]),
|
||||
# mixed
|
||||
(
|
||||
[("s1", "no ansi"), ("s2", "mixed \x1b[33mansi")],
|
||||
["no ansi", "mixed ", "ansi"],
|
||||
),
|
||||
],
|
||||
)
|
||||
def test_tokenize_ansi(prompt_tokens, ansi_string_parts):
|
||||
ansi_tokens = tokenize_ansi(prompt_tokens)
|
||||
for token, text in zip(ansi_tokens, ansi_string_parts):
|
||||
assert token[1] == text
|
||||
|
||||
|
||||
# someday: initialize PromptToolkitShell and have it actually do something.
|
||||
|
|
|
@ -19,6 +19,7 @@ from xonsh.tools import (
|
|||
always_true,
|
||||
argvquote,
|
||||
bool_or_int_to_str,
|
||||
bool_or_none_to_str,
|
||||
bool_to_str,
|
||||
check_for_partial_string,
|
||||
dynamic_cwd_tuple_to_str,
|
||||
|
@ -32,6 +33,7 @@ from xonsh.tools import (
|
|||
find_next_break,
|
||||
is_bool,
|
||||
is_bool_or_int,
|
||||
is_bool_or_none,
|
||||
is_callable,
|
||||
is_dynamic_cwd_width,
|
||||
is_env_path,
|
||||
|
@ -46,6 +48,7 @@ from xonsh.tools import (
|
|||
subproc_toks,
|
||||
to_bool,
|
||||
to_bool_or_int,
|
||||
to_bool_or_none,
|
||||
to_dynamic_cwd_tuple,
|
||||
to_logfile_opt,
|
||||
pathsep_to_set,
|
||||
|
@ -1090,6 +1093,42 @@ def test_bool_or_int_to_str(inp, exp):
|
|||
assert exp == obs
|
||||
|
||||
|
||||
@pytest.mark.parametrize("inp", [True, False, None])
|
||||
def test_is_bool_or_none_true(inp):
|
||||
assert is_bool_or_none(inp)
|
||||
|
||||
|
||||
@pytest.mark.parametrize("inp", [1, "yooo hooo!"])
|
||||
def test_is_bool_or_none_false(inp):
|
||||
assert not is_bool_or_none(inp)
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"inp, exp",
|
||||
[
|
||||
(True, True),
|
||||
(False, False),
|
||||
(None, None),
|
||||
("", False),
|
||||
("0", False),
|
||||
("False", False),
|
||||
("NONE", None),
|
||||
("TRUE", True),
|
||||
("1", True),
|
||||
(0, False),
|
||||
(1, True),
|
||||
],
|
||||
)
|
||||
def test_to_bool_or_none(inp, exp):
|
||||
obs = to_bool_or_none(inp)
|
||||
assert exp == obs
|
||||
|
||||
|
||||
@pytest.mark.parametrize("inp, exp", [(True, "1"), (False, ""), (None, "None")])
|
||||
def test_bool_or_none_to_str(inp, exp):
|
||||
assert bool_or_none_to_str(inp) == exp
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"inp, exp",
|
||||
[
|
||||
|
|
|
@ -101,6 +101,7 @@ from ast import (
|
|||
dump,
|
||||
walk,
|
||||
increment_lineno,
|
||||
Constant,
|
||||
)
|
||||
from ast import Ellipsis as EllipsisNode
|
||||
|
||||
|
|
|
@ -1061,6 +1061,14 @@ def list_of_list_of_strs_outer_product(x):
|
|||
return rtn
|
||||
|
||||
|
||||
def eval_fstring_field(field):
|
||||
"""Evaluates the argument in Xonsh context."""
|
||||
res = __xonsh__.execer.eval(
|
||||
field[0].strip(), glbs=globals(), locs=builtins.__xonsh__.ctx, filename=field[1]
|
||||
)
|
||||
return res
|
||||
|
||||
|
||||
@lazyobject
|
||||
def MACRO_FLAG_KINDS():
|
||||
return {
|
||||
|
@ -1403,8 +1411,8 @@ class XonshSession:
|
|||
self.all_jobs = {}
|
||||
self.ensure_list_of_strs = ensure_list_of_strs
|
||||
self.list_of_strs_or_callables = list_of_strs_or_callables
|
||||
|
||||
self.list_of_list_of_strs_outer_product = list_of_list_of_strs_outer_product
|
||||
self.eval_fstring_field = eval_fstring_field
|
||||
|
||||
self.completers = xonsh.completers.init.default_completers()
|
||||
self.call_macro = call_macro
|
||||
|
|
|
@ -41,6 +41,9 @@ from xonsh.tools import (
|
|||
is_bool,
|
||||
to_bool,
|
||||
bool_to_str,
|
||||
is_bool_or_none,
|
||||
to_bool_or_none,
|
||||
bool_or_none_to_str,
|
||||
is_history_tuple,
|
||||
to_history_tuple,
|
||||
history_tuple_to_str,
|
||||
|
@ -1298,9 +1301,9 @@ def DEFAULT_VARS():
|
|||
doc_configurable=False,
|
||||
),
|
||||
"THREAD_SUBPROCS": Var(
|
||||
is_bool,
|
||||
to_bool,
|
||||
bool_to_str,
|
||||
is_bool_or_none,
|
||||
to_bool_or_none,
|
||||
bool_or_none_to_str,
|
||||
True,
|
||||
"Whether or not to try to run subrocess mode in a Python thread, "
|
||||
"when applicable. There are various trade-offs, which normally "
|
||||
|
@ -1319,7 +1322,12 @@ def DEFAULT_VARS():
|
|||
"* Stopping the thread with ``Ctrl+Z`` yields to job control.\n"
|
||||
"* Threadable commands are run with ``Popen`` and threadable \n"
|
||||
" alias are run with ``ProcProxy``.\n\n"
|
||||
"The desired effect is often up to the command, user, or use case.",
|
||||
"The desired effect is often up to the command, user, or use case.\n\n"
|
||||
"None values are for internal use only and are used to turn off "
|
||||
"threading when loading xonshrc files. This is done because Bash "
|
||||
"was automatically placing new xonsh instances in the background "
|
||||
"at startup when threadable subprocs were used. Please see "
|
||||
"https://github.com/xonsh/xonsh/pull/3705 for more information.\n",
|
||||
),
|
||||
"TITLE": Var(
|
||||
is_string,
|
||||
|
@ -1415,7 +1423,8 @@ def DEFAULT_VARS():
|
|||
default_xonshrc,
|
||||
"A list of the locations of run control files, if they exist. User "
|
||||
"defined run control file will supersede values set in system-wide "
|
||||
"control file if there is a naming collision.",
|
||||
"control file if there is a naming collision. $THREAD_SUBPROCS=None "
|
||||
"when reading in run control files.",
|
||||
doc_default=(
|
||||
"On Linux & Mac OSX: ``['/etc/xonshrc', '~/.config/xonsh/rc.xsh', '~/.xonshrc']``\n"
|
||||
"\nOn Windows: "
|
||||
|
@ -1984,8 +1993,14 @@ class Env(cabc.MutableMapping):
|
|||
except KeyError:
|
||||
return default
|
||||
|
||||
def rawkeys(self):
|
||||
"""An iterator that returns all environment keys in their original form.
|
||||
This include string & compiled regular expression keys.
|
||||
"""
|
||||
yield from (set(self._d) | set(self._vars))
|
||||
|
||||
def __iter__(self):
|
||||
for key in set(self._d) | set(self._vars):
|
||||
for key in self.rawkeys():
|
||||
if isinstance(key, str):
|
||||
yield key
|
||||
|
||||
|
@ -2128,6 +2143,8 @@ def xonshrc_context(rcfiles=None, execer=None, ctx=None, env=None, login=True):
|
|||
ctx = {} if ctx is None else ctx
|
||||
if rcfiles is None:
|
||||
return env
|
||||
orig_thread = env.get("THREAD_SUBPROCS")
|
||||
env["THREAD_SUBPROCS"] = None
|
||||
env["XONSHRC"] = tuple(rcfiles)
|
||||
for rcfile in rcfiles:
|
||||
if not os.path.isfile(rcfile):
|
||||
|
@ -2136,6 +2153,8 @@ def xonshrc_context(rcfiles=None, execer=None, ctx=None, env=None, login=True):
|
|||
_, ext = os.path.splitext(rcfile)
|
||||
status = xonsh_script_run_control(rcfile, ctx, env, execer=execer, login=login)
|
||||
loaded.append(status)
|
||||
if env["THREAD_SUBPROCS"] is None:
|
||||
env["THREAD_SUBPROCS"] = orig_thread
|
||||
return ctx
|
||||
|
||||
|
||||
|
|
|
@ -188,7 +188,7 @@ class JsonHistoryGC(threading.Thread):
|
|||
files.append((os.path.getmtime(f), 0, f, cur_file_size))
|
||||
continue
|
||||
lj = xlj.LazyJSON(f, reopen=False)
|
||||
if lj["locked"] and lj["ts"][0] < boot:
|
||||
if lj.get("locked", False) and lj["ts"][0] < boot:
|
||||
# computer was rebooted between when this history was created
|
||||
# and now and so this history should be unlocked.
|
||||
hist = lj.load()
|
||||
|
@ -197,16 +197,12 @@ class JsonHistoryGC(threading.Thread):
|
|||
with open(f, "w", newline="\n") as fp:
|
||||
xlj.ljdump(hist, fp, sort_keys=True)
|
||||
lj = xlj.LazyJSON(f, reopen=False)
|
||||
if only_unlocked and lj["locked"]:
|
||||
if only_unlocked and lj.get("locked", False):
|
||||
continue
|
||||
# info: file size, closing timestamp, number of commands, filename
|
||||
ts = lj.get("ts", (0.0, None))
|
||||
files.append(
|
||||
(
|
||||
lj["ts"][1] or lj["ts"][0],
|
||||
len(lj.sizes["cmds"]) - 1,
|
||||
f,
|
||||
cur_file_size,
|
||||
),
|
||||
(ts[1] or ts[0], len(lj.sizes["cmds"]) - 1, f, cur_file_size,),
|
||||
)
|
||||
lj.close()
|
||||
if xonsh_debug:
|
||||
|
|
|
@ -10,14 +10,14 @@ from collections.abc import Iterable, Sequence, Mapping
|
|||
|
||||
from xonsh.ply.ply import yacc
|
||||
|
||||
from xonsh.tools import FORMATTER
|
||||
from xonsh import ast
|
||||
from xonsh.ast import has_elts, xonsh_call, load_attribute_chain
|
||||
from xonsh.lexer import Lexer, LexToken
|
||||
from xonsh.platform import PYTHON_VERSION_INFO
|
||||
from xonsh.tokenize import SearchPath, StringPrefix
|
||||
from xonsh.lazyasd import LazyObject, lazyobject
|
||||
from xonsh.lazyasd import LazyObject
|
||||
from xonsh.parsers.context_check import check_contexts
|
||||
from xonsh.parsers.fstring_adaptor import FStringAdaptor
|
||||
|
||||
|
||||
RE_SEARCHPATH = LazyObject(lambda: re.compile(SearchPath), globals(), "RE_SEARCHPATH")
|
||||
|
@ -26,11 +26,6 @@ RE_STRINGPREFIX = LazyObject(
|
|||
)
|
||||
|
||||
|
||||
@lazyobject
|
||||
def RE_FSTR_EVAL_CHARS():
|
||||
return re.compile(".*?[!@$`]")
|
||||
|
||||
|
||||
class Location(object):
|
||||
"""Location in a file."""
|
||||
|
||||
|
@ -211,49 +206,6 @@ def hasglobstar(x):
|
|||
return False
|
||||
|
||||
|
||||
def _wrap_fstr_field(field, spec, conv):
|
||||
rtn = "{" + field
|
||||
if conv:
|
||||
rtn += "!" + conv
|
||||
if spec:
|
||||
rtn += ":" + spec
|
||||
rtn += "}"
|
||||
return rtn
|
||||
|
||||
|
||||
def eval_fstr_fields(fstring, prefix, filename=None):
|
||||
"""Takes an fstring (and its prefix, ie f") that may contain
|
||||
xonsh expressions as its field values and
|
||||
substitues them for a xonsh eval() call as needed. Roughly,
|
||||
for example, this will take f"{$HOME}" and transform it to
|
||||
be f"{__xonsh__.execer.eval(r'$HOME')}".
|
||||
"""
|
||||
last = fstring[-1]
|
||||
q, r = ("'", r"\'") if last == '"' else ('"', r"\"")
|
||||
prelen = len(prefix)
|
||||
postlen = len(fstring) - len(fstring.rstrip(last))
|
||||
template = fstring[prelen:-postlen]
|
||||
repl = prefix
|
||||
for literal, field, spec, conv in FORMATTER.parse(template):
|
||||
repl += literal
|
||||
if field is None:
|
||||
continue
|
||||
elif RE_FSTR_EVAL_CHARS.match(field) is None:
|
||||
# just a normal python field, simply reconstruct.
|
||||
repl += _wrap_fstr_field(field, spec, conv)
|
||||
else:
|
||||
# the field has a special xonsh character, so we must eval it
|
||||
eval_field = "__xonsh__.execer.eval(r" + q
|
||||
eval_field += field.lstrip().replace(q, r)
|
||||
eval_field += q + ", glbs=globals(), locs=locals()"
|
||||
if filename is not None:
|
||||
eval_field += ", filename=" + q + filename + q
|
||||
eval_field += ")"
|
||||
repl += _wrap_fstr_field(eval_field, spec, conv)
|
||||
repl += last * postlen
|
||||
return repl
|
||||
|
||||
|
||||
class YaccLoader(Thread):
|
||||
"""Thread to load (but not shave) the yacc parser."""
|
||||
|
||||
|
@ -2219,7 +2171,7 @@ class BaseParser(object):
|
|||
func=leader,
|
||||
lineno=leader.lineno,
|
||||
col_offset=leader.col_offset,
|
||||
**trailer
|
||||
**trailer,
|
||||
)
|
||||
elif isinstance(trailer, (ast.Tuple, tuple)):
|
||||
# call macro functions
|
||||
|
@ -2435,13 +2387,23 @@ class BaseParser(object):
|
|||
if "p" in prefix and "f" in prefix:
|
||||
new_pref = prefix.replace("p", "")
|
||||
value_without_p = new_pref + p1.value[len(prefix) :]
|
||||
s = eval_fstr_fields(value_without_p, new_pref, filename=self.lexer.fname)
|
||||
s = pyparse(s).body[0].value
|
||||
try:
|
||||
s = pyparse(value_without_p).body[0].value
|
||||
except SyntaxError:
|
||||
s = None
|
||||
if s is None:
|
||||
try:
|
||||
s = FStringAdaptor(
|
||||
value_without_p, new_pref, filename=self.lexer.fname
|
||||
).run()
|
||||
except SyntaxError as e:
|
||||
self._parse_error(
|
||||
str(e), self.currloc(lineno=p1.lineno, column=p1.lexpos)
|
||||
)
|
||||
s = ast.increment_lineno(s, p1.lineno - 1)
|
||||
p[0] = xonsh_call(
|
||||
"__xonsh__.path_literal", [s], lineno=p1.lineno, col=p1.lexpos
|
||||
)
|
||||
|
||||
elif "p" in prefix:
|
||||
value_without_p = prefix.replace("p", "") + p1.value[len(prefix) :]
|
||||
s = ast.Str(
|
||||
|
@ -2453,9 +2415,22 @@ class BaseParser(object):
|
|||
"__xonsh__.path_literal", [s], lineno=p1.lineno, col=p1.lexpos
|
||||
)
|
||||
elif "f" in prefix:
|
||||
s = eval_fstr_fields(p1.value, prefix, filename=self.lexer.fname)
|
||||
s = pyparse(s).body[0].value
|
||||
try:
|
||||
s = pyparse(p1.value).body[0].value
|
||||
except SyntaxError:
|
||||
s = None
|
||||
if s is None:
|
||||
try:
|
||||
s = FStringAdaptor(
|
||||
p1.value, prefix, filename=self.lexer.fname
|
||||
).run()
|
||||
except SyntaxError as e:
|
||||
self._parse_error(
|
||||
str(e), self.currloc(lineno=p1.lineno, column=p1.lexpos)
|
||||
)
|
||||
s = ast.increment_lineno(s, p1.lineno - 1)
|
||||
if "r" in prefix:
|
||||
setattr(s, "is_raw", True)
|
||||
p[0] = s
|
||||
else:
|
||||
s = ast.literal_eval(p1.value)
|
||||
|
|
196
xonsh/parsers/fstring_adaptor.py
Normal file
196
xonsh/parsers/fstring_adaptor.py
Normal file
|
@ -0,0 +1,196 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""Implements helper class for parsing Xonsh syntax within f-strings."""
|
||||
import re
|
||||
from ast import parse as pyparse
|
||||
|
||||
from xonsh import ast
|
||||
from xonsh.lazyasd import lazyobject
|
||||
from xonsh.platform import PYTHON_VERSION_INFO
|
||||
|
||||
|
||||
@lazyobject
|
||||
def RE_FSTR_FIELD_WRAPPER():
|
||||
if PYTHON_VERSION_INFO > (3, 8):
|
||||
return re.compile(r"(__xonsh__\.eval_fstring_field\((\d+)\))\s*[^=]")
|
||||
else:
|
||||
return re.compile(r"(__xonsh__\.eval_fstring_field\((\d+)\))")
|
||||
|
||||
|
||||
if PYTHON_VERSION_INFO > (3, 8):
|
||||
|
||||
@lazyobject
|
||||
def RE_FSTR_SELF_DOC_FIELD_WRAPPER():
|
||||
return re.compile(r"(__xonsh__\.eval_fstring_field\((\d+)\)\s*)=")
|
||||
|
||||
|
||||
class FStringAdaptor:
|
||||
"""Helper for parsing Xonsh syntax within f-strings."""
|
||||
|
||||
def __init__(self, fstring, prefix, filename=None):
|
||||
"""Parses an f-string containing special Xonsh syntax and returns
|
||||
ast.JoinedStr AST node instance representing the input string.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
fstring : str
|
||||
The input f-string.
|
||||
prefix : str
|
||||
Prefix of the f-string (e.g. "fr").
|
||||
filename : str, optional
|
||||
File from which the code was read or any string describing
|
||||
origin of the code.
|
||||
"""
|
||||
self.fstring = fstring
|
||||
self.prefix = prefix
|
||||
self.filename = filename
|
||||
self.fields = {}
|
||||
self.repl = ""
|
||||
self.res = None
|
||||
|
||||
def _patch_special_syntax(self):
|
||||
"""Takes an fstring (and its prefix, ie "f") that may contain
|
||||
xonsh expressions as its field values and substitues them for
|
||||
a call to __xonsh__.eval_fstring_field as needed.
|
||||
"""
|
||||
prelen = len(self.prefix)
|
||||
quote = self.fstring[prelen]
|
||||
if self.fstring[prelen + 1] == quote:
|
||||
quote *= 3
|
||||
template = self.fstring[prelen + len(quote) : -len(quote)]
|
||||
while True:
|
||||
repl = self.prefix + quote + template + quote
|
||||
try:
|
||||
res = pyparse(repl)
|
||||
break
|
||||
except SyntaxError as e:
|
||||
# The e.text attribute is expected to contain the failing
|
||||
# expression, e.g. "($HOME)" for f"{$HOME}" string.
|
||||
if e.text is None or e.text[0] != "(":
|
||||
raise
|
||||
error_expr = e.text[1:-1]
|
||||
epos = template.find(error_expr)
|
||||
if epos < 0:
|
||||
raise
|
||||
# We can olny get here in the case of handled SyntaxError.
|
||||
# Patch the last error and start over.
|
||||
xonsh_field = (error_expr, self.filename if self.filename else None)
|
||||
field_id = id(xonsh_field)
|
||||
self.fields[field_id] = xonsh_field
|
||||
eval_field = f"__xonsh__.eval_fstring_field({field_id})"
|
||||
template = template[:epos] + eval_field + template[epos + len(error_expr) :]
|
||||
|
||||
self.repl = repl
|
||||
self.res = res.body[0].value
|
||||
|
||||
def _unpatch_strings(self):
|
||||
"""Reverts false-positive field matches within strings."""
|
||||
reparse = False
|
||||
for node in ast.walk(self.res):
|
||||
if isinstance(node, ast.Constant) and isinstance(node.value, str):
|
||||
value = node.value
|
||||
elif isinstance(node, ast.Str):
|
||||
value = node.s
|
||||
else:
|
||||
continue
|
||||
|
||||
match = RE_FSTR_FIELD_WRAPPER.search(value)
|
||||
if match is None:
|
||||
continue
|
||||
field = self.fields.pop(int(match.group(2)), None)
|
||||
if field is None:
|
||||
continue
|
||||
self.repl = self.repl.replace(match.group(1), field[0], 1)
|
||||
reparse = True
|
||||
|
||||
if reparse:
|
||||
self.res = pyparse(self.repl).body[0].value
|
||||
|
||||
def _unpatch_selfdoc_strings(self):
|
||||
"""Reverts false-positive matches within Python 3.8 sef-documenting
|
||||
f-string expressions."""
|
||||
for node in ast.walk(self.res):
|
||||
if isinstance(node, ast.Constant) and isinstance(node.value, str):
|
||||
value = node.value
|
||||
elif isinstance(node, ast.Str):
|
||||
value = node.s
|
||||
else:
|
||||
continue
|
||||
|
||||
match = RE_FSTR_SELF_DOC_FIELD_WRAPPER.search(value)
|
||||
if match is None:
|
||||
continue
|
||||
field = self.fields.get(int(match.group(2)), None)
|
||||
if field is None:
|
||||
continue
|
||||
value = value.replace(match.group(1), field[0], 1)
|
||||
if isinstance(node, ast.Str):
|
||||
node.s = value
|
||||
else:
|
||||
node.value = value
|
||||
|
||||
def _fix_eval_field_params(self):
    """Replace f-string field ID placeholders with the actual field
    expressions.

    Rewrites every ``__xonsh__.eval_fstring_field(<id>)`` call found in
    the parsed AST so that its argument becomes an
    ``(expression, filename)`` tuple popped from ``self.fields``.
    """
    for node in ast.walk(self.res):
        # Guard every attribute access below: an ordinary call such as
        # ``len(x)`` has an ast.Name func with no ``.value`` attribute,
        # which would raise AttributeError in an unguarded chained test.
        if not (
            isinstance(node, ast.Call)
            and isinstance(node.func, ast.Attribute)
            and isinstance(node.func.value, ast.Name)
            and node.func.value.id == "__xonsh__"
            and node.func.attr == "eval_fstring_field"
            and len(node.args) > 0
        ):
            continue

        if PYTHON_VERSION_INFO > (3, 8):
            # Python 3.8+: integer literals parse to ast.Constant.
            if isinstance(node.args[0], ast.Constant) and isinstance(
                node.args[0].value, int
            ):
                field = self.fields.pop(node.args[0].value, None)
                if field is None:
                    continue
                lineno = node.args[0].lineno
                col_offset = node.args[0].col_offset
                field_node = ast.Tuple(
                    elts=[
                        ast.Constant(
                            value=field[0], lineno=lineno, col_offset=col_offset
                        ),
                        ast.Constant(
                            value=field[1], lineno=lineno, col_offset=col_offset
                        ),
                    ],
                    ctx=ast.Load(),
                    lineno=lineno,
                    col_offset=col_offset,
                )
                node.args[0] = field_node
        elif isinstance(node.args[0], ast.Num):
            # Older Pythons: integer literals parse to ast.Num; strings to
            # ast.Str and None to ast.NameConstant.
            field = self.fields.pop(node.args[0].n, None)
            if field is None:
                continue
            lineno = node.args[0].lineno
            col_offset = node.args[0].col_offset
            elts = [ast.Str(s=field[0], lineno=lineno, col_offset=col_offset)]
            if field[1] is not None:
                elts.append(
                    ast.Str(s=field[1], lineno=lineno, col_offset=col_offset)
                )
            else:
                elts.append(
                    ast.NameConstant(
                        value=None, lineno=lineno, col_offset=col_offset
                    )
                )
            field_node = ast.Tuple(
                elts=elts, ctx=ast.Load(), lineno=lineno, col_offset=col_offset
            )
            node.args[0] = field_node
def run(self):
    """Run all patch/unpatch passes in order and return the resulting
    expression node (an ``ast.JoinedStr`` instance)."""
    self._patch_special_syntax()
    self._unpatch_strings()
    if PYTHON_VERSION_INFO > (3, 8):
        self._unpatch_selfdoc_strings()
    self._fix_eval_field_params()
    # Every registered field must have been consumed by the passes above.
    assert not self.fields
    return self.res
|
|
@ -17,6 +17,7 @@ from xonsh.ptk_shell.history import PromptToolkitHistory, _cust_history_matches
|
|||
from xonsh.ptk_shell.completer import PromptToolkitCompleter
|
||||
from xonsh.ptk_shell.key_bindings import load_xonsh_bindings
|
||||
|
||||
from prompt_toolkit import ANSI
|
||||
from prompt_toolkit.auto_suggest import AutoSuggestFromHistory
|
||||
from prompt_toolkit.lexers import PygmentsLexer
|
||||
from prompt_toolkit.enums import EditingMode
|
||||
|
@ -25,7 +26,7 @@ from prompt_toolkit.history import ThreadedHistory
|
|||
from prompt_toolkit.shortcuts import print_formatted_text as ptk_print
|
||||
from prompt_toolkit.shortcuts import CompleteStyle
|
||||
from prompt_toolkit.shortcuts.prompt import PromptSession
|
||||
from prompt_toolkit.formatted_text import PygmentsTokens
|
||||
from prompt_toolkit.formatted_text import PygmentsTokens, to_formatted_text
|
||||
from prompt_toolkit.styles import merge_styles, Style
|
||||
from prompt_toolkit.styles.pygments import (
|
||||
style_from_pygments_cls,
|
||||
|
@ -46,6 +47,32 @@ Fired after prompt toolkit has been initialized
|
|||
)
|
||||
|
||||
|
||||
def tokenize_ansi(tokens):
    """Checks a list of (token, str) tuples for ANSI escape sequences and
    extends the token list with the new formatted entries.

    During processing tokens are converted to ``prompt_toolkit.FormattedText``.
    Returns a list of similar (token, str) tuples.
    """
    formatted_tokens = to_formatted_text(tokens)
    ansi_tokens = []
    for style, text in formatted_tokens:
        if "\x1b" in text:
            # Let prompt_toolkit parse the escape codes into styled parts,
            # then merge consecutive parts that share the same style.
            formatted_ansi = to_formatted_text(ANSI(text))
            ansi_text = ""
            prev_style = ""
            for ansi_style, ansi_text_part in formatted_ansi:
                if prev_style == ansi_style:
                    ansi_text += ansi_text_part
                else:
                    # Flush only non-empty runs: unconditionally flushing
                    # emitted a spurious (style, "") entry on the first
                    # part of every ANSI-containing token.
                    if ansi_text:
                        ansi_tokens.append((prev_style or style, ansi_text))
                    prev_style = ansi_style
                    ansi_text = ansi_text_part
            if ansi_text:
                ansi_tokens.append((prev_style or style, ansi_text))
        else:
            ansi_tokens.append((style, text))
    return ansi_tokens
|
||||
|
||||
|
||||
class PromptToolkitShell(BaseShell):
|
||||
"""The xonsh shell for prompt_toolkit v2 and later."""
|
||||
|
||||
|
@ -220,7 +247,7 @@ class PromptToolkitShell(BaseShell):
|
|||
carriage_return()
|
||||
self._first_prompt = False
|
||||
self.settitle()
|
||||
return PygmentsTokens(toks)
|
||||
return tokenize_ansi(PygmentsTokens(toks))
|
||||
|
||||
def rprompt_tokens(self):
|
||||
"""Returns a list of (token, str) tuples for the current right
|
||||
|
@ -237,7 +264,7 @@ class PromptToolkitShell(BaseShell):
|
|||
except Exception: # pylint: disable=broad-except
|
||||
print_exception()
|
||||
toks = partial_color_tokenize(p)
|
||||
return PygmentsTokens(toks)
|
||||
return tokenize_ansi(PygmentsTokens(toks))
|
||||
|
||||
def _bottom_toolbar_tokens(self):
|
||||
"""Returns a list of (token, str) tuples for the current bottom
|
||||
|
@ -251,7 +278,7 @@ class PromptToolkitShell(BaseShell):
|
|||
except Exception: # pylint: disable=broad-except
|
||||
print_exception()
|
||||
toks = partial_color_tokenize(p)
|
||||
return PygmentsTokens(toks)
|
||||
return tokenize_ansi(PygmentsTokens(toks))
|
||||
|
||||
@property
|
||||
def bottom_toolbar_tokens(self):
|
||||
|
|
|
@ -1171,6 +1171,11 @@ def is_bool(x):
|
|||
return isinstance(x, bool)
|
||||
|
||||
|
||||
def is_bool_or_none(x):
    """Check whether *x* is a bool instance or the None singleton."""
    if x is None:
        return True
    return isinstance(x, bool)
|
||||
|
||||
|
||||
def is_logfile_opt(x):
|
||||
"""
|
||||
Checks if x is a valid $XONSH_TRACEBACK_LOGFILE option. Returns False
|
||||
|
@ -1234,6 +1239,20 @@ def to_bool(x):
|
|||
return bool(x)
|
||||
|
||||
|
||||
def to_bool_or_none(x):
    """Converts to a boolean or None in a semantically meaningful way.

    * None and bool values pass through unchanged.
    * The string "none" (case-insensitive) maps to None; any other string
      is falsy iff its lowercase form is in ``_FALSES``.
    * Everything else is converted with ``bool()``.
    """
    if x is None or isinstance(x, bool):
        return x
    elif isinstance(x, str):
        # Reuse the lowered string instead of calling x.lower() twice,
        # and return the membership test directly instead of the verbose
        # "False if ... else True" form.  (Also fixes the stray fourth
        # quote that the original docstring opened with.)
        low_x = x.lower()
        if low_x == "none":
            return None
        return low_x not in _FALSES
    else:
        return bool(x)
|
||||
|
||||
|
||||
def to_itself(x):
    """Identity conversion: hand *x* back unchanged."""
    return x
|
||||
|
@ -1246,6 +1265,14 @@ def bool_to_str(x):
|
|||
return "1" if x else ""
|
||||
|
||||
|
||||
def bool_or_none_to_str(x):
    """Render a bool-or-None value as its environment-string form:
    None -> "None", truthy -> "1", falsy -> ""."""
    if x is None:
        return "None"
    return "1" if x else ""
|
||||
|
||||
|
||||
# Lazily-built frozenset of single-letter shortcuts and full words
# ("b"/"break", "s"/"skip", "q"/"quit").
# NOTE(review): presumably user responses that interrupt an interactive
# prompt loop — confirm at the call sites of _BREAKS.
_BREAKS = LazyObject(
    lambda: frozenset(["b", "break", "s", "skip", "q", "quit"]), globals(), "_BREAKS"
)
|
||||
|
|
|
@ -332,9 +332,10 @@ class Vox(collections.abc.Mapping):
|
|||
|
||||
Returns None if no environment is active.
|
||||
"""
|
||||
if "VIRTUAL_ENV" not in builtins.__xonsh__.env:
|
||||
env = builtins.__xonsh__.env
|
||||
if not env["VIRTUAL_ENV"]:
|
||||
return
|
||||
env_path = builtins.__xonsh__.env["VIRTUAL_ENV"]
|
||||
env_path = env["VIRTUAL_ENV"]
|
||||
if env_path.startswith(self.venvdir):
|
||||
name = env_path[len(self.venvdir) :]
|
||||
if name[0] in "/\\":
|
||||
|
@ -354,7 +355,7 @@ class Vox(collections.abc.Mapping):
|
|||
"""
|
||||
env = builtins.__xonsh__.env
|
||||
ve = self[name]
|
||||
if "VIRTUAL_ENV" in env:
|
||||
if env["VIRTUAL_ENV"]:
|
||||
self.deactivate()
|
||||
|
||||
type(self).oldvars = {"PATH": list(env["PATH"])}
|
||||
|
@ -370,7 +371,7 @@ class Vox(collections.abc.Mapping):
|
|||
Deactivate the active virtual environment. Returns its name.
|
||||
"""
|
||||
env = builtins.__xonsh__.env
|
||||
if "VIRTUAL_ENV" not in env:
|
||||
if not env["VIRTUAL_ENV"]:
|
||||
raise NoEnvironmentActive("No environment currently active.")
|
||||
|
||||
env_name = self.active()
|
||||
|
@ -380,7 +381,7 @@ class Vox(collections.abc.Mapping):
|
|||
env[k] = v
|
||||
del type(self).oldvars
|
||||
|
||||
env.pop("VIRTUAL_ENV")
|
||||
del env["VIRTUAL_ENV"]
|
||||
|
||||
events.vox_on_deactivate.fire(name=env_name, path=self[env_name].env)
|
||||
return env_name
|
||||
|
|
Loading…
Add table
Reference in a new issue