Merge branch 'master' into bash-to-xhs-doc

This commit is contained in:
laerus 2016-06-17 14:30:11 +03:00
commit 840815c806
45 changed files with 2258 additions and 2865 deletions

1
.gitignore vendored
View file

@ -3,6 +3,7 @@
*.xcf
*.egg
.eggs/
__amalgam__.py
lexer_table.py
parser_table.py
parser_test_table.py

View file

@ -94,6 +94,32 @@ If you want to lint the entire code base run::
$ pylint $(find tests xonsh -name \*.py | sort)
**********
Imports
**********
Xonsh source code may be amalgamated into a single file (``__amalgam__.py``)
to speed up imports. The way the code amalgamater works is that other modules
that are in the same package (and amalgamated) should be imported with::
from pkg.x import a, c, d
This is because the amalgamater puts all such modules in the same globals(),
which is effectively what the from-imports do. For example, ``xonsh.ast`` and
``xonsh.execer`` are both in the same package (``xonsh``). Thus they should use
the above from-import syntax.
Alternatively, for modules outside of the current package (or modules that are
not amalgamated) the import statement should be either ``import pkg.x`` or
``import pkg.x as name``. This is because these are the only cases where the
amalgamater is able to automatically insert lazy imports in a way that is guaranteed
to be safe. This is due to the ambiguity that ``from pkg.x import name`` may
import a variable that cannot be lazily constructed or may import a module.
So the simple rules to follow are that:
1. Import objects from modules in the same package directly using a from-import,
2. Import objects from modules outside of the package via a direct import
   or import-as statement.
How to Test
================

378
amalgamate.py Executable file
View file

@ -0,0 +1,378 @@
#!/usr/bin/env python
"""A package-based, source code amalgamater."""
import os
import sys
import pprint
from itertools import repeat
from collections import namedtuple
from collections.abc import Mapping
from ast import parse, walk, literal_eval, Import, ImportFrom
ModNode = namedtuple('ModNode', ['name', 'pkgdeps', 'extdeps'])
ModNode.__doc__ = """Module node for dependency graph.
Attributes
----------
name : str
Module name.
pkgdeps : frozenset of str
Module dependencies in the same package.
extdeps : frozenset of str
External module dependencies from outside of the package.
"""
class SourceCache(Mapping):
    """Stores / loads source code for files based on package and module names.

    Keys are ``(pkg, name)`` tuples; values are the raw source text.  On a
    cache miss the file ``pkg/name.py`` is read from disk and memoized.
    """

    def __init__(self, *args, **kwargs):
        self._d = dict(*args, **kwargs)

    def __getitem__(self, key):
        cache = self._d
        try:
            return cache[key]
        except KeyError:
            pass
        pkg, name = key
        # translate the dotted package name into a filesystem path
        fname = os.path.join(pkg.replace('.', os.sep), name + '.py')
        with open(fname) as f:
            src = f.read()
        cache[key] = src
        return src

    def __iter__(self):
        return iter(self._d)

    def __len__(self):
        return len(self._d)


SOURCES = SourceCache()
def make_node(name, pkg, allowed):
    """Makes a node by parsing a file and traversing its AST.

    Parameters
    ----------
    name : str
        Module name (no extension) inside the package.
    pkg : str
        Dotted package name.
    allowed : set of str
        Module names eligible to count as intra-package dependencies.

    Returns
    -------
    ModNode
        Node with the module's intra-package and external dependencies.
    """
    raw = SOURCES[pkg, name]
    tree = parse(raw, filename=name)
    # we only want to deal with global import statements
    pkgdot = pkg + '.'
    pkgdeps = set()
    extdeps = set()
    for a in tree.body:
        if isinstance(a, Import):
            for n in a.names:
                p, dot, m = n.name.rpartition('.')
                if p == pkg and m in allowed:
                    # 'import pkg.mod' of an amalgamated sibling
                    pkgdeps.add(m)
                else:
                    extdeps.add(n.name)
        elif isinstance(a, ImportFrom):
            if a.module is None:
                # Relative import, e.g. 'from . import x'.  The original
                # code crashed here (None.startswith).  A level-1 relative
                # import is equivalent to 'from pkg import x'; deeper
                # levels point outside this package and are ignored.
                if a.level == 1:
                    pkgdeps.update(n.name for n in a.names
                                   if n.name in allowed)
            elif a.module == pkg:
                pkgdeps.update(n.name for n in a.names if n.name in allowed)
            elif a.module.startswith(pkgdot):
                p, dot, m = a.module.rpartition('.')
                if p == pkg and m in allowed:
                    pkgdeps.add(m)
                else:
                    # sub-package module that is not amalgamated
                    extdeps.add(a.module)
    return ModNode(name, frozenset(pkgdeps), frozenset(extdeps))
def make_graph(pkg, exclude=None):
    """Create a graph (dict) of module dependencies."""
    pkgdir = pkg.replace('.', os.sep)
    # every non-dunder .py file in the package directory is a candidate
    allowed = {stem for stem, ext in map(os.path.splitext, os.listdir(pkgdir))
               if ext == '.py' and not stem.startswith('__')}
    if exclude:
        allowed -= exclude
    return {stem: make_node(stem, pkg, allowed) for stem in allowed}
def depsort(graph):
    """Sort modules by dependency.

    Repeatedly peels off the modules whose intra-package dependencies are
    already satisfied; raises RuntimeError if a dependency cycle prevents
    progress.
    """
    pending = set(graph.keys())
    order = []
    solved = set()
    while pending:
        ready = {m for m in pending if not (graph[m].pkgdeps - solved)}
        if not ready:
            msg = ('\nsolved order = {0}\nremaining = {1}\nCycle detected in '
                   'module graph!').format(pprint.pformat(order),
                                           pprint.pformat(pending))
            raise RuntimeError(msg)
        solved |= ready
        pending -= ready
        # alphabetical within each dependency layer keeps output stable
        order.extend(sorted(ready))
    return order
LAZY_IMPORTS = """
from sys import modules as _modules
from types import ModuleType as _ModuleType
from importlib import import_module as _import_module
class _LazyModule(_ModuleType):
def __init__(self, pkg, mod, asname=None):
'''Lazy module 'pkg.mod' in package 'pkg'.'''
self.__dct__ = {
'loaded': False,
'pkg': pkg, # pkg
'mod': mod, # pkg.mod
'asname': asname, # alias
}
@classmethod
def load(cls, pkg, mod, asname=None):
if mod in _modules:
return _modules[pkg]
else:
return cls(pkg, mod, asname)
def __getattribute__(self, name):
if name == '__dct__':
return super().__getattribute__(name)
dct = self.__dct__
mod = dct['mod']
if dct['loaded']:
m = _modules[mod]
else:
m = _import_module(mod)
glbs = globals()
pkg = dct['pkg']
asname = dct['asname']
if asname is None:
glbs[pkg] = _modules[pkg]
else:
glbs[asname] = m
dct['loaded'] = True
return getattr(m, name)
"""
def get_lineno(node, default=0):
    """Return ``node.lineno``, or *default* when the node has none."""
    try:
        return node.lineno
    except AttributeError:
        return default


def min_line(node):
    """Return the smallest line number found anywhere within *node*."""
    # the node's own lineno doubles as the fallback for children without one
    fallback = get_lineno(node)
    return min(get_lineno(child, fallback) for child in walk(node))
def format_import(names):
    """Format an import line.

    *names* is an iterable of ``(node_type, name, asname)`` tuples; the
    first element is ignored here.
    """
    rendered = [name if asname is None else name + ' as ' + asname
                for _, name, asname in names]
    return 'import ' + ', '.join(rendered) + '\n'
def format_lazy_import(names):
    """Formats lazy import lines.

    Each ``(node_type, name, asname)`` tuple becomes a ``_LazyModule.load``
    binding in the generated source; the bound name is the top-level
    package (or *asname* when given), matching normal ``import`` semantics.
    """
    lines = ''
    for _, name, asname in names:
        pkg, _, _ = name.partition('.')
        # (removed dead local: 'target = asname or pkg' was never used)
        if asname is None:
            line = '{pkg} = _LazyModule.load({pkg!r}, {mod!r})\n'
        else:
            line = '{asname} = _LazyModule.load({pkg!r}, {mod!r}, {asname!r})\n'
        lines += line.format(pkg=pkg, mod=name, asname=asname)
    return lines
def format_from_import(names):
    """Format a from import line.

    *names* is a non-empty iterable of ``(node_type, module, name, asname)``
    tuples, all sharing the same *module*.
    """
    targets = []
    for _, module, name, asname in names:
        targets.append(name if asname is None else name + ' as ' + asname)
    # 'module' is the loop variable from the final tuple; all entries of a
    # single ImportFrom statement share it.
    return 'from ' + module + ' import ' + ', '.join(targets) + '\n'
def rewrite_imports(name, pkg, order, imps):
    """Rewrite the global imports in the file given the amalgamation.

    Parameters
    ----------
    name : str
        Module (no extension) whose source is rewritten.
    pkg : str
        Dotted package name containing the module.
    order : sequence of str
        Modules being amalgamated; imports of these become comments since
        their code is inlined.
    imps : set
        Imports already emitted earlier in the amalgamation; mutated in
        place so later modules do not repeat them.

    Returns
    -------
    str
        The module source with its top-level imports rewritten.
    """
    pkgdot = pkg + '.'
    raw = SOURCES[pkg, name]
    tree = parse(raw, filename=name)
    replacements = []  # list of (startline, stopline, str) tuples
    # collect replacements in forward direction
    for a, b in zip(tree.body, tree.body[1:] + [None]):
        if not isinstance(a, (Import, ImportFrom)):
            continue
        # replace from the first line of this statement up to (but not
        # including) the first line of the following top-level statement
        # NOTE(review): when the import is the last statement, stop is
        # len(tree.body), which counts statements rather than source
        # lines — looks suspicious for a trailing multi-line file; verify.
        start = min_line(a) - 1
        stop = len(tree.body) if b is None else min_line(b) - 1
        if isinstance(a, Import):
            keep = []
            for n in a.names:
                p, dot, m = n.name.rpartition('.')
                if p == pkg and m in order:
                    # 'import pkg.mod' of an amalgamated module cannot be
                    # made lazy safely — hard error.
                    # NOTE(review): 'almagate' in the message looks like a
                    # typo for 'amalgamate' (kept byte-identical here).
                    msg = ('Cannot amalgamate almagate import of '
                           'amalgamated module:\n\n  import {0}.{1}\n'
                           '\nin {0}/{2}.py').format(pkg, n.name, name)
                    raise RuntimeError(msg)
                imp = (Import, n.name, n.asname)
                if imp not in imps:
                    imps.add(imp)
                    keep.append(imp)
            if len(keep) == 0:
                # everything already imported earlier; leave a breadcrumb
                s = ', '.join(n.name for n in a.names)
                s = '# amalgamated ' + s + '\n'
            else:
                s = format_lazy_import(keep)
            replacements.append((start, stop, s))
        elif isinstance(a, ImportFrom):
            p, dot, m = a.module.rpartition('.')
            if a.module == pkg:
                # 'from pkg import mod' of an amalgamated module: hard error
                for n in a.names:
                    if n.name in order:
                        msg = ('Cannot amalgamate import of '
                               'amalgamated module:\n\n  from {0} import {1}\n'
                               '\nin {0}/{2}.py').format(pkg, n.name, name)
                        raise RuntimeError(msg)
            elif a.module.startswith(pkgdot) and p == pkg and m in order:
                # intra-package import of an amalgamated sibling: its code
                # is inlined above, so the import becomes a comment
                replacements.append((start, stop,
                                     '# amalgamated ' + a.module + '\n'))
            else:
                keep = []
                for n in a.names:
                    imp = (ImportFrom, a.module, n.name, n.asname)
                    if imp not in imps:
                        imps.add(imp)
                        keep.append(imp)
                if len(keep) == len(a.names):
                    continue  # all new imports
                elif len(keep) == 0:
                    s = ', '.join(n.name for n in a.names)
                    s = '# amalgamated from ' + a.module + ' import ' + s + '\n'
                else:
                    s = format_from_import(keep)
                replacements.append((start, stop, s))
    # apply replacements in reverse so earlier indices stay valid
    lines = raw.splitlines(keepends=True)
    for start, stop, s in replacements[::-1]:
        lines[start] = s
        for i in range(stop - start - 1):
            del lines[start+1]
    return ''.join(lines)
def amalgamate(order, graph, pkg):
    """Create amalgamated source.

    Concatenates a generated module docstring, the lazy-import preamble,
    and each module's rewritten source in dependency order.
    """
    parts = [('\"\"\"Amalgamation of {0} package, made up of the following '
              'modules, in order:\n\n* ').format(pkg)]
    parts.append('\n* '.join(order))
    parts.append('\n\n\"\"\"\n')
    parts.append(LAZY_IMPORTS)
    imps = set()  # shared across modules so imports are emitted only once
    for name in order:
        body = rewrite_imports(name, pkg, order, imps)
        parts.append('#\n# ' + name + '\n#\n' + body + '\n')
    return ''.join(parts)
def write_amalgam(src, pkg):
    """Write out __amalgam__.py file"""
    target = os.path.join(pkg.replace('.', os.sep), '__amalgam__.py')
    with open(target, 'w') as f:
        f.write(src)
def _init_name_lines(pkg):
    # Locate the package's __init__.py and return (filename, list of lines).
    fname = os.path.join(pkg.replace('.', os.sep), '__init__.py')
    with open(fname) as f:
        lines = f.read().splitlines()
    return fname, lines
def read_exclude(pkg):
    """reads in modules to exclude from __init__.py"""
    _, lines = _init_name_lines(pkg)
    exclude = set()
    # each marker line lists module names after '# amalgamate exclude'
    marked = (ln.split()[3:] for ln in lines
              if ln.startswith('# amalgamate exclude'))
    for mods in marked:
        exclude.update(mods)
    return exclude
FAKE_LOAD = """
import os as _os
if _os.getenv('{debug}', ''):
pass
else:
import sys as _sys
try:
from {pkg} import __amalgam__
{load}
del __amalgam__
except ImportError:
pass
del _sys
del _os
""".strip()
def rewrite_init(pkg, order, debug='DEBUG'):
    """Rewrites the init file to insert modules.

    Finds the '# amalgamate' / '# amalgamate end' marker pair in the
    package's __init__.py and splices the FAKE_LOAD block (with one alias
    per module in *order*) between them, replacing any previous splice.
    """
    fname, lines = _init_name_lines(pkg)
    # NOTE(review): if either marker line is missing, start/stop stay
    # unbound and the code below raises NameError — verify markers are
    # guaranteed present in every amalgamated package's __init__.py.
    for i, line in enumerate(lines):
        if line.startswith('# amalgamate end'):
            stop = i
        elif line.startswith('# amalgamate'):
            start = i
    t = ("{1} = __amalgam__\n        "
         "_sys.modules['{0}.{1}'] = __amalgam__")
    load = '\n        '.join(t.format(pkg, m) for m in order)
    s = FAKE_LOAD.format(pkg=pkg, load=load, debug=debug)
    if start + 1 == stop:
        # markers are adjacent: first run, insert the block fresh
        lines.insert(stop, s)
    else:
        # markers already bracket an old block: overwrite its first line
        # and drop the remainder up to the end marker
        lines[start+1] = s
        lines = lines[:start+2] + lines[stop:]
    init = '\n'.join(lines)
    with open(fname, 'w') as f:
        f.write(init)
def _process_package(pkg, debug):
    # Amalgamate a single package and patch its __init__.py.
    print('Amalgamating ' + pkg)
    exclude = read_exclude(pkg)
    print('  excluding {}'.format(pprint.pformat(exclude)))
    graph = make_graph(pkg, exclude=exclude)
    order = depsort(graph)
    write_amalgam(amalgamate(order, graph, pkg), pkg)
    rewrite_init(pkg, order, debug=debug)
    print('  collapsed {} modules'.format(len(order)))


def main(args=None):
    """Command-line entry point.

    Arguments mirror sys.argv: each positional argument names a package to
    amalgamate, and '--debug=NAME' selects the environment variable that
    disables amalgamated imports for all subsequent packages.
    """
    args = sys.argv if args is None else args
    debug = 'DEBUG'
    for arg in args[1:]:
        if arg.startswith('--debug='):
            debug = arg[8:]
        else:
            _process_package(arg, debug)


if __name__ == '__main__':
    main()

43
news/amal.rst Normal file
View file

@ -0,0 +1,43 @@
**Added:**
* New amalgamate tool collapses modules inside of a package into a single
``__amalgam__.py`` module. This tool glues together all of the code from the
modules in a package, finds and removes intra-package imports, makes all
non-package imports lazy, and adds hooks into the ``__init__.py``.
This helps make initial imports of modules fast and decreases startup time.
Packages and sub-packages must be amalgamated separately.
* New lazy and self-destructive module ``xonsh.lazyasd`` adds a suite of
classes for delayed creation of objects.
- A ``LazyObject`` won't be created until it has an attribute accessed.
- A ``LazyDict`` will load each value only when a key is accessed.
- A ``LazyBool`` will only be created when ``__bool__()`` is called.
Additionally, when fully loaded, the above objects will replace themselves
by name in the context that they were handed, thus dereferencing themselves.
This is useful for global variables that may be expensive to create,
should only be created once, and may not be used in any particular session.
* New ``xon.sh`` script added for launching xonsh from a sh environment.
This should be used if the normal ``xonsh`` script does not work for
some reason.
**Changed:**
* ``$XONSH_DEBUG`` will now suppress amalgamated imports. This usually needs to be
set in the calling environment or prior to *any* xonsh imports.
* Restructured ``xonsh.platform`` to be fully lazy.
* Restructured ``xonsh.ansi_colors`` to be fully lazy.
* Ensured the ``pygments`` and ``xonsh.pyghooks`` are not imported until
actually needed.
* Yacc parser is now loaded in a background thread.
**Deprecated:** None
**Removed:**
* The ``'console_scripts'`` option to setuptools has been removed. It was found
to cause slowdowns of over 150 ms on every startup.
**Fixed:** None
**Security:** None

2
scripts/xon.sh Executable file
View file

@ -0,0 +1,2 @@
#!/bin/sh
# Launch xonsh from a plain sh environment with unbuffered output.
# "$@" (quoted) forwards each argument intact; unquoted $@ would word-split
# and glob-expand arguments containing spaces or wildcards.
/usr/bin/env PYTHONUNBUFFERED=1 python3 -u -m xonsh "$@"

View file

@ -33,9 +33,8 @@ try:
except ImportError:
HAVE_JUPYTER = False
from xonsh import __version__ as XONSH_VERSION
TABLES = ['xonsh/lexer_table.py', 'xonsh/parser_table.py']
TABLES = ['xonsh/lexer_table.py', 'xonsh/parser_table.py', 'xonsh/__amalgam__.py']
def clean_tables():
@ -43,9 +42,12 @@ def clean_tables():
for f in TABLES:
if os.path.isfile(f):
os.remove(f)
print('Remove ' + f)
print('Removed ' + f)
os.environ['XONSH_DEBUG'] = '1'
from xonsh import __version__ as XONSH_VERSION
def build_tables():
"""Build the lexer/parser modules."""
print('Building lexer and parser tables.')
@ -53,6 +55,8 @@ def build_tables():
from xonsh.parser import Parser
Parser(lexer_table='lexer_table', yacc_table='parser_table',
outputdir='xonsh')
import amalgamate
amalgamate.main(['amalgamate', '--debug=XONSH_DEBUG', 'xonsh'])
sys.path.pop(0)
@ -159,6 +163,11 @@ def main():
pass
with open(os.path.join(os.path.dirname(__file__), 'README.rst'), 'r') as f:
readme = f.read()
scripts = ['scripts/xon.sh']
if 'win' in sys.platform:
scripts.append('scripts/xonsh.bat')
else:
scripts.append('scripts/xonsh')
skw = dict(
name='xonsh',
description='A general purpose, Python-ish shell',
@ -175,18 +184,25 @@ def main():
'xonsh.xoreutils', 'xontrib', 'xonsh.completers'],
package_dir={'xonsh': 'xonsh', 'xontrib': 'xontrib'},
package_data={'xonsh': ['*.json'], 'xontrib': ['*.xsh']},
cmdclass=cmdclass
cmdclass=cmdclass,
scripts=scripts,
)
if HAVE_SETUPTOOLS:
# WARNING!!! Do not use setuptools 'console_scripts'
# It validates the depenendcies (of which we have none) everytime the
# 'xonsh' command is run. This validation adds ~0.2 sec. to the startup
# time of xonsh - for every single xonsh run. This prevents us from
# reaching the goal of a startup time of < 0.1 sec. So never ever write
# the following:
#
# 'console_scripts': ['xonsh = xonsh.main:main'],
#
# END WARNING
skw['entry_points'] = {
'pygments.lexers': ['xonsh = xonsh.pyghooks:XonshLexer',
'xonshcon = xonsh.pyghooks:XonshConsoleLexer'],
'console_scripts': ['xonsh = xonsh.main:main'],
}
skw['cmdclass']['develop'] = xdevelop
else:
skw['scripts'] = ['scripts/xonsh'] if 'win' not in sys.platform else ['scripts/xonsh.bat']
setup(**skw)

View file

@ -100,7 +100,7 @@ def test_show_cmd():
"""Run and evaluate the output of the given show command."""
stdout.seek(0, io.SEEK_SET)
stdout.truncate()
history._main(hist, hist_args)
history._hist_main(hist, hist_args)
stdout.seek(0, io.SEEK_SET)
hist_lines = stdout.readlines()
yield assert_equal, len(commands), len(hist_lines)

View file

@ -7,7 +7,7 @@ import nose
from nose.tools import assert_equal, assert_is_instance
assert_equal.__self__.maxDiff = None
from xonsh.lazyjson import index, dump, LazyJSON, Node
from xonsh.lazyjson import index, ljdump, LazyJSON, LJNode
def test_index_int():
exp = {'offsets': 0, 'sizes': 2}
@ -63,7 +63,7 @@ def test_index_dict_dict_int():
def test_lazy_load_index():
f = StringIO()
dump({'wakka': 42}, f)
ljdump({'wakka': 42}, f)
f.seek(0)
lj = LazyJSON(f)
assert_equal({'wakka': 10, '__total__': 0}, lj.offsets)
@ -71,14 +71,14 @@ def test_lazy_load_index():
def test_lazy_int():
f = StringIO()
dump(42, f)
ljdump(42, f)
f.seek(0)
lj = LazyJSON(f)
assert_equal(42, lj.load())
def test_lazy_str():
f = StringIO()
dump('wakka', f)
ljdump('wakka', f)
f.seek(0)
lj = LazyJSON(f)
assert_equal('wakka', lj.load())
@ -86,7 +86,7 @@ def test_lazy_str():
def test_lazy_list_empty():
x = []
f = StringIO()
dump(x, f)
ljdump(x, f)
f.seek(0)
lj = LazyJSON(f)
assert_equal(0, len(lj))
@ -95,7 +95,7 @@ def test_lazy_list_empty():
def test_lazy_list_ints():
x = [0, 1, 6, 28, 496, 8128]
f = StringIO()
dump(x, f)
ljdump(x, f)
f.seek(0)
lj = LazyJSON(f)
assert_equal(28, lj[3])
@ -106,7 +106,7 @@ def test_lazy_list_ints():
def test_lazy_list_ints():
x = [0, 1, 6, 28, 496, 8128]
f = StringIO()
dump(x, f)
ljdump(x, f)
f.seek(0)
lj = LazyJSON(f)
assert_equal(28, lj[3])
@ -117,7 +117,7 @@ def test_lazy_list_ints():
def test_lazy_list_str():
x = ['I', 'have', 'seen', 'the', 'wind', 'blow']
f = StringIO()
dump(x, f)
ljdump(x, f)
f.seek(0)
lj = LazyJSON(f)
assert_equal('the', lj[3])
@ -128,7 +128,7 @@ def test_lazy_list_str():
def test_lazy_list_ints():
x = [0, 1, 6, 28, 496, 8128]
f = StringIO()
dump(x, f)
ljdump(x, f)
f.seek(0)
lj = LazyJSON(f)
assert_equal(28, lj[3])
@ -139,10 +139,10 @@ def test_lazy_list_ints():
def test_lazy_list_list_ints():
x = [[0, 1], [6, 28], [496, 8128]]
f = StringIO()
dump(x, f)
ljdump(x, f)
f.seek(0)
lj = LazyJSON(f)
assert_is_instance(lj[1], Node)
assert_is_instance(lj[1], LJNode)
assert_equal(28, lj[1][1])
assert_equal([6, 28], lj[1].load())
assert_equal(x, lj.load())
@ -150,7 +150,7 @@ def test_lazy_list_list_ints():
def test_lazy_dict_empty():
x = {}
f = StringIO()
dump(x, f)
ljdump(x, f)
f.seek(0)
lj = LazyJSON(f)
assert_equal(0, len(lj))
@ -158,7 +158,7 @@ def test_lazy_dict_empty():
def test_lazy_dict():
f = StringIO()
dump({'wakka': 42}, f)
ljdump({'wakka': 42}, f)
f.seek(0)
lj = LazyJSON(f)
assert_equal(['wakka'], list(lj.keys()))
@ -169,11 +169,11 @@ def test_lazy_dict():
def test_lazy_dict_dict_int():
x = {'wakka': {'jawaka': 42}}
f = StringIO()
dump(x, f)
ljdump(x, f)
f.seek(0)
lj = LazyJSON(f)
assert_equal(['wakka'], list(lj.keys()))
assert_is_instance(lj['wakka'], Node)
assert_is_instance(lj['wakka'], LJNode)
assert_equal(42, lj['wakka']['jawaka'])
assert_equal(1, len(lj))
assert_equal(x, lj.load())

View file

@ -1 +1,91 @@
__version__ = '0.3.4'
# amalgamate exclude jupyter_kernel parser_table parser_test_table pyghooks
# amalgamate exclude winutils wizard
import os as _os
if _os.getenv('XONSH_DEBUG', ''):
pass
else:
import sys as _sys
try:
from xonsh import __amalgam__
completer = __amalgam__
_sys.modules['xonsh.completer'] = __amalgam__
lazyasd = __amalgam__
_sys.modules['xonsh.lazyasd'] = __amalgam__
lazyjson = __amalgam__
_sys.modules['xonsh.lazyjson'] = __amalgam__
pretty = __amalgam__
_sys.modules['xonsh.pretty'] = __amalgam__
timings = __amalgam__
_sys.modules['xonsh.timings'] = __amalgam__
ansi_colors = __amalgam__
_sys.modules['xonsh.ansi_colors'] = __amalgam__
codecache = __amalgam__
_sys.modules['xonsh.codecache'] = __amalgam__
openpy = __amalgam__
_sys.modules['xonsh.openpy'] = __amalgam__
platform = __amalgam__
_sys.modules['xonsh.platform'] = __amalgam__
teepty = __amalgam__
_sys.modules['xonsh.teepty'] = __amalgam__
jobs = __amalgam__
_sys.modules['xonsh.jobs'] = __amalgam__
parser = __amalgam__
_sys.modules['xonsh.parser'] = __amalgam__
tokenize = __amalgam__
_sys.modules['xonsh.tokenize'] = __amalgam__
tools = __amalgam__
_sys.modules['xonsh.tools'] = __amalgam__
vox = __amalgam__
_sys.modules['xonsh.vox'] = __amalgam__
ast = __amalgam__
_sys.modules['xonsh.ast'] = __amalgam__
contexts = __amalgam__
_sys.modules['xonsh.contexts'] = __amalgam__
diff_history = __amalgam__
_sys.modules['xonsh.diff_history'] = __amalgam__
dirstack = __amalgam__
_sys.modules['xonsh.dirstack'] = __amalgam__
foreign_shells = __amalgam__
_sys.modules['xonsh.foreign_shells'] = __amalgam__
inspectors = __amalgam__
_sys.modules['xonsh.inspectors'] = __amalgam__
lexer = __amalgam__
_sys.modules['xonsh.lexer'] = __amalgam__
proc = __amalgam__
_sys.modules['xonsh.proc'] = __amalgam__
xontribs = __amalgam__
_sys.modules['xonsh.xontribs'] = __amalgam__
environ = __amalgam__
_sys.modules['xonsh.environ'] = __amalgam__
history = __amalgam__
_sys.modules['xonsh.history'] = __amalgam__
base_shell = __amalgam__
_sys.modules['xonsh.base_shell'] = __amalgam__
replay = __amalgam__
_sys.modules['xonsh.replay'] = __amalgam__
tracer = __amalgam__
_sys.modules['xonsh.tracer'] = __amalgam__
xonfig = __amalgam__
_sys.modules['xonsh.xonfig'] = __amalgam__
aliases = __amalgam__
_sys.modules['xonsh.aliases'] = __amalgam__
readline_shell = __amalgam__
_sys.modules['xonsh.readline_shell'] = __amalgam__
built_ins = __amalgam__
_sys.modules['xonsh.built_ins'] = __amalgam__
execer = __amalgam__
_sys.modules['xonsh.execer'] = __amalgam__
imphooks = __amalgam__
_sys.modules['xonsh.imphooks'] = __amalgam__
shell = __amalgam__
_sys.modules['xonsh.shell'] = __amalgam__
main = __amalgam__
_sys.modules['xonsh.main'] = __amalgam__
del __amalgam__
except ImportError:
pass
del _sys
del _os
# amalgamate end

View file

@ -12,15 +12,15 @@ from xonsh.dirstack import cd, pushd, popd, dirs, _get_cwd
from xonsh.environ import locate_binary
from xonsh.foreign_shells import foreign_shell_data
from xonsh.jobs import jobs, fg, bg, clean_jobs
from xonsh.history import main as history_alias
from xonsh.history import history_main
from xonsh.platform import ON_ANACONDA, ON_DARWIN, ON_WINDOWS, scandir
from xonsh.proc import foreground
from xonsh.replay import main as replay_main
from xonsh.replay import replay_main
from xonsh.timings import timeit_alias
from xonsh.tools import (XonshError, argvquote, escape_windows_cmd_string,
to_bool)
from xonsh.vox import Vox
from xonsh.xontribs import main as xontribs_main
from xonsh.xontribs import xontribs_main
from xonsh.xoreutils import _which
from xonsh.completers._aliases import completer_alias
@ -457,9 +457,9 @@ def xonfig(args, stdin=None):
@foreground
def trace(args, stdin=None):
"""Runs the xonsh tracer utility."""
from xonsh.tracer import main # lazy import
from xonsh.tracer import tracermain # lazy import
try:
return main(args)
return tracermain(args)
except SystemExit:
pass
@ -509,7 +509,7 @@ def make_default_aliases():
'source-bash': ['source-foreign', 'bash', '--sourcer=source'],
'source-cmd': source_cmd,
'source-foreign': source_foreign,
'history': history_alias,
'history': history_main,
'replay': replay_main,
'!!': bang_bang,
'!n': bang_n,

File diff suppressed because it is too large Load diff

View file

@ -13,10 +13,10 @@ from ast import Module, Num, Expr, Str, Bytes, UnaryOp, UAdd, USub, Invert, \
Attribute, Global, Nonlocal, If, While, For, withitem, With, Try, \
ExceptHandler, FunctionDef, ClassDef, Starred, NodeTransformer, \
Interactive, Expression, Index, literal_eval, dump, walk, increment_lineno
from ast import Ellipsis # pylint: disable=redefined-builtin
from ast import Ellipsis as EllipsisNode
# pylint: enable=unused-import
import textwrap
from itertools import repeat
import itertools
from xonsh.tools import subproc_toks, find_next_break
from xonsh.platform import PYTHON_VERSION_INFO
@ -68,7 +68,7 @@ def get_lineno(node, default=0):
def min_line(node):
"""Computes the minimum lineno."""
node_line = get_lineno(node)
return min(map(get_lineno, walk(node), repeat(node_line)))
return min(map(get_lineno, walk(node), itertools.repeat(node_line)))
def max_line(node):
@ -83,7 +83,7 @@ def get_col(node, default=-1):
def min_col(node):
"""Computes the minimum col_offset."""
return min(map(get_col, walk(node), repeat(node.col_offset)))
return min(map(get_col, walk(node), itertools.repeat(node.col_offset)))
def max_col(node):

View file

@ -6,15 +6,14 @@ import sys
import time
import builtins
from xonsh.tools import XonshError, escape_windows_cmd_string, print_exception
from xonsh.tools import (XonshError, escape_windows_cmd_string, print_exception,
DefaultNotGiven)
from xonsh.platform import HAS_PYGMENTS, ON_WINDOWS
from xonsh.codecache import (should_use_cache, code_cache_name,
code_cache_check, get_cache_filename,
update_cache, run_compiled_code)
from xonsh.completer import Completer
from xonsh.environ import multiline_prompt, format_prompt, partial_format_prompt
if HAS_PYGMENTS:
from xonsh.pyghooks import XonshStyle
class _TeeOut(object):
@ -118,11 +117,26 @@ class BaseShell(object):
self.buffer = []
self.need_more_lines = False
self.mlprompt = None
if HAS_PYGMENTS:
env = builtins.__xonsh_env__
self.styler = XonshStyle(env.get('XONSH_COLOR_STYLE'))
else:
self.styler = None
self._styler = DefaultNotGiven
@property
def styler(self):
if self._styler is DefaultNotGiven:
if HAS_PYGMENTS:
from xonsh.pyghooks import XonshStyle
env = builtins.__xonsh_env__
self._styler = XonshStyle(env.get('XONSH_COLOR_STYLE'))
else:
self._styler = None
return self._styler
@styler.setter
def styler(self, value):
self._styler = value
@styler.deleter
def styler(self):
self._styler = DefaultNotGiven
def emptyline(self):
"""Called when an empty line has been entered."""

View file

@ -19,12 +19,13 @@ import sys
import tempfile
import time
from xonsh.lazyasd import LazyObject
from xonsh.history import History
from xonsh.tokenize import SearchPath
from xonsh.inspectors import Inspector
from xonsh.aliases import Aliases, make_default_aliases
from xonsh.environ import Env, default_env, locate_binary
from xonsh.foreign_shells import load_foreign_aliases
from xonsh.history import History
from xonsh.inspectors import Inspector
from xonsh.tokenize import SearchPath
from xonsh.jobs import add_job, wait_for_active_job
from xonsh.platform import ON_POSIX, ON_WINDOWS
from xonsh.proc import (ProcProxy, SimpleProcProxy, ForegroundProcProxy,
@ -38,7 +39,7 @@ from xonsh.tools import (
ENV = None
BUILTINS_LOADED = False
INSPECTOR = Inspector()
INSPECTOR = LazyObject(Inspector, globals(), 'INSPECTOR')
AT_EXIT_SIGNALS = (signal.SIGABRT, signal.SIGFPE, signal.SIGILL, signal.SIGSEGV,
signal.SIGTERM)
@ -721,7 +722,8 @@ def load_builtins(execer=None, config=None, login=False, ctx=None):
def _lastflush(s=None, f=None):
builtins.__xonsh_history__.flush(at_exit=True)
if hasattr(builtins, '__xonsh_history__'):
builtins.__xonsh_history__.flush(at_exit=True)
def unload_builtins():

View file

@ -4,6 +4,8 @@ import hashlib
import marshal
import builtins
from xonsh.lazyasd import LazyObject
def _splitpath(path, sofar=[]):
folder, path = os.path.split(path)
if path == "":
@ -13,8 +15,15 @@ def _splitpath(path, sofar=[]):
else:
return _splitpath(folder, sofar + [path])
_CHARACTER_MAP = {chr(o): '_%s' % chr(o+32) for o in range(65, 91)}
_CHARACTER_MAP.update({'.': '_.', '_': '__'})
def _character_map():
cmap = {chr(o): '_%s' % chr(o+32) for o in range(65, 91)}
cmap.update({'.': '_.', '_': '__'})
return cmap
_CHARACTER_MAP = LazyObject(_character_map, globals(), '_CHARACTER_MAP')
del _character_map
def _cache_renamer(path, code=False):
@ -133,7 +142,7 @@ def run_script_with_cache(filename, execer, glb=None, loc=None, mode='exec'):
with open(filename, 'r') as f:
code = f.read()
ccode = compile_code(filename, code, execer, glb, loc, mode)
update_cache(ccode, cachefname)
update_cache(ccode, cachefname)
run_compiled_code(ccode, glb, loc, mode)
@ -162,7 +171,7 @@ def code_cache_check(cachefname):
with open(cachefname, 'rb') as cfile:
ccode = marshal.load(cfile)
run_cached = True
return run_cached, ccode
return run_cached, ccode
def run_code_with_cache(code, execer, glb=None, loc=None, mode='exec'):
@ -178,5 +187,5 @@ def run_code_with_cache(code, execer, glb=None, loc=None, mode='exec'):
run_cached, ccode = code_cache_check(cachefname)
if not run_cached:
ccode = compile_code(filename, code, execer, glb, loc, mode)
update_cache(ccode, cachefname)
update_cache(ccode, cachefname)
run_compiled_code(ccode, glb, loc, mode)

View file

@ -1,16 +1,15 @@
# -*- coding: utf-8 -*-
"""A (tab-)completer for xonsh."""
import builtins
import collections.abc as abc
from collections import Sequence
from xonsh.completers.bash import update_bash_completion
import xonsh.completers.bash as compbash
class Completer(object):
"""This provides a list of optional completions for the xonsh shell."""
def __init__(self):
update_bash_completion()
compbash.update_bash_completion()
def complete(self, prefix, line, begidx, endidx, ctx=None):
"""Complete the string, given a possible execution context.
@ -42,7 +41,7 @@ class Completer(object):
out = func(prefix, line, begidx, endidx, ctx)
except StopIteration:
return set(), len(prefix)
if isinstance(out, Sequence):
if isinstance(out, abc.Sequence):
res, lprefix = out
else:
res = out

View file

@ -35,7 +35,8 @@ for ((i=0;i<${{#COMPREPLY[*]}};i++)) do echo ${{COMPREPLY[i]}}; done
if ON_WINDOWS:
from xonsh.platform import WINDOWS_BASH_COMMAND as BASH_COMMAND
from xonsh.platform import windows_bash_command
BASH_COMMAND = windows_bash_command()
else:
BASH_COMMAND = 'bash'

View file

@ -1,10 +1,10 @@
# -*- coding: utf-8 -*-
"""Tools for diff'ing two xonsh history files in a meaningful fashion."""
from datetime import datetime
from itertools import zip_longest
from difflib import SequenceMatcher
import difflib
import datetime
import itertools
from xonsh import lazyjson
from xonsh.lazyjson import LazyJSON
from xonsh.tools import print_color
NO_COLOR = '{NO_COLOR}'
@ -22,7 +22,7 @@ EQUAL = 'equal'
def bold_str_diff(a, b, sm=None):
if sm is None:
sm = SequenceMatcher()
sm = difflib.SequenceMatcher()
aline = RED + '- '
bline = GREEN + '+ '
sm.set_seqs(a, b)
@ -53,12 +53,12 @@ def greenline(line):
def highlighted_ndiff(a, b):
"""Returns a highlited string, with bold charaters where different."""
s = ''
sm = SequenceMatcher()
sm = difflib.SequenceMatcher()
sm.set_seqs(a, b)
linesm = SequenceMatcher()
linesm = difflib.SequenceMatcher()
for tag, i1, i2, j1, j2 in sm.get_opcodes():
if tag == REPLACE:
for aline, bline in zip_longest(a[i1:i2], b[j1:j2]):
for aline, bline in itertools.zip_longest(a[i1:i2], b[j1:j2]):
if bline is None:
s += redline(aline)
elif aline is None:
@ -97,10 +97,10 @@ class HistoryDiffer(object):
verbose : bool, optional
Whether to print a verbose amount of information.
"""
self.a = lazyjson.LazyJSON(afile, reopen=reopen)
self.b = lazyjson.LazyJSON(bfile, reopen=reopen)
self.a = LazyJSON(afile, reopen=reopen)
self.b = LazyJSON(bfile, reopen=reopen)
self.verbose = verbose
self.sm = SequenceMatcher(autojunk=False)
self.sm = difflib.SequenceMatcher(autojunk=False)
def __del__(self):
self.a.close()
@ -114,10 +114,10 @@ class HistoryDiffer(object):
s += ' (' + lj['sessionid'] + ')'
s += ' [locked]' if lj['locked'] else ' [unlocked]'
ts = lj['ts'].load()
ts0 = datetime.fromtimestamp(ts[0])
ts0 = datetime.datetime.fromtimestamp(ts[0])
s += ' started: ' + ts0.isoformat(' ')
if ts[1] is not None:
ts1 = datetime.fromtimestamp(ts[1])
ts1 = datetime.datetime.fromtimestamp(ts[1])
s += ' stopped: ' + ts1.isoformat(' ') + ' runtime: ' + str(ts1 - ts0)
return s
@ -235,8 +235,9 @@ class HistoryDiffer(object):
s = ''
for tag, i1, i2, j1, j2 in sm.get_opcodes():
if tag == REPLACE:
for i, ainp, j, binp in zip_longest(range(i1, i2), ainps[i1:i2],
range(j1, j2), binps[j1:j2]):
zipper = itertools.zip_longest
for i, ainp, j, binp in zipper(range(i1, i2), ainps[i1:i2],
range(j1, j2), binps[j1:j2]):
if j is None:
s += self._cmd_in_one_diff(ainp, i, self.a, aid, RED)
elif i is None:
@ -278,7 +279,7 @@ class HistoryDiffer(object):
_HD_PARSER = None
def _create_parser(p=None):
def _dh_create_parser(p=None):
global _HD_PARSER
p_was_none = (p is None)
if _HD_PARSER is not None and p_was_none:
@ -297,17 +298,17 @@ def _create_parser(p=None):
return p
def _main_action(ns, hist=None):
def _dh_main_action(ns, hist=None):
hd = HistoryDiffer(ns.a, ns.b, reopen=ns.reopen, verbose=ns.verbose)
print_color(hd.format())
def main(args=None, stdin=None):
def diff_history_main(args=None, stdin=None):
"""Main entry point for history diff'ing"""
parser = _create_parser()
ns = parser.parse_args(args)
_main_action(ns)
_dh_main_action(ns)
if __name__ == '__main__':
main()
diff_history_main()

View file

@ -1,10 +1,11 @@
# -*- coding: utf-8 -*-
"""Directory stack and associated utilities for the xonsh shell."""
import os
import glob
import argparse
import builtins
from glob import iglob
from argparse import ArgumentParser
from xonsh.lazyasd import LazyObject
from xonsh.tools import get_sep
DIRSTACK = []
@ -49,7 +50,7 @@ def _try_cdpath(apath):
cdpaths = env.get('CDPATH')
for cdp in cdpaths:
globber = builtins.__xonsh_expand_path__(os.path.join(cdp, apath))
for cdpath_prefixed_path in iglob(globber):
for cdpath_prefixed_path in glob.iglob(globber):
return cdpath_prefixed_path
return apath
@ -105,6 +106,25 @@ def cd(args, stdin=None):
return None, None, 0
def _pushd_parser():
parser = argparse.ArgumentParser(prog="pushd")
parser.add_argument('dir', nargs='?')
parser.add_argument('-n',
dest='cd',
help='Suppresses the normal change of directory when'
' adding directories to the stack, so that only the'
' stack is manipulated.',
action='store_false')
parser.add_argument('-q',
dest='quiet',
help='Do not call dirs, regardless of $PUSHD_SILENT',
action='store_true')
return parser
pushd_parser = LazyObject(_pushd_parser, globals(), 'pushd_parser')
del _pushd_parser
def pushd(args, stdin=None):
"""xonsh command: pushd
@ -181,6 +201,26 @@ def pushd(args, stdin=None):
return None, None, 0
def _popd_parser():
parser = argparse.ArgumentParser(prog="popd")
parser.add_argument('dir', nargs='?')
parser.add_argument('-n',
dest='cd',
help='Suppresses the normal change of directory when'
' adding directories to the stack, so that only the'
' stack is manipulated.',
action='store_false')
parser.add_argument('-q',
dest='quiet',
help='Do not call dirs, regardless of $PUSHD_SILENT',
action='store_true')
return parser
popd_parser = LazyObject(_popd_parser, globals(), 'popd_parser')
del _popd_parser
def popd(args, stdin=None):
"""
xonsh command: popd
@ -250,6 +290,36 @@ def popd(args, stdin=None):
return None, None, 0
def _dirs_parser():
parser = argparse.ArgumentParser(prog="dirs")
parser.add_argument('N', nargs='?')
parser.add_argument('-c',
dest='clear',
help='Clears the directory stack by deleting all of'
' the entries.',
action='store_true')
parser.add_argument('-p',
dest='print_long',
help='Print the directory stack with one entry per'
' line.',
action='store_true')
parser.add_argument('-v',
dest='verbose',
help='Print the directory stack with one entry per'
' line, prefixing each entry with its index in the'
' stack.',
action='store_true')
parser.add_argument('-l',
dest='long',
help='Produces a longer listing; the default listing'
' format uses a tilde to denote the home directory.',
action='store_true')
return parsers
dirs_parser = LazyObject(_dirs_parser, globals(), 'dirs_parser')
del _dirs_parser
def dirs(args, stdin=None):
"""xonsh command: dirs
@ -321,54 +391,3 @@ def dirs(args, stdin=None):
out = o[idx]
return out + '\n', None, 0
pushd_parser = ArgumentParser(prog="pushd")
pushd_parser.add_argument('dir', nargs='?')
pushd_parser.add_argument('-n',
dest='cd',
help='Suppresses the normal change of directory when'
' adding directories to the stack, so that only the'
' stack is manipulated.',
action='store_false')
pushd_parser.add_argument('-q',
dest='quiet',
help='Do not call dirs, regardless of $PUSHD_SILENT',
action='store_true')
popd_parser = ArgumentParser(prog="popd")
popd_parser.add_argument('dir', nargs='?')
popd_parser.add_argument('-n',
dest='cd',
help='Suppresses the normal change of directory when'
' adding directories to the stack, so that only the'
' stack is manipulated.',
action='store_false')
popd_parser.add_argument('-q',
dest='quiet',
help='Do not call dirs, regardless of $PUSHD_SILENT',
action='store_true')
dirs_parser = ArgumentParser(prog="dirs")
dirs_parser.add_argument('N', nargs='?')
dirs_parser.add_argument('-c',
dest='clear',
help='Clears the directory stack by deleting all of'
' the entries.',
action='store_true')
dirs_parser.add_argument('-p',
dest='print_long',
help='Print the directory stack with one entry per'
' line.',
action='store_true')
dirs_parser.add_argument('-v',
dest='verbose',
help='Print the directory stack with one entry per'
' line, prefixing each entry with its index in the'
' stack.',
action='store_true')
dirs_parser.add_argument('-l',
dest='long',
help='Produces a longer listing; the default listing'
' format uses a tilde to denote the home directory.',
action='store_true')

View file

@ -27,7 +27,7 @@ from xonsh.foreign_shells import load_foreign_envs
from xonsh.platform import (BASH_COMPLETIONS_DEFAULT, ON_ANACONDA, ON_LINUX,
ON_WINDOWS, DEFAULT_ENCODING, ON_CYGWIN, PATH_DEFAULT)
from xonsh.tools import (
IS_SUPERUSER, always_true, always_false, ensure_string, is_env_path,
is_superuser, always_true, always_false, ensure_string, is_env_path,
str_to_env_path, env_path_to_str, is_bool, to_bool, bool_to_str,
is_history_tuple, to_history_tuple, history_tuple_to_str, is_float,
is_string, is_callable, is_string_or_callable,
@ -70,7 +70,8 @@ def to_debug(x):
execer's debug level.
"""
val = to_bool_or_int(x)
builtins.__xonsh_execer__.debug_level = val
if hasattr(builtins, '__xonsh_execer__'):
builtins.__xonsh_execer__.debug_level = val
return val
Ensurer = namedtuple('Ensurer', ['validate', 'convert', 'detype'])
@ -533,7 +534,8 @@ DEFAULT_DOCS = {
'XONSH_DEBUG': VarDocs(
'Sets the xonsh debugging level. This may be an integer or a boolean, '
'with higher values cooresponding to higher debuging levels and more '
'information presented.', configurable=False),
'information presented. Setting this variable prior to stating xonsh '
'will supress amalgamated imports.', configurable=False),
'XONSH_DATA_DIR': VarDocs(
'This is the location where xonsh data files are stored, such as '
'history.', default="'$XDG_DATA_HOME/xonsh'"),
@ -1138,7 +1140,7 @@ else:
FORMATTER_DICT = dict(
user=os.environ.get(USER, '<user>'),
prompt_end='#' if IS_SUPERUSER else '$',
prompt_end='#' if is_superuser() else '$',
hostname=socket.gethostname().split('.', 1)[0],
cwd=_dynamically_collapsed_pwd,
cwd_dir=lambda: os.path.dirname(_replace_home_cwd()),
@ -1418,7 +1420,6 @@ def foreign_env_fixes(ctx):
if 'PROMPT' in ctx:
del ctx['PROMPT']
def default_env(env=None, config=None, login=True):
"""Constructs a default xonsh environment."""
# in order of increasing precedence

View file

@ -8,7 +8,7 @@ import builtins
import warnings
from collections import Mapping
from xonsh import ast
from xonsh.ast import CtxAwareTransformer
from xonsh.parser import Parser
from xonsh.tools import subproc_toks, find_next_break
from xonsh.built_ins import load_builtins, unload_builtins
@ -39,7 +39,7 @@ class Execer(object):
self.filename = filename
self.debug_level = debug_level
self.unload = unload
self.ctxtransformer = ast.CtxAwareTransformer(self.parser)
self.ctxtransformer = CtxAwareTransformer(self.parser)
load_builtins(execer=self, config=config, login=login, ctx=xonsh_ctx)
def __del__(self):

View file

@ -5,11 +5,11 @@ import sys
import re
import json
import shlex
import tempfile
import builtins
import subprocess
from warnings import warn
from functools import lru_cache
from tempfile import NamedTemporaryFile
from collections import MutableMapping, Mapping, Sequence
from xonsh.tools import to_bool, ensure_string
@ -234,7 +234,7 @@ def foreign_shell_data(shell, interactive=True, login=False, envcmd=None,
if not use_tmpfile:
cmd.append(command)
else:
tmpfile = NamedTemporaryFile(suffix=tmpfile_ext, delete=False)
tmpfile = tempfile.NamedTemporaryFile(suffix=tmpfile_ext, delete=False)
tmpfile.write(command.encode('utf8'))
tmpfile.close()
cmd.append(tmpfile.name)

View file

@ -10,9 +10,9 @@ from glob import iglob
from collections import deque, Sequence, OrderedDict
from threading import Thread, Condition
from xonsh import lazyjson
from xonsh.lazyjson import LazyJSON, ljdump, LJNode
from xonsh.tools import ensure_int_or_slice, to_history_tuple
from xonsh import diff_history
from xonsh.diff_history import _dh_create_parser, _dh_main_action
def _gc_commands_to_rmfiles(hsize, files):
@ -116,12 +116,12 @@ class HistoryGC(Thread):
files = []
for f in fs:
try:
lj = lazyjson.LazyJSON(f, reopen=False)
lj = LazyJSON(f, reopen=False)
if only_unlocked and lj['locked']:
continue
# info: closing timestamp, number of commands, filename
files.append((lj['ts'][1] or time.time(),
len(lj.sizes['cmds']) - 1,
files.append((lj['ts'][1] or time.time(),
len(lj.sizes['cmds']) - 1,
f))
lj.close()
except (IOError, OSError, ValueError):
@ -162,13 +162,13 @@ class HistoryFlusher(Thread):
def dump(self):
"""Write the cached history to external storage."""
with open(self.filename, 'r', newline='\n') as f:
hist = lazyjson.LazyJSON(f).load()
hist = LazyJSON(f).load()
hist['cmds'].extend(self.buffer)
if self.at_exit:
hist['ts'][1] = time.time() # apply end time
hist['locked'] = False
with open(self.filename, 'w', newline='\n') as f:
lazyjson.dump(hist, f, sort_keys=True)
ljdump(hist, f, sort_keys=True)
class CommandField(Sequence):
@ -217,9 +217,9 @@ class CommandField(Sequence):
with self.hist._cond:
self.hist._cond.wait_for(self.i_am_at_the_front)
with open(self.hist.filename, 'r', newline='\n') as f:
lj = lazyjson.LazyJSON(f, reopen=False)
lj = LazyJSON(f, reopen=False)
rtn = lj['cmds'][key].get(self.field, self.default)
if isinstance(rtn, lazyjson.Node):
if isinstance(rtn, LJNode):
rtn = rtn.load()
queue.popleft()
return rtn
@ -271,7 +271,7 @@ class History(object):
meta['cmds'] = []
meta['sessionid'] = str(sid)
with open(self.filename, 'w', newline='\n') as f:
lazyjson.dump(meta, f, sort_keys=True)
ljdump(meta, f, sort_keys=True)
self.gc = HistoryGC() if gc else None
# command fields that are known
self.tss = CommandField('ts', self)
@ -338,7 +338,7 @@ class History(object):
# Interface to History
#
@functools.lru_cache()
def _create_parser():
def _hist_create_parser():
"""Create a parser for the "history" command."""
p = argparse.ArgumentParser(prog='history',
description='Tools for dealing with history')
@ -362,12 +362,12 @@ def _create_parser():
help='print in JSON format')
# diff
diff = subp.add_parser('diff', help='diffs two xonsh history files')
diff_history._create_parser(p=diff)
_dh_create_parser(p=diff)
# replay, dynamically
from xonsh import replay
rp = subp.add_parser('replay', help='replays a xonsh history file')
replay._create_parser(p=rp)
_MAIN_ACTIONS['replay'] = replay._main_action
replay._rp_create_parser(p=rp)
_HIST_MAIN_ACTIONS['replay'] = replay._rp_main_action
# gc
gcp = subp.add_parser('gc', help='launches a new history garbage collector')
gcp.add_argument('--size', nargs=2, dest='size', default=None,
@ -431,31 +431,31 @@ def _gc(ns, hist):
continue
_MAIN_ACTIONS = {
_HIST_MAIN_ACTIONS = {
'show': _show,
'id': lambda ns, hist: print(hist.sessionid),
'file': lambda ns, hist: print(hist.filename),
'info': _info,
'diff': diff_history._main_action,
'diff': _dh_main_action,
'gc': _gc,
}
def _main(hist, args):
def _hist_main(hist, args):
"""This implements the history CLI."""
if not args or (args[0] not in _MAIN_ACTIONS and
if not args or (args[0] not in _HIST_MAIN_ACTIONS and
args[0] not in {'-h', '--help'}):
args.insert(0, 'show')
if (args[0] == 'show' and len(args) > 1 and args[-1].startswith('-') and
args[-1][1].isdigit()):
args.insert(-1, '--') # ensure parsing stops before a negative int
ns = _create_parser().parse_args(args)
ns = _hist_create_parser().parse_args(args)
if ns.action is None: # apply default action
ns = _create_parser().parse_args(['show'] + args)
_MAIN_ACTIONS[ns.action](ns, hist)
ns = _hist_create_parser().parse_args(['show'] + args)
_HIST_MAIN_ACTIONS[ns.action](ns, hist)
def main(args=None, stdin=None):
def history_main(args=None, stdin=None):
"""This is the history command entry point."""
_ = stdin
_main(builtins.__xonsh_history__, args) # pylint: disable=no-member
_hist_main(builtins.__xonsh_history__, args) # pylint: disable=no-member

View file

@ -16,15 +16,19 @@ import linecache
import os
import sys
import types
import importlib
from xonsh import openpy
from xonsh.lazyasd import LazyObject
from xonsh.tokenize import detect_encoding
from xonsh.openpy import read_py_file
from xonsh.tools import (cast_unicode, safe_hasattr, indent,
print_color, format_color)
from xonsh.platform import HAS_PYGMENTS, PYTHON_VERSION_INFO
if HAS_PYGMENTS:
import pygments
from xonsh import pyghooks
pygments = LazyObject(lambda: importlib.import_module('pygments'),
globals(), 'pygments')
pyghooks = LazyObject(lambda: importlib.import_module('xonsh.pyghooks'),
globals(), 'pyghooks')
# builtin docstrings to ignore
@ -76,7 +80,7 @@ def get_encoding(obj):
# getsourcelines returns lineno with 1-offset and page() uses
# 0-offset, so we must adjust.
with stdlib_io.open(ofile, 'rb') as buf: # Tweaked to use io.open for Python 2
encoding, _ = openpy.detect_encoding(buf.readline)
encoding, _ = detect_encoding(buf.readline)
return encoding
@ -414,7 +418,7 @@ class Inspector(object):
# Print only text files, not extension binaries. Note that
# getsourcelines returns lineno with 1-offset and page() uses
# 0-offset, so we must adjust.
o = openpy.read_py_file(ofile, skip_encoding_cookie=False)
o = read_py_file(ofile, skip_encoding_cookie=False)
print(o, lineno - 1)
def _format_fields_str(self, fields, title_width=0):

View file

@ -3,14 +3,17 @@
import os
import sys
import time
import ctypes
import signal
import builtins
from subprocess import TimeoutExpired, check_output
from collections import deque
import functools
import subprocess
import collections
from xonsh.lazyasd import LazyObject
from xonsh.platform import ON_DARWIN, ON_WINDOWS, ON_CYGWIN
tasks = deque()
tasks = LazyObject(collections.deque, globals(), 'tasks')
# Track time stamp of last exit command, so that two consecutive attempts to
# exit can kill all jobs and exit.
_last_exit_time = None
@ -25,10 +28,8 @@ if ON_DARWIN:
if pid is None: # the pid of an aliased proc is None
continue
os.kill(pid, signal)
elif ON_WINDOWS:
pass
elif ON_CYGWIN:
# Similar to what happened on OSX, more issues on Cygwin
# (see Github issue #514).
@ -41,7 +42,6 @@ elif ON_CYGWIN:
os.kill(pid, signal)
except:
pass
else:
def _send_signal(job, signal):
os.killpg(job['pgrp'], signal)
@ -52,7 +52,8 @@ if ON_WINDOWS:
job['status'] = "running"
def _kill(job):
check_output(['taskkill', '/F', '/T', '/PID', str(job['obj'].pid)])
subprocess.check_output(['taskkill', '/F', '/T', '/PID',
str(job['obj'].pid)])
def ignore_sigtstp():
pass
@ -80,7 +81,7 @@ if ON_WINDOWS:
while obj.returncode is None:
try:
obj.wait(0.01)
except TimeoutExpired:
except subprocess.TimeoutExpired:
pass
except KeyboardInterrupt:
_kill(active_task)
@ -105,21 +106,36 @@ else:
_shell_pgrp = os.getpgrp()
_block_when_giving = (signal.SIGTTOU, signal.SIGTTIN,
signal.SIGTSTP, signal.SIGCHLD)
_block_when_giving = LazyObject(lambda: (signal.SIGTTOU, signal.SIGTTIN,
signal.SIGTSTP, signal.SIGCHLD),
globals(), '_block_when_giving')
# check for shell tty
@functools.lru_cache(1)
def _shell_tty():
    """Return stderr's file descriptor when this process owns the
    terminal's foreground process group, otherwise None.  The result is
    cached with ``lru_cache(1)`` so the probe runs at most once.
    """
    try:
        _st = sys.stderr.fileno()
        # compare the terminal's foreground process group against our own
        # process group id; a mismatch means another process controls it
        if os.tcgetpgrp(_st) != os.getpgid(os.getpid()):
            # we don't own it
            _st = None
    except OSError:
        # stderr is not backed by a tty (e.g. redirected to a file/pipe)
        _st = None
    return _st
# _give_terminal_to is a simplified version of:
# give_terminal_to from bash 4.3 source, jobs.c, line 4030
# this will give the terminal to the process group pgid
if ON_CYGWIN:
import ctypes
_libc = ctypes.CDLL('cygwin1.dll')
_libc = LazyObject(lambda: ctypes.CDLL('cygwin1.dll'),
globals(), '_libc')
# on cygwin, signal.pthread_sigmask does not exist in Python, even
# though pthread_sigmask is defined in the kernel. thus, we use
# ctypes to mimic the calls in the "normal" version below.
def _give_terminal_to(pgid):
if _shell_tty is not None and os.isatty(_shell_tty):
st = _shell_tty()
if st is not None and os.isatty(st):
omask = ctypes.c_ulong()
mask = ctypes.c_ulong()
_libc.sigemptyset(ctypes.byref(mask))
@ -129,25 +145,18 @@ else:
_libc.sigprocmask(ctypes.c_int(signal.SIG_BLOCK),
ctypes.byref(mask),
ctypes.byref(omask))
_libc.tcsetpgrp(ctypes.c_int(_shell_tty), ctypes.c_int(pgid))
_libc.tcsetpgrp(ctypes.c_int(shtty), ctypes.c_int(pgid))
_libc.sigprocmask(ctypes.c_int(signal.SIG_SETMASK),
ctypes.byref(omask), None)
else:
def _give_terminal_to(pgid):
if _shell_tty is not None and os.isatty(_shell_tty):
st = _shell_tty()
if st is not None and os.isatty(st):
oldmask = signal.pthread_sigmask(signal.SIG_BLOCK,
_block_when_giving)
os.tcsetpgrp(_shell_tty, pgid)
os.tcsetpgrp(st, pgid)
signal.pthread_sigmask(signal.SIG_SETMASK, oldmask)
# check for shell tty
try:
_shell_tty = sys.stderr.fileno()
if os.tcgetpgrp(_shell_tty) != os.getpgid(os.getpid()):
# we don't own it
_shell_tty = None
except OSError:
_shell_tty = None
def wait_for_active_job():
"""

164
xonsh/lazyasd.py Normal file
View file

@ -0,0 +1,164 @@
"""Lazy and self destructive containers for speeding up module import."""
import collections.abc as abc
class LazyObject(object):

    def __init__(self, load, ctx, name):
        """Lazily loads an object via the load function the first time an
        attribute is accessed.  Once loaded, the object replaces this
        instance in the provided context (typically the globals of the
        call site) under the given name.

        For example, compilation of a regular expression can be deferred
        until it is actually needed::

            DOT = LazyObject((lambda: re.compile('.')), globals(), 'DOT')

        Parameters
        ----------
        load : function with no arguments
            A loader function that performs the actual object construction.
        ctx : Mapping
            Context in which to replace this instance with the object
            returned by load().
        name : str
            Name in the context to give the loaded object. This *should*
            be the name on the LHS of the assignment.
        """
        self._lasdo = {
            'loaded': False,
            'load': load,
            'ctx': ctx,
            'name': name,
            }

    def _lazy_obj(self):
        # Load on first use, cache the result, and publish it into the
        # context so subsequent lookups bypass this wrapper entirely.
        state = self._lasdo
        if not state['loaded']:
            obj = state['load']()
            state['ctx'][state['name']] = state['obj'] = obj
            state['loaded'] = True
        return state['obj']

    def __getattribute__(self, name):
        # Only the two bookkeeping names resolve on the wrapper itself;
        # every other attribute is delegated to the lazily loaded object.
        if name in ('_lasdo', '_lazy_obj'):
            return super().__getattribute__(name)
        return getattr(self._lazy_obj(), name)

    def __bool__(self):
        return bool(self._lazy_obj())

    def __iter__(self):
        return iter(self._lazy_obj())
class LazyDict(abc.MutableMapping):

    def __init__(self, loaders, ctx, name):
        """Dictionary like object that lazily loads its values from an initial
        dict of key-loader function pairs.  Each key is loaded when its value
        is first accessed.  Once fully loaded, this object will replace itself
        in the provided context (typically the globals of the call site) with
        the given name.

        For example, you can prevent the compilation of a bunch of regular
        expressions until they are actually used::

            RES = LazyDict({
                    'dot': lambda: re.compile('.'),
                    'all': lambda: re.compile('.*'),
                    'two': lambda: re.compile('..'),
                }, globals(), 'RES')

        Parameters
        ----------
        loaders : Mapping of keys to functions with no arguments
            A mapping of loader functions that perform the actual value
            construction upon access.
        ctx : Mapping
            Context to replace this instance in with the fully loaded mapping.
        name : str
            Name in the context to give the loaded mapping. This *should*
            be the name on the LHS of the assignment.
        """
        self._loaders = loaders
        self._ctx = ctx
        self._name = name
        self._d = type(loaders)()  # make sure to return the same type

    def _destruct(self):
        # Once every loader has been consumed, publish the plain mapping
        # into the context so future lookups bypass this wrapper.
        if len(self._loaders) == 0:
            self._ctx[self._name] = self._d

    def __getitem__(self, key):
        d = self._d
        if key in d:
            val = d[key]
        else:
            # pop will raise a key error for us on unknown keys
            loader = self._loaders.pop(key)
            d[key] = val = loader()
            self._destruct()
        return val

    def __setitem__(self, key, value):
        self._d[key] = value
        if key in self._loaders:
            del self._loaders[key]
            self._destruct()

    def __delitem__(self, key):
        if key in self._d:
            # BUGFIX: was ``del self._d[lkey]``, which raised NameError
            # for every deletion of an already-loaded key.
            del self._d[key]
        else:
            del self._loaders[key]
            self._destruct()

    def __iter__(self):
        yield from (set(self._d.keys()) | set(self._loaders.keys()))

    def __len__(self):
        return len(self._d) + len(self._loaders)
class LazyBool(object):

    def __init__(self, load, ctx, name):
        """Boolean like object that lazily computes its boolean value when
        first asked.  Once computed, the result replaces this instance in
        the provided context (typically the globals of the call site) under
        the given name.

        For example, an expensive boolean can be deferred until it is
        actually used::

            ALIVE = LazyBool(lambda: not DEAD, globals(), 'ALIVE')

        Parameters
        ----------
        load : function with no arguments
            A loader function that performs the actual boolean evaluation.
        ctx : Mapping
            Context to replace this instance in with the computed value.
        name : str
            Name in the context to give the computed value. This *should*
            be the name on the LHS of the assignment.
        """
        self._load = load
        self._ctx = ctx
        self._name = name
        self._result = None

    def __bool__(self):
        if self._result is None:
            # compute once, then publish the result into the context
            result = self._load()
            self._ctx[self._name] = result
            self._result = result
        return self._result

View file

@ -2,10 +2,9 @@
"""Implements a lazy JSON file class that wraps around json data."""
import io
import json
from collections import Mapping, Sequence
from contextlib import contextmanager
import weakref
import contextlib
import collections.abc as abc
def _to_json_with_size(obj, offset=0, sort_keys=False):
@ -13,7 +12,7 @@ def _to_json_with_size(obj, offset=0, sort_keys=False):
s = json.dumps(obj)
o = offset
n = size = len(s.encode()) # size in bytes
elif isinstance(obj, Mapping):
elif isinstance(obj, abc.Mapping):
s = '{'
j = offset + 1
o = {}
@ -36,7 +35,7 @@ def _to_json_with_size(obj, offset=0, sort_keys=False):
n = len(s)
o['__total__'] = offset
size['__total__'] = n
elif isinstance(obj, Sequence):
elif isinstance(obj, abc.Sequence):
s = '['
j = offset + 1
o = []
@ -90,13 +89,13 @@ def dumps(obj, sort_keys=False):
return s
def dump(obj, fp, sort_keys=False):
def ljdump(obj, fp, sort_keys=False):
"""Dumps an object to JSON file."""
s = dumps(obj, sort_keys=sort_keys)
fp.write(s)
class Node(Mapping, Sequence):
class LJNode(abc.Mapping, abc.Sequence):
"""A proxy node for JSON nodes. Acts as both sequence and mapping."""
def __init__(self, offsets, sizes, root):
@ -112,8 +111,8 @@ class Node(Mapping, Sequence):
self.offsets = offsets
self.sizes = sizes
self.root = root
self.is_mapping = isinstance(self.offsets, Mapping)
self.is_sequence = isinstance(self.offsets, Sequence)
self.is_mapping = isinstance(self.offsets, abc.Mapping)
self.is_sequence = isinstance(self.offsets, abc.Sequence)
def __len__(self):
# recall that for maps, the '__total__' key is added and for
@ -139,8 +138,8 @@ class Node(Mapping, Sequence):
f.seek(self.root.dloc + offset)
s = f.read(size)
val = json.loads(s)
elif isinstance(offset, (Mapping, Sequence)):
val = Node(offset, size, self.root)
elif isinstance(offset, (abc.Mapping, abc.Sequence)):
val = LJNode(offset, size, self.root)
else:
raise TypeError('incorrect types for offset node')
return val
@ -187,7 +186,7 @@ class Node(Mapping, Sequence):
raise NotImplementedError
class LazyJSON(Node):
class LazyJSON(LJNode):
"""Represents a lazy json file. Can be used like a normal Python
dict or list.
"""
@ -206,8 +205,8 @@ class LazyJSON(Node):
self._f = open(f, 'r', newline='\n')
self._load_index()
self.root = weakref.proxy(self)
self.is_mapping = isinstance(self.offsets, Mapping)
self.is_sequence = isinstance(self.offsets, Sequence)
self.is_mapping = isinstance(self.offsets, abc.Mapping)
self.is_sequence = isinstance(self.offsets, abc.Sequence)
def __del__(self):
self.close()
@ -217,7 +216,7 @@ class LazyJSON(Node):
if not self.reopen and isinstance(self._f, io.IOBase):
self._f.close()
@contextmanager
@contextlib.contextmanager
def _open(self, *args, **kwargs):
if self.reopen and isinstance(self._f, str):
f = open(self._f, *args, **kwargs)

View file

@ -3,7 +3,6 @@
Written using a hybrid of ``tokenize`` and PLY.
"""
from io import BytesIO
from keyword import kwlist
@ -12,10 +11,54 @@ try:
except ImportError:
from xonsh.ply.lex import LexToken
from xonsh.lazyasd import LazyObject
from xonsh.platform import PYTHON_VERSION_INFO
import xonsh.tokenize as tokenize
from xonsh.tokenize import (OP, IOREDIRECT, STRING, DOLLARNAME, NUMBER,
SEARCHPATH, NEWLINE, INDENT, DEDENT, NL, COMMENT, ENCODING,
ENDMARKER, NAME, ERRORTOKEN, tokenize, TokenError)
token_map = {}
def _token_map():
    """Creates a mapping from ``tokenize`` tokens (or token types) to PLY
    token types.  When a simple one-to-one mapping from ``tokenize`` to PLY
    exists, the lexer looks it up here and generates a single PLY token of
    the given type; otherwise it falls back to ``special_handlers``.
    """
    # simple one-to-one token type translations
    tm = {
        IOREDIRECT: 'IOREDIRECT',
        STRING: 'STRING',
        DOLLARNAME: 'DOLLAR_NAME',
        NUMBER: 'NUMBER',
        SEARCHPATH: 'SEARCHPATH',
        NEWLINE: 'NEWLINE',
        INDENT: 'INDENT',
        DEDENT: 'DEDENT',
        }
    # operators, keyed by their source text
    _op_map = {
        # punctuation
        ',': 'COMMA', '.': 'PERIOD', ';': 'SEMI', ':': 'COLON',
        '...': 'ELLIPSIS',
        # basic operators
        '+': 'PLUS', '-': 'MINUS', '*': 'TIMES', '@': 'AT', '/': 'DIVIDE',
        '//': 'DOUBLEDIV', '%': 'MOD', '**': 'POW', '|': 'PIPE',
        '~': 'TILDE', '^': 'XOR', '<<': 'LSHIFT', '>>': 'RSHIFT',
        '<': 'LT', '<=': 'LE', '>': 'GT', '>=': 'GE', '==': 'EQ',
        '!=': 'NE', '->': 'RARROW',
        # assignment operators
        '=': 'EQUALS', '+=': 'PLUSEQUAL', '-=': 'MINUSEQUAL',
        '*=': 'TIMESEQUAL', '@=': 'ATEQUAL', '/=': 'DIVEQUAL', '%=': 'MODEQUAL',
        '**=': 'POWEQUAL', '<<=': 'LSHIFTEQUAL', '>>=': 'RSHIFTEQUAL',
        '&=': 'AMPERSANDEQUAL', '^=': 'XOREQUAL', '|=': 'PIPEEQUAL',
        '//=': 'DOUBLEDIVEQUAL',
        # extra xonsh operators
        '?': 'QUESTION', '??': 'DOUBLE_QUESTION', '@$': 'ATDOLLAR',
        '&': 'AMPERSAND',
        }
    tm.update(((OP, op), typ) for op, typ in _op_map.items())
    # async/await tokens only exist on Python 3.5+
    if PYTHON_VERSION_INFO >= (3, 5, 0):
        from xonsh.tokenize import ASYNC, AWAIT
        tm[ASYNC] = 'ASYNC'
        tm[AWAIT] = 'AWAIT'
    return tm
token_map = LazyObject(_token_map, globals(), 'token_map')
"""
Mapping from ``tokenize`` tokens (or token types) to PLY token types. If a
simple one-to-one mapping from ``tokenize`` to PLY exists, the lexer will look
@ -23,42 +66,7 @@ it up here and generate a single PLY token of the given type. Otherwise, it
will fall back to handling that token using one of the handlers in
``special_handlers``.
"""
# operators
_op_map = {
# punctuation
',': 'COMMA', '.': 'PERIOD', ';': 'SEMI', ':': 'COLON',
'...': 'ELLIPSIS',
# basic operators
'+': 'PLUS', '-': 'MINUS', '*': 'TIMES', '@': 'AT', '/': 'DIVIDE',
'//': 'DOUBLEDIV', '%': 'MOD', '**': 'POW', '|': 'PIPE',
'~': 'TILDE', '^': 'XOR', '<<': 'LSHIFT', '>>': 'RSHIFT',
'<': 'LT', '<=': 'LE', '>': 'GT', '>=': 'GE', '==': 'EQ',
'!=': 'NE', '->': 'RARROW',
# assignment operators
'=': 'EQUALS', '+=': 'PLUSEQUAL', '-=': 'MINUSEQUAL',
'*=': 'TIMESEQUAL', '@=': 'ATEQUAL', '/=': 'DIVEQUAL', '%=': 'MODEQUAL',
'**=': 'POWEQUAL', '<<=': 'LSHIFTEQUAL', '>>=': 'RSHIFTEQUAL',
'&=': 'AMPERSANDEQUAL', '^=': 'XOREQUAL', '|=': 'PIPEEQUAL',
'//=': 'DOUBLEDIVEQUAL',
# extra xonsh operators
'?': 'QUESTION', '??': 'DOUBLE_QUESTION', '@$': 'ATDOLLAR',
'&': 'AMPERSAND',
}
for (op, type) in _op_map.items():
token_map[(tokenize.OP, op)] = type
token_map[tokenize.IOREDIRECT] = 'IOREDIRECT'
token_map[tokenize.STRING] = 'STRING'
token_map[tokenize.DOLLARNAME] = 'DOLLAR_NAME'
token_map[tokenize.NUMBER] = 'NUMBER'
token_map[tokenize.SEARCHPATH] = 'SEARCHPATH'
token_map[tokenize.NEWLINE] = 'NEWLINE'
token_map[tokenize.INDENT] = 'INDENT'
token_map[tokenize.DEDENT] = 'DEDENT'
if PYTHON_VERSION_INFO >= (3, 5, 0):
token_map[tokenize.ASYNC] = 'ASYNC'
token_map[tokenize.AWAIT] = 'AWAIT'
del _token_map
def _make_matcher_handler(tok, typ, pymode, ender):
@ -70,7 +78,7 @@ def _make_matcher_handler(tok, typ, pymode, ender):
state['pymode'].append((pymode, tok, matcher, token.start))
state['last'] = token
yield _new_token(typ, tok, token.start)
special_handlers[(tokenize.OP, tok)] = _inner_handler
special_handlers[(OP, tok)] = _inner_handler
def handle_name(state, token):
@ -180,18 +188,18 @@ def handle_double_pipe(state, token):
special_handlers = {
tokenize.NL: handle_ignore,
tokenize.COMMENT: handle_ignore,
tokenize.ENCODING: handle_ignore,
tokenize.ENDMARKER: handle_ignore,
tokenize.NAME: handle_name,
tokenize.ERRORTOKEN: handle_error_token,
(tokenize.OP, ')'): handle_rparen,
(tokenize.OP, '}'): handle_rbrace,
(tokenize.OP, ']'): handle_rbracket,
(tokenize.OP, '&&'): handle_double_amps,
(tokenize.OP, '||'): handle_double_pipe,
(tokenize.ERRORTOKEN, ' '): handle_error_space,
NL: handle_ignore,
COMMENT: handle_ignore,
ENCODING: handle_ignore,
ENDMARKER: handle_ignore,
NAME: handle_name,
ERRORTOKEN: handle_error_token,
(OP, ')'): handle_rparen,
(OP, '}'): handle_rbrace,
(OP, ']'): handle_rbracket,
(OP, '&&'): handle_double_amps,
(OP, '||'): handle_double_pipe,
(ERRORTOKEN, ' '): handle_error_space,
}
"""
Mapping from ``tokenize`` tokens (or token types) to the proper function for
@ -257,7 +265,7 @@ def get_tokens(s):
"""
state = {'indents': [0], 'last': None,
'pymode': [(True, '', '', (0, 0))],
'stream': tokenize.tokenize(BytesIO(s.encode('utf-8')).readline)}
'stream': tokenize(BytesIO(s.encode('utf-8')).readline)}
while True:
try:
token = next(state['stream'])
@ -269,7 +277,7 @@ def get_tokens(s):
e = 'Unmatched "{}" at line {}, column {}'
yield _new_token('ERRORTOKEN', e.format(o, l, c), (0, 0))
break
except tokenize.TokenError as e:
except TokenError as e:
# this is recoverable in single-line mode (from the shell)
# (e.g., EOF while scanning string literal)
yield _new_token('ERRORTOKEN', e.args[0], (0, 0))

View file

@ -4,8 +4,9 @@ import os
import sys
import enum
import builtins
from argparse import ArgumentParser, ArgumentTypeError
import importlib
from contextlib import contextmanager
from argparse import ArgumentParser, ArgumentTypeError
try:
from setproctitle import setproctitle
@ -13,6 +14,7 @@ except ImportError:
setproctitle = None
from xonsh import __version__
from xonsh.lazyasd import LazyObject
from xonsh.environ import DEFAULT_VALUES
from xonsh.shell import Shell
from xonsh.pretty import pprint, pretty
@ -22,9 +24,11 @@ from xonsh.tools import setup_win_unicode_console, print_color
from xonsh.platform import HAS_PYGMENTS, ON_WINDOWS
from xonsh.codecache import run_script_with_cache, run_code_with_cache
if HAS_PYGMENTS:
import pygments
from xonsh import pyghooks
pygments = LazyObject(lambda: importlib.import_module('pygments'),
globals(), 'pygments')
pyghooks = LazyObject(lambda: importlib.import_module('xonsh.pyghooks'),
globals(), 'pyghooks')
def path_argument(s):
@ -246,7 +250,8 @@ def postmain(args=None):
"""Teardown for main xonsh entry point, accepts parsed arguments."""
if ON_WINDOWS:
setup_win_unicode_console(enable=False)
del builtins.__xonsh_shell__
if hasattr(builtins, '__xonsh_shell__'):
del builtins.__xonsh_shell__
@contextmanager

View file

@ -13,122 +13,16 @@ This file was forked from the IPython project:
* Copyright (c) 2001, Nathaniel Gray <n8gray@caltech.edu>
"""
import io
import os.path
import os
import re
import tokenize
from xonsh.lazyasd import LazyObject
cookie_re = re.compile(r"coding[:=]\s*([-\w.]+)", re.UNICODE)
cookie_comment_re = re.compile(r"^\s*#.*coding[:=]\s*([-\w.]+)", re.UNICODE)
try:
# Available in Python 3
from tokenize import detect_encoding
except ImportError:
from codecs import lookup, BOM_UTF8
# Copied from Python 3.2 tokenize
def _get_normal_name(orig_enc):
"""Imitates get_normal_name in tokenizer.c."""
# Only care about the first 12 characters.
enc = orig_enc[:12].lower().replace("_", "-")
if enc == "utf-8" or enc.startswith("utf-8-"):
return "utf-8"
if enc in ("latin-1", "iso-8859-1", "iso-latin-1") or \
enc.startswith(("latin-1-", "iso-8859-1-", "iso-latin-1-")):
return "iso-8859-1"
return orig_enc
# Copied from Python 3.2 tokenize
def detect_encoding(readline):
"""
The detect_encoding() function is used to detect the encoding that
should be used to decode a Python source file. It requires one
argment, readline, in the same way as the tokenize() generator.
It will call readline a maximum of twice, and return the encoding used
(as a string) and a list of any lines (left as bytes) it has read in.
It detects the encoding from the presence of a utf-8 bom or an encoding
cookie as specified in pep-0263. If both a bom and a cookie are
present, but disagree, a SyntaxError will be raised. If the encoding
cookie is an invalid charset, raise a SyntaxError. Note that if a
utf-8 bom is found, 'utf-8-sig' is returned.
If no encoding is specified, then the default of 'utf-8' will be
returned.
"""
bom_found = False
encoding = None
default = 'utf-8'
def read_or_stop():
try:
return readline()
except StopIteration:
return b''
def find_cookie(line):
try:
line_string = line.decode('ascii')
except UnicodeDecodeError:
return None
matches = cookie_re.findall(line_string)
if not matches:
return None
encoding = _get_normal_name(matches[0])
try:
codec = lookup(encoding)
except LookupError:
# This behaviour mimics the Python interpreter
raise SyntaxError("unknown encoding: " + encoding, filename='<file>')
if bom_found:
if codec.name != 'utf-8':
# This behaviour mimics the Python interpreter
raise SyntaxError('encoding problem: utf-8', filename='<file>')
encoding += '-sig'
return encoding
first = read_or_stop()
if first.startswith(BOM_UTF8):
bom_found = True
first = first[3:]
default = 'utf-8-sig'
if not first:
return default, []
encoding = find_cookie(first)
if encoding:
return encoding, [first]
second = read_or_stop()
if not second:
return default, [first]
encoding = find_cookie(second)
if encoding:
return encoding, [first, second]
return default, [first, second]
try:
# Available in Python 3.2 and above.
from tokenize import tokopen
except ImportError:
# Copied from Python 3.2 tokenize
def tokopen(filename):
"""Open a file in read only mode using the encoding detected by
detect_encoding().
"""
buf = io.open(filename, 'rb') # Tweaked to use io.open for Python 2
encoding, lines = detect_encoding(buf.readline)
buf.seek(0)
text = io.TextIOWrapper(buf, encoding, line_buffering=True)
text.mode = 'r'
return text
cookie_comment_re = LazyObject(
lambda: re.compile(r"^\s*#.*coding[:=]\s*([-\w.]+)", re.UNICODE),
globals(), 'cookie_comment_re')
def source_to_unicode(txt, errors='replace', skip_encoding_cookie=True):
"""Converts a bytes string with python source code to unicode.
@ -145,7 +39,7 @@ def source_to_unicode(txt, errors='replace', skip_encoding_cookie=True):
else:
buf = txt
try:
encoding, _ = detect_encoding(buf.readline)
encoding, _ = tokenize.detect_encoding(buf.readline)
except SyntaxError:
encoding = "ascii"
buf.seek(0)
@ -192,7 +86,7 @@ def read_py_file(filename, skip_encoding_cookie=True):
-------
A unicode string containing the contents of the file.
"""
with tokopen(filename) as f: # the open function defined in this module.
with tokenize.tokopen(filename) as f: # the open function defined in this module.
if skip_encoding_cookie:
return "".join(strip_encoding_cookie(f))
else:
@ -238,22 +132,3 @@ def _list_readline(x):
return next(x)
return readline
# Code for going between .py files and cached .pyc files ----------------------
try: # Python 3.2, see PEP 3147
from imp import source_from_cache, cache_from_source # pylint:disable=unused-import
except ImportError:
# Python <= 3.1: .pyc files go next to .py
def source_from_cache(path):
basename, ext = os.path.splitext(path)
if ext not in ('.pyc', '.pyo'):
raise ValueError('Not a cached Python file extension', ext)
# Should we look for .pyw files?
return basename + '.py'
def cache_from_source(path, debug_override=None):
if debug_override is None:
debug_override = __debug__
basename, _ = os.path.splitext(path)
return basename + '.pyc' if debug_override else '.pyo'

View file

@ -1,6 +1,8 @@
# -*- coding: utf-8 -*-
"""Implements the base xonsh parser."""
import re
import time
from threading import Thread
from collections import Iterable, Sequence, Mapping
try:
@ -13,6 +15,10 @@ from xonsh.ast import has_elts, xonsh_call
from xonsh.lexer import Lexer, LexToken
from xonsh.platform import PYTHON_VERSION_INFO
from xonsh.tokenize import SearchPath
from xonsh.lazyasd import LazyObject
RE_SEARCHPATH = LazyObject(lambda: re.compile(SearchPath), globals(),
'RE_SEARCHPATH')
class Location(object):
"""Location in a file."""
@ -116,7 +122,7 @@ def xonsh_pathsearch(pattern, pymode=False, lineno=None, col=None):
"""Creates the AST node for calling the __xonsh_pathsearch__() function.
The pymode argument indicate if it is called from subproc or python mode"""
pymode = ast.NameConstant(value=pymode, lineno=lineno, col_offset=col)
searchfunc, pattern = re.match(SearchPath, pattern).groups()
searchfunc, pattern = RE_SEARCHPATH.match(pattern).groups()
pattern = ast.Str(s=pattern, lineno=lineno,
col_offset=col)
if searchfunc in {'r', ''}:
@ -168,6 +174,20 @@ def lopen_loc(x):
return lineno, col
class YaccLoader(Thread):
"""Thread to load (but not shave) the yacc parser."""
def __init__(self, parser, yacc_kwargs, *args, **kwargs):
super().__init__(*args, **kwargs)
self.daemon = True
self.parser = parser
self.yacc_kwargs = yacc_kwargs
self.start()
def run(self):
self.parser.parser = yacc.yacc(**self.yacc_kwargs)
class BaseParser(object):
"""A base class that parses the xonsh language."""
@ -249,7 +269,9 @@ class BaseParser(object):
yacc_kwargs['errorlog'] = yacc.NullLogger()
if outputdir is not None:
yacc_kwargs['outputdir'] = outputdir
self.parser = yacc.yacc(**yacc_kwargs)
self.parser = None
YaccLoader(self, yacc_kwargs)
#self.parser = yacc.yacc(**yacc_kwargs)
# Keeps track of the last token given to yacc (the lookahead token)
self._last_yielded_token = None
@ -280,6 +302,8 @@ class BaseParser(object):
self.reset()
self.xonsh_code = s
self.lexer.fname = filename
while self.parser is None:
time.sleep(0.01) # block until the parser is ready
tree = self.parser.parse(input=s, lexer=self.lexer, debug=debug_level)
# hack for getting modes right
if mode == 'single':
@ -1703,7 +1727,7 @@ class BaseParser(object):
def p_atom_ellip(self, p):
"""atom : ellipsis_tok"""
p1 = p[1]
p[0] = ast.Ellipsis(lineno=p1.lineno, col_offset=p1.lexpos)
p[0] = ast.EllipsisNode(lineno=p1.lineno, col_offset=p1.lexpos)
def p_atom_none(self, p):
"""atom : none_tok"""

View file

@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
"""Implements the xonsh parser for Python v3.4."""
from xonsh import ast
import xonsh.ast as ast
from xonsh.parsers.base import BaseParser

View file

@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
"""Implements the xonsh parser for Python v3.5."""
from xonsh import ast
import xonsh.ast as ast
from xonsh.parsers.base import BaseParser

View file

@ -1,19 +1,29 @@
""" Module for platform-specific constants and implementations, as well as
compatibility layers to make use of the 'best' implementation available
on a platform.
"""Module for platform-specific constants and implementations, as well as
compatibility layers to make use of the 'best' implementation available
on a platform.
"""
import os
import sys
import pathlib
import platform
from functools import lru_cache
import functools
import subprocess
import importlib.util
try:
import distro
except ImportError:
distro = None
except:
raise
from xonsh.lazyasd import LazyObject, LazyBool
def _distro():
try:
import distro as d
except ImportError:
d = None
except:
raise
return d
distro = LazyObject(_distro, globals(), 'distro')
del _distro
# do not import any xonsh-modules here to avoid circular dependencies
@ -21,16 +31,18 @@ except:
#
# OS
#
ON_DARWIN = platform.system() == 'Darwin'
ON_DARWIN = LazyBool(lambda: platform.system() == 'Darwin',
globals(), 'ON_DARWIN')
""" ``True`` if executed on a Darwin platform, else ``False``. """
ON_LINUX = platform.system() == 'Linux'
ON_LINUX = LazyBool(lambda: platform.system() == 'Linux',
globals(), 'ON_LINUX')
""" ``True`` if executed on a Linux platform, else ``False``. """
ON_WINDOWS = platform.system() == 'Windows'
ON_WINDOWS = LazyBool(lambda: platform.system() == 'Windows',
globals(), 'ON_WINDOWS')
""" ``True`` if executed on a native Windows platform, else ``False``. """
ON_CYGWIN = sys.platform == 'cygwin'
ON_CYGWIN = LazyBool(lambda: sys.platform == 'cygwin', globals(), 'ON_CYGWIN')
""" ``True`` if executed on a Cygwin Windows platform, else ``False``. """
ON_POSIX = (os.name == 'posix')
ON_POSIX = LazyBool(lambda: (os.name == 'posix'), globals(), 'ON_POSIX')
""" ``True`` if executed on a POSIX-compliant platform, else ``False``. """
@ -40,39 +52,40 @@ ON_POSIX = (os.name == 'posix')
PYTHON_VERSION_INFO = sys.version_info[:3]
""" Version of Python interpreter as three-value tuple. """
ON_ANACONDA = any(s in sys.version for s in {'Anaconda', 'Continuum'})
ON_ANACONDA = LazyBool(
lambda: any(s in sys.version for s in {'Anaconda', 'Continuum'}),
globals(), 'ON_ANACONDA')
""" ``True`` if executed in an Anaconda instance, else ``False``. """
def _has_pygments():
spec = importlib.util.find_spec('pygments')
return (spec is not None)
HAS_PYGMENTS = False
HAS_PYGMENTS = LazyBool(_has_pygments, globals(), 'HAS_PYGMENTS')
""" ``True`` if `pygments` is available, else ``False``. """
PYGMENTS_VERSION = None
""" `pygments.__version__` version if available, else ``Ǹone``. """
try:
import pygments
except ImportError:
pass
except:
raise
else:
HAS_PYGMENTS, PYGMENTS_VERSION = True, pygments.__version__
del _has_pygments
@lru_cache(1)
@functools.lru_cache(1)
def pygments_version():
"""pygments.__version__ version if available, else Ǹone."""
if HAS_PYGMENTS:
import pygments
v = pygments.__version__
else:
v = None
return v
@functools.lru_cache(1)
def has_prompt_toolkit():
""" Tests if the `prompt_toolkit` is available. """
try:
import prompt_toolkit
except ImportError:
return False
except:
raise
else:
return True
spec = importlib.util.find_spec('pygments')
return (spec is not None)
@lru_cache(1)
@functools.lru_cache(1)
def ptk_version():
""" Returns `prompt_toolkit.__version__` if available, else ``None``. """
if has_prompt_toolkit():
@ -82,7 +95,7 @@ def ptk_version():
return None
@lru_cache(1)
@functools.lru_cache(1)
def ptk_version_info():
""" Returns `prompt_toolkit`'s version as tuple of integers. """
if has_prompt_toolkit():
@ -91,7 +104,7 @@ def ptk_version_info():
return None
@lru_cache(1)
@functools.lru_cache(1)
def best_shell_type():
if ON_WINDOWS or has_prompt_toolkit():
return 'prompt_toolkit'
@ -99,15 +112,13 @@ def best_shell_type():
return 'readline'
@lru_cache(1)
@functools.lru_cache(1)
def is_readline_available():
"""Checks if readline is available to import."""
try:
import readline
except: # pyreadline will sometimes fail in strange ways
return False
else:
return True
spec = importlib.util.find_spec('readline')
return (spec is not None)
#
# Encoding
#
@ -117,11 +128,9 @@ DEFAULT_ENCODING = sys.getdefaultencoding()
if PYTHON_VERSION_INFO < (3, 5, 0):
from pathlib import Path
class DirEntry:
def __init__(self, directory, name):
self.__path__ = Path(directory) / name
self.__path__ = pathlib.Path(directory) / name
self.name = name
self.path = str(self.__path__)
self.is_symlink = self.__path__.is_symlink
@ -157,106 +166,117 @@ else:
# Linux distro
#
LINUX_DISTRO = None
""" The id of the Linux distribution running on, possibly 'unknown'.
``Ǹone`` on non-Linux platforms.
"""
if ON_LINUX:
if distro:
LINUX_DISTRO = distro.id()
elif PYTHON_VERSION_INFO < (3, 7, 0):
LINUX_DISTRO = platform.linux_distribution()[0] or 'unknown'
elif '-ARCH-' in platform.platform():
LINUX_DISTRO = 'arch' # that's the only one we need to know for now
@functools.lru_cache(1)
def linux_distro():
"""The id of the Linux distribution running on, possibly 'unknown'.
None on non-Linux platforms.
"""
if ON_LINUX:
if distro:
ld = distro.id()
elif PYTHON_VERSION_INFO < (3, 7, 0):
ld = platform.linux_distribution()[0] or 'unknown'
elif '-ARCH-' in platform.platform():
ld = 'arch' # that's the only one we need to know for now
else:
ld = 'unknown'
else:
LINUX_DISTRO = 'unknown'
ld = None
return ld
#
# Windows
#
if ON_WINDOWS:
try:
import win_unicode_console
except ImportError:
win_unicode_console = None
else:
win_unicode_console = None
if ON_WINDOWS:
@functools.lru_cache(1)
def git_for_windows_path():
"""Returns the path to git for windows, if available and None otherwise."""
import winreg
try:
key = winreg.OpenKey(winreg.HKEY_LOCAL_MACHINE,
'SOFTWARE\\GitForWindows')
GIT_FOR_WINDOWS_PATH, type = winreg.QueryValueEx(key, "InstallPath")
gfwp, _ = winreg.QueryValueEx(key, "InstallPath")
except FileNotFoundError:
GIT_FOR_WINDOWS_PATH = None
gfwp = None
return gfwp
@functools.lru_cache(1)
def windows_bash_command():
"""Determines teh command for Bash on windows."""
import winreg
# Check that bash is on path otherwise try the default directory
# used by Git for windows
import subprocess
WINDOWS_BASH_COMMAND = 'bash'
wbc = 'bash'
try:
subprocess.check_call([WINDOWS_BASH_COMMAND, '--version'],
subprocess.check_call([wbc, '--version'],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
except (FileNotFoundError, subprocess.CalledProcessError):
if GIT_FOR_WINDOWS_PATH:
bashcmd = os.path.join(GIT_FOR_WINDOWS_PATH, 'bin\\bash.exe')
gfwp = git_for_windows_path()
if gfwp:
bashcmd = os.path.join(gfwp, 'bin\\bash.exe')
if os.path.isfile(bashcmd):
WINDOWS_BASH_COMMAND = bashcmd
wbc = bashcmd
return wbc
#
# Environment variables defaults
#
BASH_COMPLETIONS_DEFAULT = ()
"""A possibly empty tuple with default paths to Bash completions known for
the current platform.
"""
if LINUX_DISTRO == 'arch':
BASH_COMPLETIONS_DEFAULT = (
'/usr/share/bash-completion/bash_completion',
'/usr/share/bash-completion/completions')
PATH_DEFAULT = ('/usr/local/sbin',
'/usr/local/bin', '/usr/bin', '/usr/bin/site_perl',
'/usr/bin/vendor_perl', '/usr/bin/core_perl')
elif ON_LINUX or ON_CYGWIN:
BASH_COMPLETIONS_DEFAULT = (
'/usr/share/bash-completion',
'/usr/share/bash-completion/completions')
PATH_DEFAULT = (os.path.expanduser('~/bin'), '/usr/local/sbin',
'/usr/local/bin', '/usr/sbin', '/usr/bin', '/sbin', '/bin',
'/usr/games', '/usr/local/games')
elif ON_DARWIN:
BASH_COMPLETIONS_DEFAULT = (
'/usr/local/etc/bash_completion',
'/opt/local/etc/profile.d/bash_completion.sh')
PATH_DEFAULT = ('/usr/local/bin', '/usr/bin', '/bin', '/usr/sbin', '/sbin')
elif ON_WINDOWS and GIT_FOR_WINDOWS_PATH:
BASH_COMPLETIONS_DEFAULT = (
os.path.join(GIT_FOR_WINDOWS_PATH,
'usr\\share\\bash-completion'),
os.path.join(GIT_FOR_WINDOWS_PATH,
'usr\\share\\bash-completion\\completions'),
os.path.join(GIT_FOR_WINDOWS_PATH,
def _bcd():
"""A possibly empty tuple with default paths to Bash completions known for
the current platform.
"""
if ON_LINUX or ON_CYGWIN:
if linux_distro() == 'arch':
bcd = (
'/usr/share/bash-completion/bash_completion',
'/usr/share/bash-completion/completions')
else:
bcd = ('/usr/share/bash-completion',
'/usr/share/bash-completion/completions')
elif ON_DARWIN:
bcd = ('/usr/local/etc/bash_completion',
'/opt/local/etc/profile.d/bash_completion.sh')
elif ON_WINDOWS and git_for_windows_path():
bcd = (os.path.join(git_for_windows_path(),
'usr\\share\\bash-completion'),
os.path.join(git_for_windows_path(),
'usr\\share\\bash-completion\\completions'),
os.path.join(git_for_windows_path(),
'mingw64\\share\\git\\completion\\git-completion.bash'))
PATH_DEFAULT = tuple(winreg.QueryValueEx(winreg.OpenKey(
winreg.HKEY_LOCAL_MACHINE,
r'SYSTEM\CurrentControlSet\Control\Session Manager\Environment'),
'Path')[0].split(os.pathsep))
else:
PATH_DEFAULT = ()
else:
bcd = ()
return bcd
#
# All constants as a dict
#
PLATFORM_INFO = {name: obj for name, obj in globals().items()
if name.isupper()}
""" The constants of this module as dictionary. """
BASH_COMPLETIONS_DEFAULT = LazyObject(_bcd, globals(),
'BASH_COMPLETIONS_DEFAULT')
del _bcd
def _pd():
if ON_LINUX or ON_CYGWIN:
if linux_distro() == 'arch':
pd = ('/usr/local/sbin',
'/usr/local/bin', '/usr/bin', '/usr/bin/site_perl',
'/usr/bin/vendor_perl', '/usr/bin/core_perl')
else:
pd = (os.path.expanduser('~/bin'), '/usr/local/sbin',
'/usr/local/bin', '/usr/sbin', '/usr/bin', '/sbin', '/bin',
'/usr/games', '/usr/local/games')
elif ON_DARWIN:
pd = ('/usr/local/bin', '/usr/bin', '/bin', '/usr/sbin', '/sbin')
elif ON_WINDOWS:
import winreg
key = winreg.OpenKey(winreg.HKEY_LOCAL_MACHINE,
r'SYSTEM\CurrentControlSet\Control\Session Manager\Environment')
pd = tuple(winreg.QueryValueEx(key, 'Path')[0].split(os.pathsep))
else:
pd = ()
return pd
PATH_DEFAULT = LazyObject(_pd, globals(), 'PATH_DEFAULT')
del _pd

View file

@ -4,28 +4,29 @@ import os
import re
import string
import builtins
import importlib
from warnings import warn
from collections import ChainMap
from collections.abc import MutableMapping
from pygments.lexer import inherit, bygroups, using, this
from pygments.token import (Keyword, Name, Comment, String, Error, Number,
Operator, Generic, Whitespace, Token)
from pygments.lexers.shell import BashLexer
from pygments.lexers.agile import PythonLexer
from pygments.token import (Keyword, Name, Comment, String, Error, Number,
Operator, Generic, Whitespace, Token)
from pygments.style import Style
from pygments.styles import get_style_by_name
import pygments.util
from xonsh.lazyasd import LazyObject
from xonsh.tools import (ON_WINDOWS, intensify_colors_for_cmd_exe,
expand_gray_colors_for_cmd_exe)
from xonsh.tokenize import SearchPath
class XonshSubprocLexer(BashLexer):
"""Lexer for xonsh subproc mode."""
name = 'Xonsh subprocess lexer'
tokens = {'root': [(SearchPath, String.Backtick), inherit, ]}
@ -90,7 +91,9 @@ XonshSubprocLexer.tokens['root'] = [
Color = Token.Color # alias to new color token namespace
RE_BACKGROUND = re.compile('(BG#|BGHEX|BACKGROUND)')
RE_BACKGROUND = LazyObject(lambda: re.compile('(BG#|BGHEX|BACKGROUND)'),
globals(), 'RE_BACKGROUND')
def norm_name(name):
"""Normalizes a color name."""
@ -553,7 +556,7 @@ if hasattr(pygments.style, 'ansicolors'):
}
elif ON_WINDOWS and 'CONEMUANSI' not in os.environ:
# These colors must match the color specification
# in prompt_toolkit, so the colors are converted
# in prompt_toolkit, so the colors are converted
# correctly when using cmd.exe
DEFAULT_STYLE = {
Color.BLACK: '#000000',

View file

@ -18,17 +18,21 @@ from cmd import Cmd
from threading import Thread
from collections import deque
from xonsh import lazyjson
from xonsh.lazyjson import LazyJSON
from xonsh.lazyasd import LazyObject
from xonsh.base_shell import BaseShell
from xonsh.ansi_colors import partial_color_format, color_style_names, color_style
from xonsh.environ import partial_format_prompt, multiline_prompt
from xonsh.tools import print_exception
from xonsh.platform import HAS_PYGMENTS, ON_WINDOWS, ON_CYGWIN, ON_DARWIN
if HAS_PYGMENTS:
from xonsh import pyghooks
import pygments
from pygments.formatters.terminal256 import Terminal256Formatter
pygments = LazyObject(lambda: importlib.import_module('pygments'),
globals(), 'pygments')
terminal256 = LazyObject(lambda: importlib.import_module(
'pygments.formatters.terminal256'),
globals(), 'terminal')
pyghooks = LazyObject(lambda: importlib.import_module('xonsh.pyghooks'),
globals(), 'pyghooks')
readline = None
RL_COMPLETION_SUPPRESS_APPEND = RL_LIB = RL_STATE = None
@ -415,7 +419,7 @@ class ReadlineShell(BaseShell, Cmd):
env = builtins.__xonsh_env__
self.styler.style_name = env.get('XONSH_COLOR_STYLE')
style_proxy = pyghooks.xonsh_style_proxy(self.styler)
formatter = Terminal256Formatter(style=style_proxy)
formatter = terminal256.Terminal256Formatter(style=style_proxy)
s = pygments.format(string, formatter).rstrip()
print(s, **kwargs)
@ -452,7 +456,7 @@ class ReadlineHistoryAdder(Thread):
i = 1
for _, _, f in files:
try:
lj = lazyjson.LazyJSON(f, reopen=False)
lj = LazyJSON(f, reopen=False)
for cmd in lj['cmds']:
inp = cmd['inp'].splitlines()
for line in inp:

View file

@ -5,7 +5,7 @@ import builtins
from collections.abc import Mapping
from xonsh.tools import swap
from xonsh import lazyjson
from xonsh.lazyjson import LazyJSON
from xonsh.environ import Env
from xonsh.history import History
from xonsh.history import _info as history_info
@ -26,7 +26,7 @@ class Replayer(object):
Whether new file handle should be opened for each load, passed directly into
LazyJSON class.
"""
self._lj = lazyjson.LazyJSON(f, reopen=reopen)
self._lj = LazyJSON(f, reopen=reopen)
def __del__(self):
self._lj.close()
@ -78,7 +78,7 @@ class Replayer(object):
_REPLAY_PARSER = None
def _create_parser(p=None):
def _rp_create_parser(p=None):
global _REPLAY_PARSER
p_was_none = (p is None)
if _REPLAY_PARSER is not None and p_was_none:
@ -104,7 +104,7 @@ def _create_parser(p=None):
return p
def _main_action(ns, h=None):
def _rp_main_action(ns, h=None):
replayer = Replayer(ns.path)
hist = replayer.replay(merge_envs=ns.merge_envs, target=ns.target)
print('----------------------------------------------------------------')
@ -113,8 +113,8 @@ def _main_action(ns, h=None):
history_info(ns, hist)
def main(args, stdin=None):
def replay_main(args, stdin=None):
"""Acts as main function for replaying a xonsh history file."""
parser = _create_parser()
parser = _rp_create_parser()
ns = parser.parse_args(args)
_main_action(ns)
_rp_main_action(ns)

View file

@ -1,15 +1,16 @@
# -*- coding: utf-8 -*-
"""The xonsh shell"""
import builtins
import os
import random
import builtins
from warnings import warn
from xonsh import xontribs
from xonsh.xontribs import update_context
from xonsh.environ import xonshrc_context
from xonsh.execer import Execer
from xonsh.platform import (best_shell_type, has_prompt_toolkit, ptk_version,
ptk_version_info)
from xonsh.tools import XonshError
from xonsh.tools import XonshError, to_bool_or_int
class Shell(object):
@ -81,7 +82,9 @@ class Shell(object):
def _init_environ(self, ctx, config, rc, scriptcache, cacheall):
self.ctx = {} if ctx is None else ctx
self.execer = Execer(config=config, login=self.login, xonsh_ctx=self.ctx)
debug = to_bool_or_int(os.getenv('XONSH_DEBUG', '0'))
self.execer = Execer(config=config, login=self.login, xonsh_ctx=self.ctx,
debug_level=debug)
self.execer.scriptcache = scriptcache
self.execer.cacheall = cacheall
if self.stype != 'none' or self.login:

View file

@ -21,6 +21,8 @@ import termios
import tempfile
import threading
from xonsh.lazyasd import LazyObject
# The following escape codes are xterm codes.
# See http://rtfm.etla.org/xterm/ctlseq.html for more.
MODE_NUMS = ('1049', '47', '1047')
@ -28,8 +30,10 @@ START_ALTERNATE_MODE = frozenset('\x1b[?{0}h'.format(i).encode() for i in MODE_N
END_ALTERNATE_MODE = frozenset('\x1b[?{0}l'.format(i).encode() for i in MODE_NUMS)
ALTERNATE_MODE_FLAGS = tuple(START_ALTERNATE_MODE) + tuple(END_ALTERNATE_MODE)
RE_HIDDEN = re.compile(b'(\001.*?\002)')
RE_COLOR = re.compile(b'\033\[\d+;?\d*m')
RE_HIDDEN = LazyObject(lambda: re.compile(b'(\001.*?\002)'),
globals(), 'RE_HIDDEN')
RE_COLOR = LazyObject(lambda: re.compile(b'\033\[\d+;?\d*m'),
globals(), 'RE_COLOR')
def _findfirst(s, substrs):
"""Finds whichever of the given substrings occurs first in the given string
@ -89,7 +93,7 @@ class TeePTY(object):
self.encoding = encoding
self.errors = errors
self.buffer = io.BytesIO()
self.wcode = None # os.wait encoded retval
self.wcode = None # os.wait encoded retval
self._temp_stdin = None
def __str__(self):
@ -320,8 +324,7 @@ class TeePTY(object):
if 0.0 < delay:
time.sleep(delay)
if __name__ == '__main__':
def _teepty_main():
tpty = TeePTY()
tpty.spawn(sys.argv[1:])
print('-=-'*10)
@ -330,3 +333,6 @@ if __name__ == '__main__':
print(tpty)
print('-=-'*10)
print('Returned with status {0}'.format(tpty.wcode))
if __name__ == '__main__':
_teepty_main()

View file

@ -26,11 +26,15 @@ import re
import sys
from token import *
from xonsh.lazyasd import LazyObject
from xonsh.platform import PYTHON_VERSION_INFO
cookie_re = re.compile(r'^[ \t\f]*#.*coding[:=][ \t]*([-\w.]+)', re.ASCII)
blank_re = re.compile(br'^[ \t\f]*(?:[#\r\n]|$)', re.ASCII)
cookie_re = LazyObject(
lambda: re.compile(r'^[ \t\f]*#.*coding[:=][ \t]*([-\w.]+)', re.ASCII),
globals(), 'cookie_re')
blank_re = LazyObject(lambda: re.compile(br'^[ \t\f]*(?:[#\r\n]|$)', re.ASCII),
globals(), 'blank_re')
import token
__all__ = token.__all__ + ["COMMENT", "tokenize", "detect_encoding",
@ -158,15 +162,15 @@ class TokenInfo(collections.namedtuple('TokenInfo', 'type string start end line'
return self.type
def group(*choices): return '(' + '|'.join(choices) + ')'
def any(*choices): return group(*choices) + '*'
def tokany(*choices): return group(*choices) + '*'
def maybe(*choices): return group(*choices) + '?'
# Note: we use unicode matching for names ("\w") but ascii matching for
# number literals.
Whitespace = r'[ \f\t]*'
Comment = r'#[^\r\n]*'
Ignore = Whitespace + any(r'\\\r?\n' + Whitespace) + maybe(Comment)
Name = r'\$?\w+'
Ignore = Whitespace + tokany(r'\\\r?\n' + Whitespace) + maybe(Comment)
Name_RE = r'\$?\w+'
Hexnumber = r'0[xX][0-9a-fA-F]+'
Binnumber = r'0[bB][01]+'
@ -217,7 +221,7 @@ Bracket = '[][(){}]'
Special = group(r'\r?\n', r'\.\.\.', r'[:;.,@]')
Funny = group(Operator, Bracket, Special)
PlainToken = group(IORedirect, Number, Funny, String, Name, SearchPath)
PlainToken = group(IORedirect, Number, Funny, String, Name_RE, SearchPath)
Token = Ignore + PlainToken
# First (or only) line of ' or " string.
@ -227,7 +231,7 @@ ContStr = group(StringPrefix + r"'[^\n'\\]*(?:\\.[^\n'\\]*)*" +
group('"', r'\\\r?\n'))
PseudoExtras = group(r'\\\r?\n|\Z', Comment, Triple, SearchPath)
PseudoToken = Whitespace + group(PseudoExtras, IORedirect, Number, Funny,
ContStr, Name)
ContStr, Name_RE)
def _compile(expr):
return re.compile(expr, re.UNICODE)
@ -507,7 +511,7 @@ def detect_encoding(readline):
return default, [first, second]
def open(filename):
def _tokopen(filename):
"""Open a file in read only mode using the encoding detected by
detect_encoding().
"""
@ -781,7 +785,7 @@ def tokenize(readline):
def generate_tokens(readline):
return _tokenize(readline, None)
def main():
def tokenize_main():
import argparse
# Helper error handling routines

View file

@ -21,25 +21,33 @@ import os
import re
import sys
import ast
import glob
import string
import ctypes
import builtins
import subprocess
import warnings
import functools
import threading
import traceback
from glob import iglob
from warnings import warn
import subprocess
import collections
import collections.abc as abc
from contextlib import contextmanager
from subprocess import CalledProcessError
from collections import OrderedDict, Sequence, Set
# adding further imports from xonsh modules is discouraged to avoid cirular
# dependencies
from xonsh.platform import (has_prompt_toolkit, scandir, win_unicode_console,
DEFAULT_ENCODING, ON_LINUX, ON_WINDOWS,
PYTHON_VERSION_INFO)
from xonsh.lazyasd import LazyObject, LazyDict
from xonsh.platform import (has_prompt_toolkit, scandir,
DEFAULT_ENCODING, ON_LINUX, ON_WINDOWS, PYTHON_VERSION_INFO)
IS_SUPERUSER = ctypes.windll.shell32.IsUserAnAdmin() != 0 if ON_WINDOWS else os.getuid() == 0
@functools.lru_cache(1)
def is_superuser():
if ON_WINDOWS:
rtn = (ctypes.windll.shell32.IsUserAnAdmin() != 0)
else:
rtn = (os.getuid() == 0)
return rtn
class XonshError(Exception):
@ -95,14 +103,26 @@ class XonshCalledProcessError(XonshError, CalledProcessError):
class DefaultNotGivenType(object):
"""Singleton for representing when no default value is given."""
__inst = None
def __new__(cls):
if DefaultNotGivenType.__inst is None:
DefaultNotGivenType.__inst = object.__new__(cls)
return DefaultNotGivenType.__inst
DefaultNotGiven = DefaultNotGivenType()
BEG_TOK_SKIPS = frozenset(['WS', 'INDENT', 'NOT', 'LPAREN'])
END_TOK_TYPES = frozenset(['SEMI', 'AND', 'OR', 'RPAREN'])
RE_END_TOKS = re.compile('(;|and|\&\&|or|\|\||\))')
LPARENS = frozenset(['LPAREN', 'AT_LPAREN', 'BANG_LPAREN', 'DOLLAR_LPAREN',
'ATDOLLAR_LPAREN'])
BEG_TOK_SKIPS = LazyObject(
lambda: frozenset(['WS', 'INDENT', 'NOT', 'LPAREN']),
globals(), 'BEG_TOK_SKIPS')
END_TOK_TYPES = LazyObject(lambda: frozenset(['SEMI', 'AND', 'OR', 'RPAREN']),
globals(), 'END_TOK_TYPES')
RE_END_TOKS = LazyObject(lambda: re.compile('(;|and|\&\&|or|\|\||\))'),
globals(), 'RE_END_TOKS')
LPARENS = LazyObject(lambda: frozenset(['LPAREN', 'AT_LPAREN', 'BANG_LPAREN',
'DOLLAR_LPAREN', 'ATDOLLAR_LPAREN']),
globals(), 'LPARENS')
def _is_not_lparen_and_rparen(lparens, rtok):
@ -449,7 +469,7 @@ def suggest_commands(cmd, env, aliases):
and levenshtein(_file.lower(), cmd, thresh) < thresh:
suggested[_file] = 'Command ({0})'.format(os.path.join(path, _file))
suggested = OrderedDict(
suggested = collections.OrderedDict(
sorted(suggested.items(),
key=lambda x: suggestion_sort_helper(x[0].lower(), cmd)))
num = min(len(suggested), max_sugg)
@ -678,7 +698,7 @@ def is_env_path(x):
if isinstance(x, str):
return False
else:
return (isinstance(x, Sequence) and
return (isinstance(x, abc.Sequence) and
all(isinstance(a, str) for a in x))
@ -739,7 +759,8 @@ def logfile_opt_to_str(x):
return str(x)
_FALSES = frozenset(['', '0', 'n', 'f', 'no', 'none', 'false'])
_FALSES = LazyObject(lambda: frozenset(['', '0', 'n', 'f', 'no', 'none',
'false']), globals(), '_FALSES')
def to_bool(x):
@ -803,7 +824,7 @@ def ensure_int_or_slice(x):
def is_string_set(x):
"""Tests if something is a set"""
return (isinstance(x, Set) and
return (isinstance(x, abc.Set) and
all(isinstance(a, str) for a in x))
@ -822,7 +843,7 @@ def set_to_csv(x):
def is_bool_seq(x):
"""Tests if an object is a sequence of bools."""
return isinstance(x, Sequence) and all(isinstance(y, bool) for y in x)
return isinstance(x, abc.Sequence) and all(isinstance(y, bool) for y in x)
def csv_to_bool_seq(x):
@ -848,14 +869,19 @@ def to_completions_display_value(x):
elif x == 'single':
pass
else:
warn('"{}" is not a valid value for $COMPLETIONS_DISPLAY. '.format(x) +
'Using "multi".', RuntimeWarning)
msg = '"{}" is not a valid value for $COMPLETIONS_DISPLAY. '.format(x)
msg += 'Using "multi".'
warnings.warn(msg, RuntimeWarning)
x = 'multi'
return x
def setup_win_unicode_console(enable):
""""Enables or disables unicode display on windows."""
try:
import win_unicode_console
except ImportError:
win_unicode_console = False
enable = to_bool(enable)
if ON_WINDOWS and win_unicode_console:
if enable:
@ -876,9 +902,11 @@ _mb_to_b = lambda x: 1024 * _kb_to_b(x)
_gb_to_b = lambda x: 1024 * _mb_to_b(x)
_tb_to_b = lambda x: 1024 * _tb_to_b(x)
CANON_HISTORY_UNITS = frozenset(['commands', 'files', 's', 'b'])
CANON_HISTORY_UNITS = LazyObject(
lambda: frozenset(['commands', 'files', 's', 'b']),
globals(), 'CANON_HISTORY_UNITS')
HISTORY_UNITS = {
HISTORY_UNITS = LazyObject(lambda: {
'': ('commands', int),
'c': ('commands', int),
'cmd': ('commands', int),
@ -928,13 +956,14 @@ HISTORY_UNITS = {
'tb': ('b', _tb_to_b),
'terabyte': ('b', _tb_to_b),
'terabytes': ('b', _tb_to_b),
}
}, globals(), 'HISTORY_UNITS')
"""Maps lowercase unit names to canonical name and conversion utilities."""
def is_history_tuple(x):
"""Tests if something is a proper history value, units tuple."""
if isinstance(x, Sequence) and len(x) == 2 and isinstance(x[0], (int, float)) \
and x[1].lower() in CANON_HISTORY_UNITS:
if isinstance(x, abc.Sequence) and len(x) == 2 and \
isinstance(x[0], (int, float)) and \
x[1].lower() in CANON_HISTORY_UNITS:
return True
return False
@ -969,11 +998,13 @@ def dynamic_cwd_tuple_to_str(x):
return str(x[0])
RE_HISTORY_TUPLE = re.compile('([-+]?[0-9]*\.?[0-9]+([eE][-+]?[0-9]+)?)\s*([A-Za-z]*)')
RE_HISTORY_TUPLE = LazyObject(
lambda: re.compile('([-+]?[0-9]*\.?[0-9]+([eE][-+]?[0-9]+)?)\s*([A-Za-z]*)'),
globals(), 'RE_HISTORY_TUPLE')
def to_history_tuple(x):
"""Converts to a canonincal history tuple."""
if not isinstance(x, (Sequence, float, int)):
if not isinstance(x, (abc.Sequence, float, int)):
raise ValueError('history size must be given as a sequence or number')
if isinstance(x, str):
m = RE_HISTORY_TUPLE.match(x.strip().lower())
@ -1083,8 +1114,9 @@ def expand_gray_colors_for_cmd_exe(style_map):
def intensify_colors_on_win_setter(enable):
""" Resets the style when setting the INTENSIFY_COLORS_ON_WIN
environment variable. """
"""Resets the style when setting the INTENSIFY_COLORS_ON_WIN
environment variable.
"""
enable = to_bool(enable)
delattr(builtins.__xonsh_shell__.shell.styler, 'style_name')
return enable
@ -1099,22 +1131,24 @@ _STRINGS = (_RE_STRING_TRIPLE_DOUBLE,
_RE_STRING_TRIPLE_SINGLE,
_RE_STRING_DOUBLE,
_RE_STRING_SINGLE)
RE_BEGIN_STRING = re.compile("(" + _RE_STRING_START +
'(' + "|".join(_STRINGS) +
'))')
RE_BEGIN_STRING = LazyObject(
lambda: re.compile("(" + _RE_STRING_START + \
'(' + "|".join(_STRINGS) + '))'),
globals(), 'RE_BEGIN_STRING')
"""Regular expression matching the start of a string, including quotes and
leading characters (r, b, or u)"""
RE_STRING_START = re.compile(_RE_STRING_START)
RE_STRING_START = LazyObject(lambda: re.compile(_RE_STRING_START),
globals(), 'RE_STRING_START')
"""Regular expression matching the characters before the quotes when starting a
string (r, b, or u, case insensitive)"""
RE_STRING_CONT = {k: re.compile(v) for k,v in {
'"': r'((\\(.|\n))|([^"\\]))*',
"'": r"((\\(.|\n))|([^'\\]))*",
'"""': r'((\\(.|\n))|([^"\\])|("(?!""))|\n)*',
"'''": r"((\\(.|\n))|([^'\\])|('(?!''))|\n)*",
}.items()}
RE_STRING_CONT = LazyDict({
'"': lambda: re.compile(r'((\\(.|\n))|([^"\\]))*'),
"'": lambda: re.compile(r"((\\(.|\n))|([^'\\]))*"),
'"""': lambda: re.compile(r'((\\(.|\n))|([^"\\])|("(?!""))|\n)*'),
"'''": lambda: re.compile(r"((\\(.|\n))|([^'\\])|('(?!''))|\n)*"),
}, globals(), 'RE_STRING_CONT')
"""Dictionary mapping starting quote sequences to regular expressions that
match the contents of a string beginning with those quotes (not including the
terminating quotes)"""
@ -1321,7 +1355,7 @@ def normabspath(p):
return os.path.normcase(os.path.abspath(p))
class CommandsCache(Set):
class CommandsCache(abc.Set):
"""A lazy cache representing the commands available on the file system."""
def __init__(self):
@ -1390,7 +1424,8 @@ class CommandsCache(Set):
return len(self._cmds_cache)
WINDOWS_DRIVE_MATCHER = re.compile(r'^\w:')
WINDOWS_DRIVE_MATCHER = LazyObject(lambda: re.compile(r'^\w:'),
globals(), 'WINDOWS_DRIVE_MATCHER')
def expand_case_matching(s):
@ -1446,9 +1481,9 @@ def _iglobpath(s, ignore_case=False):
if '**' in s and '**/*' not in s:
s = s.replace('**', '**/*')
# `recursive` is only a 3.5+ kwarg.
return iglob(s, recursive=True), s
return glob.iglob(s, recursive=True), s
else:
return iglob(s), s
return glob.iglob(s), s
def iglobpath(s, ignore_case=False):
"""Simple wrapper around iglob that also expands home and env vars."""

View file

@ -4,19 +4,24 @@ import re
import sys
import inspect
import linecache
import importlib
from functools import lru_cache
from argparse import ArgumentParser
from xonsh.tools import DefaultNotGiven, print_color, normabspath, to_bool
from xonsh.lazyasd import LazyObject
from xonsh.platform import HAS_PYGMENTS
from xonsh import inspectors
from xonsh.environ import _replace_home as replace_home
from xonsh.tools import DefaultNotGiven, print_color, normabspath, to_bool
from xonsh.inspectors import find_file, getouterframes
from xonsh.environ import _replace_home
if HAS_PYGMENTS:
from xonsh import pyghooks
import pygments
import pygments.formatters.terminal
pygments = LazyObject(lambda: importlib.import_module('pygments'),
globals(), 'pygments')
terminal = LazyObject(lambda: importlib.import_module(
'pygments.formatters.terminal'),
globals(), 'terminal')
pyghooks = LazyObject(lambda: importlib.import_module('xonsh.pyghooks'),
globals(), 'pyghooks')
class TracerType(object):
"""Represents a xonsh tracer object, which keeps track of all tracing
@ -35,7 +40,7 @@ class TracerType(object):
self.files = set()
self.usecolor = True
self.lexer = pyghooks.XonshLexer()
self.formatter = pygments.formatters.terminal.TerminalFormatter()
self.formatter = terminal.TerminalFormatter()
self._last = ('', -1) # filename, lineno tuple
def __del__(self):
@ -50,7 +55,7 @@ class TracerType(object):
files.add(normabspath(filename))
sys.settrace(self.trace)
curr = inspect.currentframe()
for frame, fname, *_ in inspectors.getouterframes(curr, context=0):
for frame, fname, *_ in getouterframes(curr, context=0):
if normabspath(fname) in files:
frame.f_trace = self.trace
@ -61,7 +66,7 @@ class TracerType(object):
if len(self.files) == 0:
sys.settrace(self.prev_tracer)
curr = inspect.currentframe()
for frame, fname, *_ in inspectors.getouterframes(curr, context=0):
for frame, fname, *_ in getouterframes(curr, context=0):
if normabspath(fname) == filename:
frame.f_trace = self.prev_tracer
self.prev_tracer = DefaultNotGiven
@ -70,20 +75,22 @@ class TracerType(object):
"""Implements a line tracing function."""
if event not in self.valid_events:
return self.trace
fname = inspectors.find_file(frame)
fname = find_file(frame)
if fname in self.files:
lineno = frame.f_lineno
curr = (fname, lineno)
if curr != self._last:
line = linecache.getline(fname, lineno).rstrip()
s = format_line(fname, lineno, line, color=self.usecolor,
lexer=self.lexer, formatter=self.formatter)
s = tracer_format_line(fname, lineno, line,
color=self.usecolor,
lexer=self.lexer,
formatter=self.formatter)
print_color(s)
self._last = curr
return self.trace
tracer = TracerType()
tracer = LazyObject(TracerType, globals(), 'tracer')
COLORLESS_LINE = '{fname}:{lineno}:{line}'
COLOR_LINE = ('{{PURPLE}}{fname}{{BLUE}}:'
@ -91,9 +98,9 @@ COLOR_LINE = ('{{PURPLE}}{fname}{{BLUE}}:'
'{{NO_COLOR}}')
def format_line(fname, lineno, line, color=True, lexer=None, formatter=None):
def tracer_format_line(fname, lineno, line, color=True, lexer=None, formatter=None):
"""Formats a trace line suitable for printing."""
fname = min(fname, replace_home(fname), os.path.relpath(fname), key=len)
fname = min(fname, _replace_home(fname), os.path.relpath(fname), key=len)
if not color:
return COLORLESS_LINE.format(fname=fname, lineno=lineno, line=line)
cline = COLOR_LINE.format(fname=fname, lineno=lineno)
@ -114,11 +121,11 @@ def _find_caller(args):
"""Somewhat hacky method of finding the __file__ based on the line executed."""
re_line = re.compile(r'[^;\s|&<>]+\s+' + r'\s+'.join(args))
curr = inspect.currentframe()
for _, fname, lineno, _, lines, _ in inspectors.getouterframes(curr, context=1)[3:]:
for _, fname, lineno, _, lines, _ in getouterframes(curr, context=1)[3:]:
if lines is not None and re_line.search(lines[0]) is not None:
return fname
elif lineno == 1 and re_line.search(linecache.getline(fname, lineno)) is not None:
# There is a bug in CPython such that inspectors.getouterframes(curr, context=1)
# There is a bug in CPython such that getouterframes(curr, context=1)
# will actually return the 2nd line in the code_context field, even though
# line number is itself correct. We manually fix that in this branch.
return fname
@ -154,8 +161,8 @@ def _color(ns, args):
tracer.usecolor = ns.toggle
@lru_cache()
def _create_parser():
@lru_cache(1)
def _tracer_create_parser():
"""Creates tracer argument parser"""
p = ArgumentParser(prog='trace',
description='tool for tracing xonsh code as it runs.')
@ -176,7 +183,7 @@ def _create_parser():
return p
_MAIN_ACTIONS = {
_TRACER_MAIN_ACTIONS = {
'on': _on,
'add': _on,
'start': _on,
@ -188,12 +195,12 @@ _MAIN_ACTIONS = {
}
def main(args=None):
def tracermain(args=None):
"""Main function for tracer command-line interface."""
parser = _create_parser()
parser = _tracer_create_parser()
ns = parser.parse_args(args)
return _MAIN_ACTIONS[ns.action](ns, args)
return _TRACER_MAIN_ACTIONS[ns.action](ns, args)
if __name__ == '__main__':
main()
tracermain()

View file

@ -1,10 +1,8 @@
import os
from os.path import join, basename, exists, expanduser
import venv
import builtins
from shutil import rmtree
import shutil
import xonsh.tools
from xonsh.platform import ON_POSIX, ON_WINDOWS, scandir
@ -15,9 +13,9 @@ class Vox:
"""Ensure that $VIRTUALENV_HOME is defined and declare the available vox commands"""
if not builtins.__xonsh_env__.get('VIRTUALENV_HOME'):
home_path = expanduser('~')
builtins.__xonsh_env__['VIRTUALENV_HOME'] = join(home_path, '.virtualenvs')
home_path = os.path.expanduser('~')
venvdir = os.path.join(home_path, '.virtualenvs')
builtins.__xonsh_env__['VIRTUALENV_HOME'] = venvdir
self.commands = {
('new',): self.create_env,
@ -58,14 +56,11 @@ class Vox:
name : str
Virtual environment name
"""
env_path = join(builtins.__xonsh_env__['VIRTUALENV_HOME'], name)
env_path = os.path.join(builtins.__xonsh_env__['VIRTUALENV_HOME'], name)
print('Creating environment...')
venv.create(env_path, with_pip=True)
print('Environment "%s" created. Activate it with "vox activate %s".\n' % (name, name))
msg = 'Environment {0!r} created. Activate it with "vox activate {0}".\n'
print(msg.format(name))
def activate_env(self, name):
"""Activate a virtual environment.
@ -75,31 +70,23 @@ class Vox:
name : str
Virtual environment name
"""
env_path = join(builtins.__xonsh_env__['VIRTUALENV_HOME'], name)
if not exists(env_path):
env = builtins.__xonsh_env__
env_path = os.path.join(env['VIRTUALENV_HOME'], name)
if not os.path.exists(env_path):
print('This environment doesn\'t exist. Create it with "vox new %s".\n' % name)
return None
if ON_WINDOWS:
bin_dir = 'Scripts'
elif ON_POSIX:
bin_dir = 'bin'
else:
print('This OS is not supported.')
return None
bin_path = join(env_path, bin_dir)
if 'VIRTUAL_ENV' in __xonsh_env__:
bin_path = os.path.join(env_path, bin_dir)
if 'VIRTUAL_ENV' in env:
self.deactivate_env()
__xonsh_env__['PATH'].insert(0, bin_path)
__xonsh_env__['VIRTUAL_ENV'] = env_path
env['PATH'].insert(0, bin_path)
env['VIRTUAL_ENV'] = env_path
print('Activated "%s".\n' % name)
@staticmethod
@ -112,9 +99,9 @@ class Vox:
env_path = __xonsh_env__['VIRTUAL_ENV']
env_name = basename(env_path)
env_name = os.path.basename(env_path)
if xonsh.tools.ON_WINDOWS:
if ON_WINDOWS:
bin_dir = 'Scripts'
elif ON_POSIX:
@ -124,7 +111,7 @@ class Vox:
print('This OS is not supported.')
return None
bin_path = join(env_path, bin_dir)
bin_path = os.path.join(env_path, bin_dir)
while bin_path in __xonsh_env__['PATH']:
__xonsh_env__['PATH'].remove(bin_path)
@ -161,15 +148,11 @@ class Vox:
name : str
virtual environment name
"""
if 'VIRTUAL_ENV' in __xonsh_env__:
print('This environment is currently active. If you really want to remove it, deactivate it first with "vox deactivate %s".\n' % name)
return None
env_path = join(builtins.__xonsh_env__['VIRTUALENV_HOME'], name)
rmtree(env_path)
env_path = os.path.join(builtins.__xonsh_env__['VIRTUALENV_HOME'], name)
shutil.rmtree(env_path)
print('Environment "%s" removed.\n' % name)
def show_help(self):

View file

@ -15,14 +15,14 @@ try:
except ImportError:
from xonsh import ply
import xonsh.wizard as wiz
from xonsh import __version__ as XONSH_VERSION
from xonsh.environ import is_template_string
from xonsh import platform
from xonsh.platform import is_readline_available, ptk_version
from xonsh import tools
from xonsh.wizard import (Wizard, Pass, Message, Save, Load, YesNo, Input,
PromptVisitor, While, StoreNonEmpty, create_truefalse_cond, YN, Unstorable,
Question)
from xonsh.platform import (is_readline_available, ptk_version,
PYTHON_VERSION_INFO, pygments_version, ON_POSIX, ON_LINUX, linux_distro,
ON_DARWIN, ON_WINDOWS, ON_CYGWIN, DEFAULT_ENCODING)
from xonsh.tools import (to_bool, is_string, print_exception, is_superuser,
color_style_names, print_color, color_style)
from xonsh.xontribs import xontrib_metadata, find_xontrib
@ -83,7 +83,7 @@ values are presented as pretty repr strings of their Python types.
will accept the default value for that entry.
""".format(hr=HR)
WIZARD_ENV_QUESTION = "Would you like to set env vars now, " + YN
WIZARD_ENV_QUESTION = "Would you like to set env vars now, " + wiz.YN
WIZARD_XONTRIB = """
{hr}
@ -101,7 +101,7 @@ The following describes all xontribs that have been registered with xonsh.
These come from users, 3rd party developers, or xonsh itself!
""".format(hr=HR)
WIZARD_XONTRIB_QUESTION = "Would you like to enable xontribs now, " + YN
WIZARD_XONTRIB_QUESTION = "Would you like to enable xontribs now, " + wiz.YN
WIZARD_TAIL = """
@ -109,45 +109,46 @@ Thanks for using the xonsh configuration wizard!"""
def make_fs():
def make_fs_wiz():
"""Makes the foreign shell part of the wizard."""
cond = create_truefalse_cond(prompt='Add a foreign shell, ' + YN)
fs = While(cond=cond, body=[
Input('shell name (e.g. bash): ', path='/foreign_shells/{idx}/shell'),
StoreNonEmpty('interactive shell [bool, default=True]: ',
converter=tools.to_bool,
show_conversion=True,
path='/foreign_shells/{idx}/interactive'),
StoreNonEmpty('login shell [bool, default=False]: ',
converter=tools.to_bool,
show_conversion=True,
path='/foreign_shells/{idx}/login'),
StoreNonEmpty("env command [str, default='env']: ",
path='/foreign_shells/{idx}/envcmd'),
StoreNonEmpty("alias command [str, default='alias']: ",
path='/foreign_shells/{idx}/aliascmd'),
StoreNonEmpty(("extra command line arguments [list of str, "
"default=[]]: "),
converter=ast.literal_eval,
show_conversion=True,
path='/foreign_shells/{idx}/extra_args'),
StoreNonEmpty('current environment [dict, default=None]: ',
converter=ast.literal_eval,
show_conversion=True,
path='/foreign_shells/{idx}/currenv'),
StoreNonEmpty('safely handle exceptions [bool, default=True]: ',
converter=tools.to_bool,
show_conversion=True,
path='/foreign_shells/{idx}/safe'),
StoreNonEmpty("pre-command [str, default='']: ",
path='/foreign_shells/{idx}/prevcmd'),
StoreNonEmpty("post-command [str, default='']: ",
path='/foreign_shells/{idx}/postcmd'),
StoreNonEmpty("foreign function command [str, default=None]: ",
path='/foreign_shells/{idx}/funcscmd'),
StoreNonEmpty("source command [str, default=None]: ",
path='/foreign_shells/{idx}/sourcer'),
Message(message='') # inserts a newline
cond = wiz.create_truefalse_cond(prompt='Add a foreign shell, ' + wiz.YN)
fs = wiz.While(cond=cond, body=[
wiz.Input('shell name (e.g. bash): ',
path='/foreign_shells/{idx}/shell'),
wiz.StoreNonEmpty('interactive shell [bool, default=True]: ',
converter=to_bool,
show_conversion=True,
path='/foreign_shells/{idx}/interactive'),
wiz.StoreNonEmpty('login shell [bool, default=False]: ',
converter=to_bool,
show_conversion=True,
path='/foreign_shells/{idx}/login'),
wiz.StoreNonEmpty("env command [str, default='env']: ",
path='/foreign_shells/{idx}/envcmd'),
wiz.StoreNonEmpty("alias command [str, default='alias']: ",
path='/foreign_shells/{idx}/aliascmd'),
wiz.StoreNonEmpty(("extra command line arguments [list of str, "
"default=[]]: "),
converter=ast.literal_eval,
show_conversion=True,
path='/foreign_shells/{idx}/extra_args'),
wiz.StoreNonEmpty('current environment [dict, default=None]: ',
converter=ast.literal_eval,
show_conversion=True,
path='/foreign_shells/{idx}/currenv'),
wiz.StoreNonEmpty('safely handle exceptions [bool, default=True]: ',
converter=to_bool,
show_conversion=True,
path='/foreign_shells/{idx}/safe'),
wiz.StoreNonEmpty("pre-command [str, default='']: ",
path='/foreign_shells/{idx}/prevcmd'),
wiz.StoreNonEmpty("post-command [str, default='']: ",
path='/foreign_shells/{idx}/postcmd'),
wiz.StoreNonEmpty("foreign function command [str, default=None]: ",
path='/foreign_shells/{idx}/funcscmd'),
wiz.StoreNonEmpty("source command [str, default=None]: ",
path='/foreign_shells/{idx}/sourcer'),
wiz.Message(message='') # inserts a newline
])
return fs
@ -177,19 +178,19 @@ def make_envvar(name):
if '\n' in default:
default = '\n' + _wrap_paragraphs(default, width=69)
curr = env.get(name)
if tools.is_string(curr) and is_template_string(curr):
if is_string(curr) and is_template_string(curr):
curr = curr.replace('{', '{{').replace('}', '}}')
curr = pformat(curr, width=69)
if '\n' in curr:
curr = '\n' + curr
msg = ENVVAR_MESSAGE.format(name=name, default=default, current=curr,
docstr=_wrap_paragraphs(vd.docstr, width=69))
mnode = Message(message=msg)
mnode = wiz.Message(message=msg)
ens = env.get_ensurer(name)
path = '/env/' + name
pnode = StoreNonEmpty(ENVVAR_PROMPT, converter=ens.convert,
show_conversion=True, path=path, retry=True,
store_raw=vd.store_as_str)
pnode = wiz.StoreNonEmpty(ENVVAR_PROMPT, converter=ens.convert,
show_conversion=True, path=path, retry=True,
store_raw=vd.store_as_str)
return mnode, pnode
@ -200,17 +201,17 @@ def _make_flat_wiz(kidfunc, *args):
if k is None:
continue
flatkids.extend(k)
wiz = Wizard(children=flatkids)
wiz = wiz.Wizard(children=flatkids)
return wiz
def make_env():
def make_env_wiz():
"""Makes an environment variable wizard."""
w = _make_flat_wiz(make_envvar, sorted(builtins.__xonsh_env__._docs.keys()))
return w
XONTRIB_PROMPT = '{BOLD_GREEN}Add this xontrib{NO_COLOR}, ' + YN
XONTRIB_PROMPT = '{BOLD_GREEN}Add this xontrib{NO_COLOR}, ' + wiz.YN
def _xontrib_path(visitor=None, node=None, val=None):
# need this to append only based on user-selected size
@ -238,14 +239,14 @@ def make_xontrib(xontrib, package):
msg += _wrap_paragraphs(desc, width=69)
if msg.endswith('\n'):
msg = msg[:-1]
mnode = Message(message=msg)
convert = lambda x: name if tools.to_bool(x) else Unstorable
pnode = StoreNonEmpty(XONTRIB_PROMPT, converter=convert,
path=_xontrib_path)
mnode = wiz.Message(message=msg)
convert = lambda x: name if to_bool(x) else Unstorable
pnode = wiz.StoreNonEmpty(XONTRIB_PROMPT, converter=convert,
path=_xontrib_path)
return mnode, pnode
def make_xontribs():
def make_xontribs_wiz():
"""Makes a xontrib wizard."""
md = xontrib_metadata()
pkgs = [md['packages'].get(d.get('package', None), {}) for d in md['xontribs']]
@ -253,7 +254,7 @@ def make_xontribs():
return w
def make_wizard(default_file=None, confirm=False):
def make_xonfig_wizard(default_file=None, confirm=False):
"""Makes a configuration wizard for xonsh config file.
Parameters
@ -263,37 +264,40 @@ def make_wizard(default_file=None, confirm=False):
confirm : bool, optional
Confirm that the main part of the wizard should be run.
"""
wiz = Wizard(children=[
Message(message=WIZARD_HEAD),
Load(default_file=default_file, check=True),
Message(message=WIZARD_FS),
make_fs(),
Message(message=WIZARD_ENV),
YesNo(question=WIZARD_ENV_QUESTION, yes=make_env(), no=Pass()),
Message(message=WIZARD_XONTRIB),
YesNo(question=WIZARD_XONTRIB_QUESTION, yes=make_xontribs(), no=Pass()),
Message(message='\n' + HR + '\n'),
Save(default_file=default_file, check=True),
Message(message=WIZARD_TAIL),
w = wiz.Wizard(children=[
wiz.Message(message=WIZARD_HEAD),
wiz.Load(default_file=default_file, check=True),
wiz.Message(message=WIZARD_FS),
make_fs_wiz(),
wiz.Message(message=WIZARD_ENV),
wiz.YesNo(question=WIZARD_ENV_QUESTION, yes=make_env_wiz(),
no=wiz.Pass()),
wiz.Message(message=WIZARD_XONTRIB),
wiz.YesNo(question=WIZARD_XONTRIB_QUESTION, yes=make_xontribs_wiz(),
no=wiz.Pass()),
wiz.Message(message='\n' + HR + '\n'),
wiz.Save(default_file=default_file, check=True),
wiz.Message(message=WIZARD_TAIL),
])
if confirm:
q = ("Would you like to run the xonsh configuration wizard now?\n\n"
"1. Yes\n2. No, but ask me later.\n3. No, and don't ask me again."
"\n\n1, 2, or 3 [default: 2]? ")
passer = Pass()
saver = Save(check=False, ask_filename=False, default_file=default_file)
wiz = Question(q, {1: wiz, 2: passer, 3: saver},
converter=lambda x: int(x) if x != '' else 2)
return wiz
passer = wiz.Pass()
saver = wiz.Save(check=False, ask_filename=False,
default_file=default_file)
w = wiz.Question(q, {1: wiz, 2: passer, 3: saver},
converter=lambda x: int(x) if x != '' else 2)
return w
def _wizard(ns):
env = builtins.__xonsh_env__
shell = builtins.__xonsh_shell__.shell
fname = env.get('XONSHCONFIG') if ns.file is None else ns.file
wiz = make_wizard(default_file=fname, confirm=ns.confirm)
w = make_xonfig_wizard(default_file=fname, confirm=ns.confirm)
tempenv = {'PROMPT': '', 'XONSH_STORE_STDOUT': False}
pv = PromptVisitor(wiz, store_in_history=False, multiline=False)
pv = wiz.PromptVisitor(w, store_in_history=False, multiline=False)
@contextmanager
def force_hide():
if env.get('XONSH_STORE_STDOUT') and hasattr(shell, '_force_hide'):
@ -306,10 +310,10 @@ def _wizard(ns):
try:
pv.visit()
except (KeyboardInterrupt, Exception):
tools.print_exception()
print_exception()
def _format_human(data):
def _xonfig_format_human(data):
wcol1 = wcol2 = 0
for key, val in data:
wcol1 = max(wcol1, len(key))
@ -323,7 +327,7 @@ def _format_human(data):
return s
def _format_json(data):
def _xonfig_format_json(data):
data = {k.replace(' ', '_'): v for k, v in data}
s = json.dumps(data, sort_keys=True, indent=1) + '\n'
return s
@ -332,24 +336,24 @@ def _format_json(data):
def _info(ns):
data = [
('xonsh', XONSH_VERSION),
('Python', '{}.{}.{}'.format(*platform.PYTHON_VERSION_INFO)),
('Python', '{}.{}.{}'.format(*PYTHON_VERSION_INFO)),
('PLY', ply.__version__),
('have readline', is_readline_available()),
('prompt toolkit', ptk_version() or None),
('shell type', builtins.__xonsh_env__.get('SHELL_TYPE')),
('pygments', platform.PYGMENTS_VERSION),
('on posix', platform.ON_POSIX),
('on linux', platform.ON_LINUX)]
if platform.ON_LINUX:
data.append(('distro', platform.LINUX_DISTRO))
('pygments', pygments_version()),
('on posix', ON_POSIX),
('on linux', ON_LINUX)]
if ON_LINUX:
data.append(('distro', linux_distro()))
data.extend([
('on darwin', platform.ON_DARWIN),
('on windows', platform.ON_WINDOWS),
('on cygwin', platform.ON_CYGWIN),
('is superuser', tools.IS_SUPERUSER),
('default encoding', platform.DEFAULT_ENCODING),
('on darwin', ON_DARWIN),
('on windows', ON_WINDOWS),
('on cygwin', ON_CYGWIN),
('is superuser', is_superuser()),
('default encoding', DEFAULT_ENCODING),
])
formatter = _format_json if ns.json else _format_human
formatter = _xonfig_format_json if ns.json else __xonfig_format_human
s = formatter(data)
return s
@ -357,7 +361,7 @@ def _info(ns):
def _styles(ns):
env = builtins.__xonsh_env__
curr = env.get('XONSH_COLOR_STYLE')
styles = sorted(tools.color_style_names())
styles = sorted(color_style_names())
if ns.json:
s = json.dumps(styles, sort_keys=True, indent=1)
print(s)
@ -369,7 +373,7 @@ def _styles(ns):
else:
lines.append(' ' + style)
s = '\n'.join(lines)
tools.print_color(s)
print_color(s)
def _str_colors(cmap, cols):
@ -415,19 +419,19 @@ def _tok_colors(cmap, cols):
def _colors(ns):
cols, _ = shutil.get_terminal_size()
if tools.ON_WINDOWS:
if ON_WINDOWS:
cols -= 1
cmap = tools.color_style()
cmap = color_style()
akey = next(iter(cmap))
if isinstance(akey, str):
s = _str_colors(cmap, cols)
else:
s = _tok_colors(cmap, cols)
tools.print_color(s)
print_color(s)
@functools.lru_cache()
def _create_parser():
@functools.lru_cache(1)
def _xonfig_create_parser():
p = ArgumentParser(prog='xonfig',
description='Manages xonsh configuration.')
subp = p.add_subparsers(title='action', dest='action')
@ -449,24 +453,24 @@ def _create_parser():
return p
_MAIN_ACTIONS = {
_XONFIG_MAIN_ACTIONS = {
'info': _info,
'wizard': _wizard,
'styles': _styles,
'colors': _colors,
}
def main(args=None):
def xonfig_main(args=None):
"""Main xonfig entry point."""
if not args or (args[0] not in _MAIN_ACTIONS and
args[0] not in {'-h', '--help'}):
args.insert(0, 'info')
parser = _create_parser()
parser = _xonfig_create_parser()
ns = parser.parse_args(args)
if ns.action is None: # apply default action
ns = parser.parse_args(['info'] + args)
return _MAIN_ACTIONS[ns.action](ns)
return _XONFIG_MAIN_ACTIONS[ns.action](ns)
if __name__ == '__main__':
main()
xonfig_main()

View file

@ -12,7 +12,9 @@ from importlib.util import find_spec
from xonsh.tools import print_color
XONTRIBS_JSON = os.path.splitext(__file__)[0] + '.json'
@functools.lru_cache(1)
def xontribs_json():
return os.path.join(os.path.dirname(__file__), 'xontribs.json')
def find_xontrib(name):
"""Finds a xontribution from its name."""
@ -49,7 +51,7 @@ def update_context(name, ctx=None):
@functools.lru_cache()
def xontrib_metadata():
"""Loads and returns the xontribs.json file."""
with open(XONTRIBS_JSON, 'r') as f:
with open(xontribs_json(), 'r') as f:
md = json.load(f)
return md
@ -129,7 +131,7 @@ _MAIN_ACTIONS = {
'list': _list,
}
def main(args=None, stdin=None):
def xontribs_main(args=None, stdin=None):
"""Alias that loads xontribs"""
if not args or (args[0] not in _MAIN_ACTIONS and
args[0] not in {'-h', '--help'}):