many PEP8 changes

This commit is contained in:
adam j hartz 2015-04-02 19:07:31 -04:00
parent ea33a25e8f
commit 4837f3ab44
15 changed files with 841 additions and 550 deletions

View file

@ -11,6 +11,7 @@ from warnings import warn
from xonsh.dirstack import dirs, pushd, popd
from xonsh.jobs import jobs, fg, bg, kill_all_jobs
def cd(args, stdin=None):
"""Changes the directory.
@ -55,7 +56,9 @@ def source_bash(args, stdin=None):
args = ' '.join(args)
input = 'source {0}\nenv >> {1}\n'.format(args, f.name)
try:
subprocess.check_output(['bash'], input=input, env=denv,
subprocess.check_output(['bash'],
input=input,
env=denv,
stderr=subprocess.PIPE,
universal_newlines=True)
except subprocess.CalledProcessError:
@ -74,7 +77,8 @@ def source_bash(args, stdin=None):
def bash_aliases():
"""Computes a dictionary of aliases based on Bash's aliases."""
try:
s = subprocess.check_output(['bash', '-i'], input='alias',
s = subprocess.check_output(['bash', '-i'],
input='alias',
stderr=subprocess.PIPE,
universal_newlines=True)
except subprocess.CalledProcessError:
@ -109,7 +113,7 @@ DEFAULT_ALIASES = {
'grep': ['grep', '--color=auto'],
'scp-resume': ['rsync', '--partial', '-h', '--progress', '--rsh=ssh'],
'ipynb': ['ipython', 'notebook', '--no-browser'],
}
}
if platform.system() == 'Darwin':
DEFAULT_ALIASES['ls'] = ['ls', '-G']

View file

@ -15,9 +15,10 @@ from ast import Module, Num, Expr, Str, Bytes, UnaryOp, UAdd, USub, Invert, \
from xonsh.tools import subproc_toks
STATEMENTS = (FunctionDef, ClassDef, Return, Delete, Assign, AugAssign, For,
While, If, With, Raise, Try, Assert, Import, ImportFrom, Global,
While, If, With, Raise, Try, Assert, Import, ImportFrom, Global,
Nonlocal, Expr, Pass, Break, Continue)
def leftmostname(node):
"""Attempts to find the first name in the tree."""
if isinstance(node, Name):
@ -39,6 +40,7 @@ def leftmostname(node):
rtn = None
return rtn
class CtxAwareTransformer(NodeTransformer):
"""Transforms a xonsh AST based to use subprocess calls when
the first name in an expression statement is not known in the context.
@ -84,7 +86,7 @@ class CtxAwareTransformer(NodeTransformer):
def ctxupdate(self, iterable):
"""Updated the most recent context."""
self.contexts[-1].update(iterable)
def ctxadd(self, value):
"""Adds a value the most recent context."""
self.contexts[-1].add(value)
@ -101,8 +103,11 @@ class CtxAwareTransformer(NodeTransformer):
line = self.lines[node.lineno - 1]
mincol = len(line) - len(line.lstrip())
maxcol = None if self.mode == 'eval' else node.col_offset
spline = subproc_toks(line, mincol=mincol, maxcol=maxcol,
returnline=False, lexer=self.parser.lexer)
spline = subproc_toks(line,
mincol=mincol,
maxcol=maxcol,
returnline=False,
lexer=self.parser.lexer)
try:
newnode = self.parser.parse(spline, mode=self.mode)
newnode = newnode.body
@ -123,7 +128,7 @@ class CtxAwareTransformer(NodeTransformer):
inscope = False
for ctx in reversed(self.contexts):
if lname in ctx:
inscope = True
inscope = True
break
return inscope
@ -140,7 +145,8 @@ class CtxAwareTransformer(NodeTransformer):
else:
newnode = self.try_subproc_toks(node)
if not isinstance(newnode, Expr):
newnode = Expr(value=newnode, lineno=node.lineno,
newnode = Expr(value=newnode,
lineno=node.lineno,
col_offset=node.col_offset)
return newnode

View file

@ -28,9 +28,14 @@ from xonsh.jobs import ProcProxy
ENV = None
BUILTINS_LOADED = False
INSPECTOR = Inspector()
LOCALE_CAT = {'LC_CTYPE': locale.LC_CTYPE, 'LC_MESSAGES': locale.LC_MESSAGES,
'LC_COLLATE': locale.LC_COLLATE, 'LC_NUMERIC': locale.LC_NUMERIC,
'LC_MONETARY': locale.LC_MONETARY, 'LC_TIME': locale.LC_TIME}
LOCALE_CATS = {
'LC_CTYPE': locale.LC_CTYPE,
'LC_MESSAGES': locale.LC_MESSAGES,
'LC_COLLATE': locale.LC_COLLATE,
'LC_NUMERIC': locale.LC_NUMERIC,
'LC_MONETARY': locale.LC_MONETARY,
'LC_TIME': locale.LC_TIME
}
class Env(MutableMapping):
@ -53,7 +58,7 @@ class Env(MutableMapping):
"""If no initial environment is given, os.environ is used."""
self._d = {}
if len(args) == 0 and len(kwargs) == 0:
args = (os.environ,)
args = (os.environ, )
for key, val in dict(*args, **kwargs).items():
self[key] = val
self._detyped = None
@ -118,9 +123,9 @@ class Env(MutableMapping):
else val
elif key == 'XONSH_HISTORY_SIZE' and not isinstance(val, int):
val = int(val)
elif key in LOCALE_CAT:
locale.setlocale(LOCALE_CAT[key], val)
val = locale.setlocale(LOCALE_CAT[key])
elif key in LOCALE_CATS:
locale.setlocale(LOCALE_CATS[key], val)
val = locale.setlocale(LOCALE_CATS[key])
self._d[key] = val
self._detyped = None
@ -181,7 +186,7 @@ class Aliases(MutableMapping):
# only once.
if callable(value):
if acc_args: # Partial application
return lambda args, stdin=None: value(acc_args+args,
return lambda args, stdin=None: value(acc_args + args,
stdin=stdin)
else:
return value
@ -195,7 +200,7 @@ class Aliases(MutableMapping):
else:
return self.eval_alias(self._raw[token],
seen_tokens | {token},
rest+acc_args)
rest + acc_args)
#
# Mutable mapping interface
@ -302,10 +307,13 @@ def iglobpath(s):
s = expand_path(s)
return iglob(s)
WRITER_MODES = {'>': 'w', '>>': 'a'}
def _run_callable_subproc(alias, args, captured=True, prev_proc=None,
def _run_callable_subproc(alias, args,
captured=True,
prev_proc=None,
stdout=None):
"""Helper for running callables as a subprocess."""
# compute stdin for callable
@ -481,8 +489,10 @@ def run_subproc(cmds, captured=True):
if os.name == 'posix':
subproc_kwargs['preexec_fn'] = _subproc_pre
try:
proc = Popen(aliased_cmd, universal_newlines=uninew,
env=ENV.detype(), stdin=stdin,
proc = Popen(aliased_cmd,
universal_newlines=uninew,
env=ENV.detype(),
stdin=stdin,
stdout=stdout, **subproc_kwargs)
except PermissionError:
cmd = aliased_cmd[0]
@ -505,13 +515,15 @@ def run_subproc(cmds, captured=True):
pids = [i.pid for i in procs]
if not isinstance(prev_proc, ProcProxy):
builtins.__xonsh_active_job__ = num
builtins.__xonsh_all_jobs__[num] = {'cmds': cmds,
'pids': pids,
'obj': prev_proc,
'started': time.time(),
'pgrp': os.getpgid(prev_proc.pid),
'status': 'running',
'bg': background}
builtins.__xonsh_all_jobs__[num] = {
'cmds': cmds,
'pids': pids,
'obj': prev_proc,
'started': time.time(),
'pgrp': os.getpgid(prev_proc.pid),
'status': 'running',
'bg': background
}
if background:
print_one_job(num)
return
@ -600,14 +612,24 @@ def unload_builtins():
builtins.quit = builtins.__xonsh_pyquit__
if not BUILTINS_LOADED:
return
names = ['__xonsh_env__', '__xonsh_help__', '__xonsh_superhelp__',
'__xonsh_regexpath__', '__xonsh_glob__', '__xonsh_exit__',
'__xonsh_pyexit__', '__xonsh_pyquit__',
'__xonsh_subproc_captured__', '__xonsh_subproc_uncaptured__',
'__xonsh_execer__', 'evalx', 'execx', 'compilex',
'default_aliases', '__xonsh_all_jobs__', '__xonsh_active_job__',
'__xonsh_ensure_list_of_strs__',
]
names = ['__xonsh_env__',
'__xonsh_help__',
'__xonsh_superhelp__',
'__xonsh_regexpath__',
'__xonsh_glob__',
'__xonsh_exit__',
'__xonsh_pyexit__',
'__xonsh_pyquit__',
'__xonsh_subproc_captured__',
'__xonsh_subproc_uncaptured__',
'__xonsh_execer__',
'evalx',
'execx',
'compilex',
'default_aliases',
'__xonsh_all_jobs__',
'__xonsh_active_job__',
'__xonsh_ensure_list_of_strs__', ]
for name in names:
if hasattr(builtins, name):
delattr(builtins, name)

View file

@ -11,14 +11,16 @@ from xonsh.built_ins import iglobpath
RE_DASHF = re.compile(r'-F\s+(\w+)')
XONSH_TOKENS = {'and ', 'as ', 'assert ', 'break', 'class ', 'continue',
'def ', 'del ', 'elif ', 'else', 'except ', 'finally:', 'for ', 'from ',
'global ', 'import ', 'if ', 'in ', 'is ', 'lambda ', 'nonlocal ', 'not ',
'or ', 'pass', 'raise ', 'return ', 'try:', 'while ', 'with ', 'yield ',
'+', '-', '/', '//', '%', '**', '|', '&', '~', '^', '>>', '<<', '<', '<=',
'>', '>=', '==', '!=', '->', '=', '+=', '-=', '*=', '/=', '%=', '**=',
'>>=', '<<=', '&=', '^=', '|=', '//=', ',', ';', ':', '?', '??', '$(',
'${', '$[', '..', '...'}
XONSH_TOKENS = {
'and ', 'as ', 'assert ', 'break', 'class ', 'continue', 'def ', 'del ',
'elif ', 'else', 'except ', 'finally:', 'for ', 'from ', 'global ',
'import ', 'if ', 'in ', 'is ', 'lambda ', 'nonlocal ', 'not ', 'or ',
'pass', 'raise ', 'return ', 'try:', 'while ', 'with ', 'yield ', '+', '-',
'/', '//', '%', '**', '|', '&', '~', '^', '>>', '<<', '<', '<=', '>', '>=',
'==', '!=', '->', '=', '+=', '-=', '*=', '/=', '%=', '**=', '>>=', '<<=',
'&=', '^=', '|=', '//=', ',', ';', ':', '?', '??', '$(', '${', '$[', '..',
'...'
}
BASH_COMPLETE_SCRIPT = """source {filename}
COMP_WORDS=({line})
@ -30,6 +32,7 @@ COMP_CWORD={n}
for ((i=0;i<${{#COMPREPLY[*]}};i++)) do echo ${{COMPREPLY[i]}}; done
"""
class Completer(object):
"""This provides a list of optional completions for the xonsh shell."""
@ -85,8 +88,9 @@ class Completer(object):
rtn |= {s for s in dir(builtins) if s.startswith(prefix)}
rtn |= {s + space for s in builtins.aliases if s.startswith(prefix)}
if prefix.startswith('$'):
env = builtins.__xonsh_env__
key = prefix[1:]
rtn |= {'$'+k for k in builtins.__xonsh_env__ if k.startswith(key)}
rtn |= {'$' + k for k in env if k.startswith(key)}
rtn |= self.path_complete(prefix)
return sorted(rtn)
@ -139,13 +143,20 @@ class Completer(object):
if len(prefix) == 0:
prefix = '""'
n += 1
script = BASH_COMPLETE_SCRIPT.format(filename=fnme, line=line, n=n,
func=func, cmd=cmd, end=endidx+1, prefix=prefix, prev=prev)
out = subprocess.check_output(['bash'], input=script,
universal_newlines=True,
script = BASH_COMPLETE_SCRIPT.format(filename=fnme,
line=line,
n=n,
func=func,
cmd=cmd,
end=endidx + 1,
prefix=prefix,
prev=prev)
out = subprocess.check_output(['bash'],
input=script,
universal_newlines=True,
stderr=subprocess.PIPE)
space = ' '
rtn = {s+space if s[-1:].isalnum() else s for s in out.splitlines()}
rtn = {s + space if s[-1:].isalnum() else s for s in out.splitlines()}
return rtn
def _source_completions(self):
@ -161,7 +172,8 @@ class Completer(object):
if len(input) == 0:
return
input.append('complete -p\n')
out = subprocess.check_output(['bash'], input='\n'.join(input),
out = subprocess.check_output(['bash'],
input='\n'.join(input),
universal_newlines=True)
for line in out.splitlines():
head, cmd = line.rsplit(' ', 1)
@ -181,12 +193,15 @@ class Completer(object):
declare_f = 'declare -F '
input += [declare_f + f for f in self.bash_complete_funcs.values()]
input.append('shopt -u extdebug\n')
out = subprocess.check_output(['bash'], input='\n'.join(input),
out = subprocess.check_output(['bash'],
input='\n'.join(input),
universal_newlines=True)
func_files = {}
for line in out.splitlines():
parts = line.split()
func_files[parts[0]] = parts[-1]
self.bash_complete_files = {cmd: func_files[func] for cmd, func in
self.bash_complete_funcs.items()
if func in func_files}
self.bash_complete_files = {
cmd: func_files[func]
for cmd, func in self.bash_complete_funcs.items()
if func in func_files
}

View file

@ -61,12 +61,12 @@ def pushd(args, stdin=None):
if num == len(DIRSTACK):
new_pwd = None
else:
new_pwd = DIRSTACK.pop(len(DIRSTACK)-1-num)
new_pwd = DIRSTACK.pop(len(DIRSTACK) - 1 - num)
elif args.dir.startswith(BACKWARD):
if num == 0:
new_pwd = None
else:
new_pwd = DIRSTACK.pop(num-1)
new_pwd = DIRSTACK.pop(num - 1)
else:
e = 'Invalid argument to pushd: {0}\n'
return None, e.format(args.dir)
@ -139,13 +139,13 @@ def popd(args, stdin=None):
new_pwd = DIRSTACK.pop(0)
else:
new_pwd = None
DIRSTACK.pop(len(DIRSTACK)-1-num)
DIRSTACK.pop(len(DIRSTACK) - 1 - num)
elif args.dir.startswith(BACKWARD):
if num == 0:
new_pwd = DIRSTACK.pop(0)
else:
new_pwd = None
DIRSTACK.pop(num-1)
DIRSTACK.pop(num - 1)
else:
e = 'Invalid argument to popd: {0}\n'
return None, e.format(args.dir)
@ -201,7 +201,7 @@ def dirs(args, stdin=None):
if args.verbose:
out = ''
pad = len(str(len(o)-1))
pad = len(str(len(o) - 1))
for (ix, e) in enumerate(o):
blanks = ' ' * (pad - len(str(ix)))
out += '\n{0}{1} {2}'.format(blanks, ix, e)
@ -230,14 +230,14 @@ def dirs(args, stdin=None):
if N.startswith(BACKWARD):
idx = num
elif N.startswith(FORWARD):
idx = len(o)-1-num
idx = len(o) - 1 - num
else:
e = 'Invalid argument to dirs: {0}\n'
return None, e.format(N)
out = o[idx]
return out+'\n', None
return out + '\n', None
pushd_parser = ArgumentParser(prog="pushd")

View file

@ -12,6 +12,7 @@ from warnings import warn
from xonsh import __version__ as XONSH_VERSION
from xonsh.tools import TERM_COLORS
def current_branch(cwd=None):
"""Gets the branch for a current working directory. Returns None
if the cwd is not a repository. This currently only works for git,
@ -22,35 +23,38 @@ def current_branch(cwd=None):
# step out completely if git is not installed
try:
binary_location = subprocess.check_output(['which', 'git'], cwd=cwd,
stderr=subprocess.PIPE,
universal_newlines=True)
binary_location = subprocess.check_output(['which', 'git'],
cwd=cwd,
stderr=subprocess.PIPE,
universal_newlines=True)
if not binary_location:
return branch
except subprocess.CalledProcessError:
return branch
prompt_scripts = [
'/usr/lib/git-core/git-sh-prompt',
'/usr/local/etc/bash_completion.d/git-prompt.sh'
]
prompt_scripts = ['/usr/lib/git-core/git-sh-prompt',
'/usr/local/etc/bash_completion.d/git-prompt.sh']
for script in prompt_scripts:
# note that this is about 10x faster than bash -i "__git_ps1"
_input = ('source {}; __git_ps1 "${{1:-%s}}"'.format(script))
try:
branch = subprocess.check_output(['bash',], cwd=cwd, input=_input,
stderr=subprocess.PIPE,
universal_newlines=True) or None
branch = subprocess.check_output(['bash', ],
cwd=cwd,
input=_input,
stderr=subprocess.PIPE,
universal_newlines=True) or None
except subprocess.CalledProcessError:
continue
# fall back to using the git binary if the above failed
if branch is None:
try:
s = subprocess.check_output(['git', 'rev-parse','--abbrev-ref', 'HEAD'],
stderr=subprocess.PIPE, cwd=cwd,
universal_newlines=True)
cmd = ['git', 'rev-parse', '--abbrev-ref', 'HEAD']
s = subprocess.check_output(cmd,
stderr=subprocess.PIPE,
cwd=cwd,
universal_newlines=True)
s = s.strip()
if len(s) > 0:
branch = s
@ -64,6 +68,7 @@ default_prompt = ('{BOLD_GREEN}{user}@{hostname}{BOLD_BLUE} '
'{cwd}{BOLD_RED}{curr_branch} {BOLD_BLUE}${NO_COLOR} ')
default_title = '{user}@{hostname}: {cwd} | xonsh'
def format_prompt(template=default_prompt):
"""Formats a xonsh prompt template string.
@ -85,18 +90,16 @@ def format_prompt(template=default_prompt):
cwd = env['PWD']
branch = current_branch(cwd=cwd)
branch = '' if branch is None else ' ' + branch
p = template.format(
user=env.get('USER', '<user>'),
hostname=socket.gethostname(),
cwd=cwd.replace(env['HOME'], '~'),
curr_branch=branch,
**TERM_COLORS
)
p = template.format(user=env.get('USER', '<user>'),
hostname=socket.gethostname(),
cwd=cwd.replace(env['HOME'], '~'),
curr_branch=branch, **TERM_COLORS)
return p
RE_HIDDEN = re.compile('\001.*?\002')
def multiline_prompt():
"""Returns the filler text for the prompt in multiline scenarios."""
curr = builtins.__xonsh_env__.get('PROMPT', "set '$PROMPT = ...' $ ")
@ -114,7 +117,7 @@ def multiline_prompt():
dots = dots() if callable(dots) else dots
if dots is None or len(dots) == 0:
return ''
return (dots*(headlen//len(dots))) + dots[:headlen%len(dots)] + tail
return (dots * (headlen // len(dots))) + dots[:headlen % len(dots)] + tail
BASE_ENV = {
@ -132,14 +135,18 @@ BASE_ENV = {
'LC_MONETARY': locale.setlocale(locale.LC_MONETARY),
'LC_MESSAGES': locale.setlocale(locale.LC_MESSAGES),
'LC_NUMERIC': locale.setlocale(locale.LC_NUMERIC),
}
}
if platform.system() == 'Darwin':
BASE_ENV['BASH_COMPLETIONS'] = ['/usr/local/etc/bash_completion',
'/opt/local/etc/profile.d/bash_completion.sh']
BASE_ENV['BASH_COMPLETIONS'] = [
'/usr/local/etc/bash_completion',
'/opt/local/etc/profile.d/bash_completion.sh'
]
else:
BASE_ENV['BASH_COMPLETIONS'] = ['/etc/bash_completion',
'/usr/share/bash-completion/completions/git']
BASE_ENV['BASH_COMPLETIONS'] = [
'/etc/bash_completion', '/usr/share/bash-completion/completions/git'
]
def bash_env():
"""Attempts to compute the bash envinronment variables."""
@ -147,7 +154,9 @@ def bash_env():
if hasattr(builtins, '__xonsh_env__'):
currenv = builtins.__xonsh_env__.detype()
try:
s = subprocess.check_output(['bash', '-i'], input='env', env=currenv,
s = subprocess.check_output(['bash', '-i'],
input='env',
env=currenv,
stderr=subprocess.PIPE,
universal_newlines=True)
except subprocess.CalledProcessError:
@ -156,6 +165,7 @@ def bash_env():
env = dict(items)
return env
def xonshrc_context(rcfile=None, execer=None):
"""Attempts to read in xonshrc file, and return the contents."""
if rcfile is None or execer is None or not os.path.isfile(rcfile):
@ -176,6 +186,7 @@ def xonshrc_context(rcfile=None, execer=None):
execer.filename = fname
return env
def default_env(env=None):
"""Constructs a default xonsh environment."""
# in order of increasing precedence

View file

@ -16,8 +16,11 @@ from xonsh.built_ins import load_builtins, unload_builtins
class Execer(object):
"""Executes xonsh code in a context."""
def __init__(self, filename='<xonsh-code>', debug_level=0,
parser_args=None, unload=True):
def __init__(self,
filename='<xonsh-code>',
debug_level=0,
parser_args=None,
unload=True):
"""Parameters
----------
filename : str, optional
@ -97,7 +100,10 @@ class Execer(object):
if isinstance(input, types.CodeType):
code = input
else:
code = self.compile(input=input, glbs=glbs, locs=locs, mode='eval',
code = self.compile(input=input,
glbs=glbs,
locs=locs,
mode='eval',
stacklevel=stacklevel)
if code is None:
return None # handles comment only input
@ -108,7 +114,10 @@ class Execer(object):
if isinstance(input, types.CodeType):
code = input
else:
code = self.compile(input=input, glbs=glbs, locs=locs, mode=mode,
code = self.compile(input=input,
glbs=glbs,
locs=locs,
mode=mode,
stacklevel=stacklevel)
if code is None:
return None # handles comment only input
@ -120,7 +129,8 @@ class Execer(object):
original_error = None
while not parsed:
try:
tree = self.parser.parse(input, filename=self.filename,
tree = self.parser.parse(input,
filename=self.filename,
mode=mode,
debug_level=self.debug_level)
parsed = True
@ -154,8 +164,10 @@ class Execer(object):
continue
maxcol = line.find(';', last_error_col)
maxcol = None if maxcol < 0 else maxcol + 1
sbpline = subproc_toks(line, returnline=True,
maxcol=maxcol, lexer=self.parser.lexer)
sbpline = subproc_toks(line,
returnline=True,
maxcol=maxcol,
lexer=self.parser.lexer)
if sbpline is None:
# subprocess line had no valid tokens, likely because
# it only contained a comment.

View file

@ -78,7 +78,7 @@ class XonshImportHook(MetaPathFinder, SourceLoader):
src = src if src.endswith('\n') else src + '\n'
execer = self.execer
execer.filename = filename
ctx = {} # dummy for modules
ctx = {} # dummy for modules
code = execer.compile(src, glbs=ctx, locs=ctx)
return code

View file

@ -15,6 +15,10 @@ import types
import inspect
import linecache
import io as stdlib_io
from xonsh import openpy
from xonsh.tools import cast_unicode, safe_hasattr, string_types, indent
if sys.version_info[0] > 2:
ISPY3K = True
from itertools import zip_longest
@ -22,26 +26,27 @@ else:
ISPY3K = False
from itertools import izip_longest as zip_longest
from xonsh import openpy
from xonsh.tools import cast_unicode, safe_hasattr, string_types, indent
# builtin docstrings to ignore
_func_call_docstring = types.FunctionType.__call__.__doc__
_object_init_docstring = object.__init__.__doc__
_builtin_type_docstrings = {t.__doc__ for t in (types.ModuleType,
types.MethodType, types.FunctionType)}
_builtin_type_docstrings = {
t.__doc__
for t in (types.ModuleType, types.MethodType, types.FunctionType)
}
_builtin_func_type = type(all)
_builtin_meth_type = type(str.upper) # Bound methods have the same type as builtin functions
_builtin_meth_type = type(
str.upper) # Bound methods have the same type as builtin functions
info_fields = ['type_name', 'base_class', 'string_form', 'namespace',
'length', 'file', 'definition', 'docstring', 'source',
'init_definition', 'class_docstring', 'init_docstring',
'call_def', 'call_docstring',
# These won't be printed but will be used to determine how to
# format the object
'ismagic', 'isalias', 'isclass', 'argspec', 'found', 'name'
]
info_fields = [
'type_name', 'base_class', 'string_form', 'namespace', 'length', 'file',
'definition', 'docstring', 'source', 'init_definition', 'class_docstring',
'init_docstring', 'call_def', 'call_docstring',
# These won't be printed but will be used to determine how to
# format the object
'ismagic', 'isalias', 'isclass', 'argspec', 'found', 'name'
]
def object_info(**kw):
@ -70,10 +75,11 @@ def get_encoding(obj):
# Print only text files, not extension binaries. Note that
# getsourcelines returns lineno with 1-offset and page() uses
# 0-offset, so we must adjust.
with stdlib_io.open(ofile, 'rb') as buffer: # Tweaked to use io.open for Python 2
with stdlib_io.open(ofile, 'rb') as buffer:
encoding, lines = openpy.detect_encoding(buffer.readline)
return encoding
def getdoc(obj):
"""Stable wrapper around inspect.getdoc.
@ -91,7 +97,7 @@ def getdoc(obj):
# if we get extra info, we add it to the normal docstring.
if isinstance(ds, string_types):
return inspect.cleandoc(ds)
try:
docstr = inspect.getdoc(obj)
encoding = get_encoding(obj)
@ -103,7 +109,7 @@ def getdoc(obj):
return None
def getsource(obj,is_binary=False):
def getsource(obj, is_binary=False):
"""Wrapper around inspect.getsource.
This can be modified by other projects to provide customized source
@ -116,19 +122,19 @@ def getsource(obj,is_binary=False):
Optional inputs:
- is_binary: whether the object is known to come from a binary source.
This implementation will skip returning any output for binary objects, but
custom extractors may know how to meaningfully process them."""
This implementation will skip returning any output for binary objects,
but custom extractors may know how to meaningfully process them."""
if is_binary:
return None
else:
# get source if obj was decorated with @decorator
if hasattr(obj,"__wrapped__"):
if hasattr(obj, "__wrapped__"):
obj = obj.__wrapped__
try:
src = inspect.getsource(obj)
except TypeError:
if hasattr(obj,'__class__'):
if hasattr(obj, '__class__'):
src = inspect.getsource(obj.__class__)
encoding = get_encoding(obj)
return cast_unicode(src, encoding=encoding)
@ -136,14 +142,16 @@ def getsource(obj,is_binary=False):
def is_simple_callable(obj):
"""True if obj is a function ()"""
return (inspect.isfunction(obj) or inspect.ismethod(obj) or \
isinstance(obj, _builtin_func_type) or isinstance(obj, _builtin_meth_type))
return (inspect.isfunction(obj) or
inspect.ismethod(obj) or
isinstance(obj, _builtin_func_type) or
isinstance(obj, _builtin_meth_type))
def getargspec(obj):
"""Wrapper around :func:`inspect.getfullargspec` on Python 3, and
:func:inspect.getargspec` on Python 2.
In addition to functions and methods, this can also handle objects with a
``__call__`` attribute.
"""
@ -203,7 +211,7 @@ def call_tip(oinfo, format_call=True):
if has_self:
argspec['args'] = argspec['args'][1:]
call_line = oinfo['name']+format_argspec(argspec)
call_line = oinfo['name'] + format_argspec(argspec)
# Now get docstring.
# The priority is: call docstring, constructor docstring, main one.
@ -211,7 +219,7 @@ def call_tip(oinfo, format_call=True):
if doc is None:
doc = oinfo.get('init_docstring')
if doc is None:
doc = oinfo.get('docstring','')
doc = oinfo.get('docstring', '')
return call_line, doc
@ -272,7 +280,7 @@ def find_source_lines(obj):
# get source if obj was decorated with @decorator
if safe_hasattr(obj, '__wrapped__'):
obj = obj.__wrapped__
try:
try:
lineno = inspect.getsourcelines(obj)[1]
@ -294,7 +302,7 @@ class Inspector(object):
def __init__(self, str_detail_level=0):
self.str_detail_level = str_detail_level
def _getdef(self,obj,oname=''):
def _getdef(self, obj, oname=''):
"""Return the call signature for any callable object.
If any exception is generated, None is returned instead and the
@ -332,9 +340,9 @@ class Inspector(object):
elif (not ISPY3K) and type(obj) is types.InstanceType:
obj = obj.__call__
output = self._getdef(obj,oname)
output = self._getdef(obj, oname)
if output is None:
self.noinfo('definition header',oname)
self.noinfo('definition header', oname)
else:
print(header, output, end=' ', file=sys.stdout)
@ -342,7 +350,7 @@ class Inspector(object):
"""Print the docstring for any object.
Optional
-formatter: a function to run the docstring through for specially
formatted docstrings.
"""
@ -358,14 +366,14 @@ class Inspector(object):
if init_ds is not None:
lines.append(head("Init docstring:"))
lines.append(indent(init_ds))
elif hasattr(obj,'__call__'):
elif hasattr(obj, '__call__'):
call_ds = getdoc(obj.__call__)
if call_ds:
lines.append(head("Call docstring:"))
lines.append(indent(call_ds))
if not lines:
self.noinfo('documentation',oname)
self.noinfo('documentation', oname)
else:
print('\n'.join(lines))
@ -376,7 +384,7 @@ class Inspector(object):
try:
src = getsource(obj)
except:
self.noinfo('source',oname)
self.noinfo('source', oname)
else:
print(src)
@ -399,7 +407,8 @@ class Inspector(object):
# Print only text files, not extension binaries. Note that
# getsourcelines returns lineno with 1-offset and page() uses
# 0-offset, so we must adjust.
print(openpy.read_py_file(ofile, skip_encoding_cookie=False), lineno - 1)
o = openpy.read_py_file(ofile, skip_encoding_cookie=False)
print(o, lineno - 1)
def _format_fields(self, fields, title_width=0):
"""Formats a list of fields for display.
@ -418,25 +427,23 @@ class Inspector(object):
if len(content.splitlines()) > 1:
title = title + ":\n"
else:
title = (title+":").ljust(title_width)
title = (title + ":").ljust(title_width)
out.append(cast_unicode(title) + cast_unicode(content))
return "\n".join(out)
# The fields to be displayed by pinfo: (fancy_name, key_in_info_dict)
pinfo_fields1 = [("Type", "type_name")]
pinfo_fields2 = [("String form", "string_form")]
pinfo_fields3 = [("Length", "length"),
("File", "file"),
("Definition", "definition"),
]
("Definition", "definition"), ]
pinfo_fields_obj = [("Class docstring", "class_docstring"),
("Init docstring", "init_docstring"),
("Call def", "call_def"),
("Call docstring", "call_docstring"),
]
("Call docstring", "call_docstring"), ]
def pinfo(self, obj, oname='', info=None, detail_level=0):
"""Show detailed information about an object.
@ -452,31 +459,39 @@ class Inspector(object):
detail_level : int, optional
if set to 1, more information is given.
"""
info = self.info(obj, oname=oname, info=info, detail_level=detail_level)
info = self.info(obj,
oname=oname,
info=info,
detail_level=detail_level)
displayfields = []
def add_fields(fields):
for title, key in fields:
field = info[key]
if field is not None:
displayfields.append((title, field.rstrip()))
add_fields(self.pinfo_fields1)
# Base class for old-style instances
if (not ISPY3K) and isinstance(obj, types.InstanceType) and info['base_class']:
displayfields.append(("Base Class", info['base_class'].rstrip()))
if ((not ISPY3K) and
isinstance(obj, types.InstanceType) and
info['base_class']):
o = ("Base Class", info['base_class'].rstrip())
displayfields.append(o)
add_fields(self.pinfo_fields2)
# Namespace
if info['namespace'] is not None and info['namespace'] != 'Interactive':
displayfields.append(("Namespace", info['namespace'].rstrip()))
if (info['namespace'] is not None and
info['namespace'] != 'Interactive'):
displayfields.append(("Namespace", info['namespace'].rstrip()))
add_fields(self.pinfo_fields3)
if info['isclass'] and info['init_definition']:
displayfields.append(("Init definition",
info['init_definition'].rstrip()))
info['init_definition'].rstrip()))
# Source or docstring, depending on detail level and whether
# source found.
if detail_level > 0 and info['source'] is not None:
@ -488,7 +503,7 @@ class Inspector(object):
if info['isclass']:
if info['init_docstring'] is not None:
displayfields.append(("Init docstring",
info['init_docstring']))
info['init_docstring']))
# Info for objects:
else:
@ -541,8 +556,8 @@ class Inspector(object):
# store output in a dict, we initialize it here and fill it as we go
out = dict(name=oname, found=True, isalias=isalias, ismagic=ismagic)
string_max = 200 # max size of strings to show (snipped if longer)
shalf = int((string_max -5)/2)
string_max = 200 # max size of strings to show (snipped if longer)
shalf = int((string_max - 5) / 2)
if ismagic:
obj_type_name = 'Magic function'
@ -555,17 +570,18 @@ class Inspector(object):
try:
bclass = obj.__class__
out['base_class'] = str(bclass)
except: pass
except:
pass
# String form, but snip if too long in ? form (full in ??)
if detail_level >= self.str_detail_level:
try:
ostr = str(obj)
str_head = 'string_form'
if not detail_level and len(ostr)>string_max:
if not detail_level and len(ostr) > string_max:
ostr = ostr[:shalf] + ' <...> ' + ostr[-shalf:]
ostr = ("\n" + " " * len(str_head.expandtabs())).\
join(q.strip() for q in ostr.split("\n"))
join(q.strip() for q in ostr.split("\n"))
out[str_head] = ostr
except:
pass
@ -576,7 +592,8 @@ class Inspector(object):
# Length (for strings and lists)
try:
out['length'] = str(len(obj))
except: pass
except:
pass
# Filename where object was defined
binary_file = False
@ -589,13 +606,14 @@ class Inspector(object):
if fname.endswith(('.so', '.dll', '.pyd')):
binary_file = True
elif fname.endswith('<string>'):
fname = 'Dynamically generated function. No source code available.'
fname = 'Dynamically generated function. '
'No source code available.'
out['file'] = fname
# Docstrings only in detail 0 mode, since source contains them (we
# avoid repetitions). If source fails, we add them back, see below.
if ds and detail_level == 0:
out['docstring'] = ds
out['docstring'] = ds
# Original source code for any callable
if detail_level:
@ -617,18 +635,17 @@ class Inspector(object):
if ds and source is None:
out['docstring'] = ds
# Constructor docstring for classes
if inspect.isclass(obj):
out['isclass'] = True
# reconstruct the function definition and print it:
try:
obj_init = obj.__init__
obj_init = obj.__init__
except AttributeError:
init_def = init_ds = None
else:
init_def = self._getdef(obj_init,oname)
init_ds = getdoc(obj_init)
init_def = self._getdef(obj_init, oname)
init_ds = getdoc(obj_init)
# Skip Python's auto-generated docstrings
if init_ds == _object_init_docstring:
init_ds = None
@ -652,7 +669,7 @@ class Inspector(object):
# objects which use instance-customized docstrings.
if ds:
try:
cls = getattr(obj,'__class__')
cls = getattr(obj, '__class__')
except:
class_ds = None
else:
@ -679,8 +696,8 @@ class Inspector(object):
call_def = self._getdef(obj.__call__, oname)
if call_def:
call_def = call_def
# it may never be the case that call def and definition differ,
# but don't include the same signature twice
# it may never be the case that call def and definition
# differ, but don't include the same signature twice
if call_def != out.get('definition'):
out['call_def'] = call_def
call_ds = getdoc(obj.__call__)
@ -718,4 +735,3 @@ class Inspector(object):
argspec_dict['varkw'] = argspec_dict.pop('keywords')
return object_info(**out)

View file

@ -67,5 +67,6 @@ def main(argv=None):
signal.signal(signal.SIGTSTP, signal.SIG_IGN)
shell.cmdloop()
if __name__ == '__main__':
main()

View file

@ -28,7 +28,7 @@ try:
from tokenize import detect_encoding
except ImportError:
from codecs import lookup, BOM_UTF8
# Copied from Python 3.2 tokenize
def _get_normal_name(orig_enc):
"""Imitates get_normal_name in tokenizer.c."""
@ -40,7 +40,7 @@ except ImportError:
enc.startswith(("latin-1-", "iso-8859-1-", "iso-latin-1-")):
return "iso-8859-1"
return orig_enc
# Copied from Python 3.2 tokenize
def detect_encoding(readline):
"""
@ -62,6 +62,7 @@ except ImportError:
bom_found = False
encoding = None
default = 'utf-8'
def read_or_stop():
try:
return readline()
@ -113,6 +114,7 @@ except ImportError:
return default, [first, second]
try:
# Available in Python 3.2 and above.
from tokenize import open
@ -122,12 +124,13 @@ except ImportError:
"""Open a file in read only mode using the encoding detected by
detect_encoding().
"""
buffer = io.open(filename, 'rb') # Tweaked to use io.open for Python 2
buffer = io.open(filename, 'rb') # Tweaked to use io.open for Python 2
encoding, lines = detect_encoding(buffer.readline)
buffer.seek(0)
text = TextIOWrapper(buffer, encoding, line_buffering=True)
text.mode = 'r'
return text
return text
def source_to_unicode(txt, errors='replace', skip_encoding_cookie=True):
"""Converts a bytes string with python source code to unicode.
@ -155,6 +158,7 @@ def source_to_unicode(txt, errors='replace', skip_encoding_cookie=True):
else:
return text.read()
def strip_encoding_cookie(filelike):
"""Generator to pull lines from a text-mode file, skipping the encoding
cookie if it is found in the first two lines.
@ -169,10 +173,11 @@ def strip_encoding_cookie(filelike):
yield second
except StopIteration:
return
for line in it:
yield line
def read_py_file(filename, skip_encoding_cookie=True):
"""Read a Python file, using the encoding declared inside the file.
@ -189,12 +194,13 @@ def read_py_file(filename, skip_encoding_cookie=True):
-------
A unicode string containing the contents of the file.
"""
with open(filename) as f: # the open function defined in this module.
with open(filename) as f: # the open function defined in this module.
if skip_encoding_cookie:
return "".join(strip_encoding_cookie(f))
else:
return f.read()
def read_py_url(url, errors='replace', skip_encoding_cookie=True):
"""Read a Python file from a URL, using the encoding declared inside the file.
@ -216,25 +222,28 @@ def read_py_url(url, errors='replace', skip_encoding_cookie=True):
"""
# Deferred import for faster start
try:
from urllib.request import urlopen # Py 3
from urllib.request import urlopen # Py 3
except ImportError:
from urllib import urlopen
response = urlopen(url)
buffer = io.BytesIO(response.read())
return source_to_unicode(buffer, errors, skip_encoding_cookie)
def _list_readline(x):
"""Given a list, returns a readline() function that returns the next element
with each call.
"""
x = iter(x)
def readline():
return next(x)
return readline
# Code for going between .py files and cached .pyc files ----------------------
try: # Python 3.2, see PEP 3147
try: # Python 3.2, see PEP 3147
from imp import source_from_cache, cache_from_source
except ImportError:
# Python <= 3.1: .pyc files go next to .py
@ -244,7 +253,7 @@ except ImportError:
raise ValueError('Not a cached Python file extension', ext)
# Should we look for .pyw files?
return basename + '.py'
def cache_from_source(path, debug_override=None):
if debug_override is None:
debug_override = __debug__

File diff suppressed because it is too large Load diff

View file

@ -13,33 +13,24 @@ class XonshSubprocLexer(BashLexer):
name = 'Xonsh subprocess lexer'
tokens = {
'root': [
(r'`[^`]*?`', String.Backtick),
inherit,
]
}
tokens = {'root': [(r'`[^`]*?`', String.Backtick), inherit, ]}
ROOT_TOKENS = [
(r'\?', Keyword),
(r'\$\w+', Name.Variable),
(r'\$\{', Keyword, ('pymode',)),
(r'\$\(', Keyword, ('subproc',)),
(r'\$\[', Keyword, ('subproc',)),
(r'@\(', Keyword, ('pymode',)),
inherit,
]
ROOT_TOKENS = [(r'\?', Keyword),
(r'\$\w+', Name.Variable),
(r'\$\{', Keyword, ('pymode', )),
(r'\$\(', Keyword, ('subproc', )),
(r'\$\[', Keyword, ('subproc', )),
(r'@\(', Keyword, ('pymode', )),
inherit, ]
PYMODE_TOKENS = [
(r'(.+)(\))', bygroups(using(this), Keyword), '#pop'),
(r'(.+)(\})', bygroups(using(this), Keyword), '#pop'),
]
PYMODE_TOKENS = [(r'(.+)(\))', bygroups(using(this), Keyword), '#pop'),
(r'(.+)(\})', bygroups(using(this), Keyword), '#pop'), ]
SUBPROC_TOKENS = [
(r'(.+)(\))', bygroups(using(XonshSubprocLexer), Keyword), '#pop'),
(r'(.+)(\])', bygroups(using(XonshSubprocLexer), Keyword), '#pop'),
]
]
class XonshLexer(PythonLexer):
@ -49,10 +40,11 @@ class XonshLexer(PythonLexer):
aliases = ['xonsh', 'xsh']
filenames = ['*.xsh', '*xonshrc']
tokens = {'root': list(ROOT_TOKENS),
'pymode': PYMODE_TOKENS,
'subproc': SUBPROC_TOKENS,
}
tokens = {
'root': list(ROOT_TOKENS),
'pymode': PYMODE_TOKENS,
'subproc': SUBPROC_TOKENS,
}
class XonshConsoleLexer(PythonLexer):
@ -63,19 +55,16 @@ class XonshConsoleLexer(PythonLexer):
filenames = []
tokens = {
'root': [
(r'^(>>>|\.\.\.) ', Generic.Prompt),
(r'\n(>>>|\.\.\.)', Generic.Prompt),
(r'\n(?![>.][>.][>.] )([^\n]*)', Generic.Output),
(r'\n(?![>.][>.][>.] )(.*?)$', Generic.Output),
] + ROOT_TOKENS,
'root': [(r'^(>>>|\.\.\.) ', Generic.Prompt),
(r'\n(>>>|\.\.\.)', Generic.Prompt),
(r'\n(?![>.][>.][>.] )([^\n]*)', Generic.Output),
(r'\n(?![>.][>.][>.] )(.*?)$', Generic.Output), ] + ROOT_TOKENS,
'pymode': PYMODE_TOKENS,
'subproc': SUBPROC_TOKENS,
}
}
# XonshLexer & XonshSubprocLexer have to refernce each other
XonshSubprocLexer.tokens['root'] = [
(r'(\$\{)(.*)(\})', bygroups(Keyword, using(XonshLexer), Keyword)),
(r'(@\()(.+)(\))', bygroups(Keyword, using(XonshLexer), Keyword)),
] + XonshSubprocLexer.tokens['root']
] + XonshSubprocLexer.tokens['root']

View file

@ -29,8 +29,8 @@ def setup_readline():
readline.set_completer_delims(' \t\n')
RL_LIB = lib = ctypes.cdll.LoadLibrary(readline.__file__)
try:
RL_COMPLETION_SUPPRESS_APPEND = ctypes.c_int.in_dll(lib,
'rl_completion_suppress_append')
RL_COMPLETION_SUPPRESS_APPEND = ctypes.c_int.in_dll(
lib, 'rl_completion_suppress_append')
except ValueError:
# not all versions of readline have this symbol, ie Macs sometimes
RL_COMPLETION_SUPPRESS_APPEND = None
@ -74,6 +74,7 @@ def teardown_readline():
except PermissionError:
warn('do not have write permissions for ' + hf, RuntimeWarning)
def rl_completion_suppress_append(val=1):
"""Sets the rl_completion_suppress_append varaiable, if possible.
A value of 1 (default) means to suppress, a value of 0 means to enable.
@ -82,11 +83,13 @@ def rl_completion_suppress_append(val=1):
return
RL_COMPLETION_SUPPRESS_APPEND.value = val
class Shell(Cmd):
"""The xonsh shell."""
def __init__(self, completekey='tab', stdin=None, stdout=None, ctx=None):
super(Shell, self).__init__(completekey=completekey, stdin=stdin,
super(Shell, self).__init__(completekey=completekey,
stdin=stdin,
stdout=stdout)
self.execer = Execer()
env = builtins.__xonsh_env__
@ -113,7 +116,7 @@ class Shell(Cmd):
def precmd(self, line):
return line if self.need_more_lines else line.lstrip()
def default(self, line):
"""Implements code execution."""
line = line if line.endswith('\n') else line + '\n'
@ -137,7 +140,9 @@ class Shell(Cmd):
return code
src = ''.join(self.buffer)
try:
code = self.execer.compile(src, mode='single', glbs=None,
code = self.execer.compile(src,
mode='single',
glbs=None,
locs=self.ctx)
self.reset_buffer()
except SyntaxError:
@ -157,7 +162,9 @@ class Shell(Cmd):
def completedefault(self, text, line, begidx, endidx):
"""Implements tab-completion for text."""
rl_completion_suppress_append() # this needs to be called each time
return self.completer.complete(text, line, begidx, endidx, ctx=self.ctx)
return self.completer.complete(text, line,
begidx, endidx,
ctx=self.ctx)
# tab complete on first index too
completenames = completedefault

View file

@ -22,7 +22,6 @@ import sys
import builtins
from collections import OrderedDict
if sys.version_info[0] >= 3:
string_types = (str, bytes)
unicode_type = str
@ -142,13 +141,13 @@ def indent(instr, nspaces=4, ntabs=0, flatten=False):
"""
if instr is None:
return
ind = '\t'*ntabs+' '*nspaces
ind = '\t' * ntabs + ' ' * nspaces
if flatten:
pat = re.compile(r'^\s*', re.MULTILINE)
else:
pat = re.compile(r'^', re.MULTILINE)
outstr = re.sub(pat, ind, instr)
if outstr.endswith(os.linesep+ind):
if outstr.endswith(os.linesep + ind):
return outstr[:-len(ind)]
else:
return outstr
@ -156,71 +155,71 @@ def indent(instr, nspaces=4, ntabs=0, flatten=False):
TERM_COLORS = {
# Reset
'NO_COLOR': '\001\033[0m\002', # Text Reset
'NO_COLOR': '\001\033[0m\002', # Text Reset
# Regular Colors
'BLACK': '\033[0;30m\002', # BLACK
'RED': '\001\033[0;31m\002', # RED
'GREEN': '\001\033[0;32m\002', # GREEN
'YELLOW': '\001\033[0;33m\002', # YELLOW
'BLUE': '\001\033[0;34m\002', # BLUE
'PURPLE': '\001\033[0;35m\002', # PURPLE
'CYAN': '\001\033[0;36m\002', # CYAN
'WHITE': '\001\033[0;37m\002', # WHITE
'BLACK': '\033[0;30m\002', # BLACK
'RED': '\001\033[0;31m\002', # RED
'GREEN': '\001\033[0;32m\002', # GREEN
'YELLOW': '\001\033[0;33m\002', # YELLOW
'BLUE': '\001\033[0;34m\002', # BLUE
'PURPLE': '\001\033[0;35m\002', # PURPLE
'CYAN': '\001\033[0;36m\002', # CYAN
'WHITE': '\001\033[0;37m\002', # WHITE
# Bold
'BOLD_BLACK': '\001\033[1;30m\002', # BLACK
'BOLD_RED': '\001\033[1;31m\002', # RED
'BOLD_GREEN': '\001\033[1;32m\002', # GREEN
'BOLD_YELLOW': '\001\033[1;33m\002', # YELLOW
'BOLD_BLUE': '\001\033[1;34m\002', # BLUE
'BOLD_PURPLE': '\001\033[1;35m\002', # PURPLE
'BOLD_CYAN': '\001\033[1;36m\002', # CYAN
'BOLD_WHITE': '\001\033[1;37m\002', # WHITE
'BOLD_BLACK': '\001\033[1;30m\002', # BLACK
'BOLD_RED': '\001\033[1;31m\002', # RED
'BOLD_GREEN': '\001\033[1;32m\002', # GREEN
'BOLD_YELLOW': '\001\033[1;33m\002', # YELLOW
'BOLD_BLUE': '\001\033[1;34m\002', # BLUE
'BOLD_PURPLE': '\001\033[1;35m\002', # PURPLE
'BOLD_CYAN': '\001\033[1;36m\002', # CYAN
'BOLD_WHITE': '\001\033[1;37m\002', # WHITE
# Underline
'UNDERLINE_BLACK': '\001\033[4;30m\002', # BLACK
'UNDERLINE_RED': '\001\033[4;31m\002', # RED
'UNDERLINE_GREEN': '\001\033[4;32m\002', # GREEN
'UNDERLINE_YELLOW': '\001\033[4;33m\002', # YELLOW
'UNDERLINE_BLUE': '\001\033[4;34m\002', # BLUE
'UNDERLINE_PURPLE': '\001\033[4;35m\002', # PURPLE
'UNDERLINE_CYAN': '\001\033[4;36m\002', # CYAN
'UNDERLINE_WHITE': '\001\033[4;37m\002', # WHITE
'UNDERLINE_BLACK': '\001\033[4;30m\002', # BLACK
'UNDERLINE_RED': '\001\033[4;31m\002', # RED
'UNDERLINE_GREEN': '\001\033[4;32m\002', # GREEN
'UNDERLINE_YELLOW': '\001\033[4;33m\002', # YELLOW
'UNDERLINE_BLUE': '\001\033[4;34m\002', # BLUE
'UNDERLINE_PURPLE': '\001\033[4;35m\002', # PURPLE
'UNDERLINE_CYAN': '\001\033[4;36m\002', # CYAN
'UNDERLINE_WHITE': '\001\033[4;37m\002', # WHITE
# Background
'BACKGROUND_BLACK': '\001\033[40m\002', # BLACK
'BACKGROUND_RED': '\001\033[41m\002', # RED
'BACKGROUND_GREEN': '\001\033[42m\002', # GREEN
'BACKGROUND_YELLOW': '\001\033[43m\002', # YELLOW
'BACKGROUND_BLUE': '\001\033[44m\002', # BLUE
'BACKGROUND_PURPLE': '\001\033[45m\002', # PURPLE
'BACKGROUND_CYAN': '\001\033[46m\002', # CYAN
'BACKGROUND_WHITE': '\001\033[47m\002', # WHITE
'BACKGROUND_BLACK': '\001\033[40m\002', # BLACK
'BACKGROUND_RED': '\001\033[41m\002', # RED
'BACKGROUND_GREEN': '\001\033[42m\002', # GREEN
'BACKGROUND_YELLOW': '\001\033[43m\002', # YELLOW
'BACKGROUND_BLUE': '\001\033[44m\002', # BLUE
'BACKGROUND_PURPLE': '\001\033[45m\002', # PURPLE
'BACKGROUND_CYAN': '\001\033[46m\002', # CYAN
'BACKGROUND_WHITE': '\001\033[47m\002', # WHITE
# High Intensity
'INTENSE_BLACK': '\001\033[0;90m\002', # BLACK
'INTENSE_RED': '\001\033[0;91m\002', # RED
'INTENSE_GREEN': '\001\033[0;92m\002', # GREEN
'INTENSE_YELLOW': '\001\033[0;93m\002', # YELLOW
'INTENSE_BLUE': '\001\033[0;94m\002', # BLUE
'INTENSE_PURPLE': '\001\033[0;95m\002', # PURPLE
'INTENSE_CYAN': '\001\033[0;96m\002', # CYAN
'INTENSE_WHITE': '\001\033[0;97m\002', # WHITE
'INTENSE_BLACK': '\001\033[0;90m\002', # BLACK
'INTENSE_RED': '\001\033[0;91m\002', # RED
'INTENSE_GREEN': '\001\033[0;92m\002', # GREEN
'INTENSE_YELLOW': '\001\033[0;93m\002', # YELLOW
'INTENSE_BLUE': '\001\033[0;94m\002', # BLUE
'INTENSE_PURPLE': '\001\033[0;95m\002', # PURPLE
'INTENSE_CYAN': '\001\033[0;96m\002', # CYAN
'INTENSE_WHITE': '\001\033[0;97m\002', # WHITE
# Bold High Intensity
'BOLD_INTENSE_BLACK': '\001\033[1;90m\002', # BLACK
'BOLD_INTENSE_RED': '\001\033[1;91m\002', # RED
'BOLD_INTENSE_GREEN': '\001\033[1;92m\002', # GREEN
'BOLD_INTENSE_YELLOW': '\001\033[1;93m\002', # YELLOW
'BOLD_INTENSE_BLUE': '\001\033[1;94m\002', # BLUE
'BOLD_INTENSE_PURPLE': '\001\033[1;95m\002', # PURPLE
'BOLD_INTENSE_CYAN': '\001\033[1;96m\002', # CYAN
'BOLD_INTENSE_WHITE': '\001\033[1;97m\002', # WHITE
'BOLD_INTENSE_BLACK': '\001\033[1;90m\002', # BLACK
'BOLD_INTENSE_RED': '\001\033[1;91m\002', # RED
'BOLD_INTENSE_GREEN': '\001\033[1;92m\002', # GREEN
'BOLD_INTENSE_YELLOW': '\001\033[1;93m\002', # YELLOW
'BOLD_INTENSE_BLUE': '\001\033[1;94m\002', # BLUE
'BOLD_INTENSE_PURPLE': '\001\033[1;95m\002', # PURPLE
'BOLD_INTENSE_CYAN': '\001\033[1;96m\002', # CYAN
'BOLD_INTENSE_WHITE': '\001\033[1;97m\002', # WHITE
# High Intensity backgrounds
'BACKGROUND_INTENSE_BLACK': '\001\033[0;100m\002', # BLACK
'BACKGROUND_INTENSE_RED': '\001\033[0;101m\002', # RED
'BACKGROUND_INTENSE_GREEN': '\001\033[0;102m\002', # GREEN
'BACKGROUND_INTENSE_BLACK': '\001\033[0;100m\002', # BLACK
'BACKGROUND_INTENSE_RED': '\001\033[0;101m\002', # RED
'BACKGROUND_INTENSE_GREEN': '\001\033[0;102m\002', # GREEN
'BACKGROUND_INTENSE_YELLOW': '\001\033[0;103m\002', # YELLOW
'BACKGROUND_INTENSE_BLUE': '\001\033[0;104m\002', # BLUE
'BACKGROUND_INTENSE_BLUE': '\001\033[0;104m\002', # BLUE
'BACKGROUND_INTENSE_PURPLE': '\001\033[0;105m\002', # PURPLE
'BACKGROUND_INTENSE_CYAN': '\001\033[0;106m\002', # CYAN
'BACKGROUND_INTENSE_WHITE': '\001\033[0;107m\002', # WHITE
}
'BACKGROUND_INTENSE_CYAN': '\001\033[0;106m\002', # CYAN
'BACKGROUND_INTENSE_WHITE': '\001\033[0;107m\002', # WHITE
}
# The following redirect classes were taken directly from Python 3.5's source
@ -286,21 +285,20 @@ def suggest_commands(cmd, env, aliases):
if f not in suggested and levenshtein(f, cmd, thresh) < thresh:
fname = os.path.join(d, f)
suggested[f] = 'Command ({0})'.format(fname)
suggested = OrderedDict(sorted(suggested.items(),
key=lambda x: suggestion_sort_helper(x[0],
cmd)))
suggested = OrderedDict(
sorted(suggested.items(),
key=lambda x: suggestion_sort_helper(x[0], cmd)))
num = min(len(suggested), max_sugg)
if num == 0:
return ''
else:
tips = 'Did you mean {}the following?'.format(
'' if num == 1 else 'one of ')
tips = 'Did you mean {}the following?'.format('' if num == 1 else
'one of ')
items = list(suggested.popitem(False) for _ in range(num))
length = max(len(key) for key, _ in items) + 2
alternatives = '\n'.join(' {: <{}} {}'.format(key+":",
length,
alternatives = '\n'.join(' {: <{}} {}'.format(key + ":", length,
val)
for key, val in items)
@ -313,7 +311,7 @@ def levenshtein(a, b, max_dist=float('inf')):
"""Calculates the Levenshtein distance between a and b."""
n, m = len(a), len(b)
if abs(n-m) > max_dist:
if abs(n - m) > max_dist:
return float('inf')
if n > m:
@ -321,13 +319,13 @@ def levenshtein(a, b, max_dist=float('inf')):
a, b = b, a
n, m = m, n
current = range(n+1)
for i in range(1, m+1):
previous, current = current, [i]+[0]*n
for j in range(1, n+1):
add, delete = previous[j]+1, current[j-1]+1
change = previous[j-1]
if a[j-1] != b[i-1]:
current = range(n + 1)
for i in range(1, m + 1):
previous, current = current, [i] + [0] * n
for j in range(1, n + 1):
add, delete = previous[j] + 1, current[j - 1] + 1
change = previous[j - 1]
if a[j - 1] != b[i - 1]:
change = change + 1
current[j] = min(add, delete, change)