mirror of https://github.com/xonsh/xonsh.git (synced 2025-03-04 08:24:40 +01:00)
Adds tools.executables_in
parent 3f303fcb5b
commit f076ca2e1c
2 changed files with 98 additions and 22 deletions
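The helper added here, xonsh.tools.executables_in, yields the names of the regular files in a directory that the current user may execute. The diff below first extends the tools test module and then xonsh.tools itself. As orientation, a minimal usage sketch (the PATH-walking loop is illustrative only and not part of this commit):

# Illustrative sketch only: list every command reachable from $PATH
# using the helper this commit adds to xonsh.tools.
import os
from xonsh.tools import executables_in

for d in filter(os.path.isdir, os.environ.get('PATH', '').split(os.pathsep)):
    for name in executables_in(d):
        print(os.path.join(d, name))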
@@ -1,51 +1,60 @@
# -*- coding: utf-8 -*-
"""Tests the xonsh lexer."""
from __future__ import unicode_literals, print_function
import os
import random
from tempfile import TemporaryDirectory
import stat

import nose
from nose.tools import assert_equal, assert_true, assert_false

from xonsh.lexer import Lexer
from xonsh.tools import (subproc_toks, subexpr_from_unbalanced, is_int,
    always_true, always_false, ensure_string, is_env_path, str_to_env_path,
    env_path_to_str, escape_windows_cmd_string, is_bool, to_bool, bool_to_str,
from xonsh.tools import (
    subproc_toks, subexpr_from_unbalanced, is_int, always_true, always_false,
    ensure_string, is_env_path, str_to_env_path, env_path_to_str,
    escape_windows_cmd_string, is_bool, to_bool, bool_to_str,
    ensure_int_or_slice, is_float, is_string, check_for_partial_string,
    argvquote)
    argvquote, executables_in)

LEXER = Lexer()
LEXER.build()

INDENT = ' '


def test_subproc_toks_x():
    exp = '![x]'
    obs = subproc_toks('x', lexer=LEXER, returnline=True)
    assert_equal(exp, obs)


def test_subproc_toks_ls_l():
    exp = '![ls -l]'
    obs = subproc_toks('ls -l', lexer=LEXER, returnline=True)
    assert_equal(exp, obs)


def test_subproc_toks_git():
    s = 'git commit -am "hello doc"'
    exp = '![{0}]'.format(s)
    obs = subproc_toks(s, lexer=LEXER, returnline=True)
    assert_equal(exp, obs)


def test_subproc_toks_git_semi():
    s = 'git commit -am "hello doc"'
    exp = '![{0}];'.format(s)
    obs = subproc_toks(s + ';', lexer=LEXER, returnline=True)
    assert_equal(exp, obs)


def test_subproc_toks_git_nl():
    s = 'git commit -am "hello doc"'
    exp = '![{0}]\n'.format(s)
    obs = subproc_toks(s + '\n', lexer=LEXER, returnline=True)
    assert_equal(exp, obs)


def test_subproc_toks_indent_ls():
    s = 'ls -l'
    exp = INDENT + '![{0}]'.format(s)
@@ -53,6 +62,7 @@ def test_subproc_toks_indent_ls():
                       returnline=True)
    assert_equal(exp, obs)


def test_subproc_toks_indent_ls_nl():
    s = 'ls -l'
    exp = INDENT + '![{0}]\n'.format(s)
@@ -60,30 +70,35 @@ def test_subproc_toks_indent_ls_nl():
                       returnline=True)
    assert_equal(exp, obs)


def test_subproc_toks_indent_ls_no_min():
    s = 'ls -l'
    exp = INDENT + '![{0}]'.format(s)
    obs = subproc_toks(INDENT + s, lexer=LEXER, returnline=True)
    assert_equal(exp, obs)


def test_subproc_toks_indent_ls_no_min_nl():
    s = 'ls -l'
    exp = INDENT + '![{0}]\n'.format(s)
    obs = subproc_toks(INDENT + s + '\n', lexer=LEXER, returnline=True)
    assert_equal(exp, obs)


def test_subproc_toks_indent_ls_no_min_semi():
    s = 'ls'
    exp = INDENT + '![{0}];'.format(s)
    obs = subproc_toks(INDENT + s + ';', lexer=LEXER, returnline=True)
    assert_equal(exp, obs)


def test_subproc_toks_indent_ls_no_min_semi_nl():
    s = 'ls'
    exp = INDENT + '![{0}];\n'.format(s)
    obs = subproc_toks(INDENT + s + ';\n', lexer=LEXER, returnline=True)
    assert_equal(exp, obs)


def test_subproc_toks_ls_comment():
    s = 'ls -l'
    com = ' # lets list'
@@ -91,6 +106,7 @@ def test_subproc_toks_ls_comment():
    obs = subproc_toks(s + com, lexer=LEXER, returnline=True)
    assert_equal(exp, obs)


def test_subproc_toks_ls_42_comment():
    s = 'ls 42'
    com = ' # lets list'
@@ -98,6 +114,7 @@ def test_subproc_toks_ls_42_comment():
    obs = subproc_toks(s + com, lexer=LEXER, returnline=True)
    assert_equal(exp, obs)


def test_subproc_toks_ls_str_comment():
    s = 'ls "wakka"'
    com = ' # lets list'
@@ -105,6 +122,7 @@ def test_subproc_toks_ls_str_comment():
    obs = subproc_toks(s + com, lexer=LEXER, returnline=True)
    assert_equal(exp, obs)


def test_subproc_toks_indent_ls_comment():
    ind = ' '
    s = 'ls -l'
@@ -113,6 +131,7 @@ def test_subproc_toks_indent_ls_comment():
    obs = subproc_toks(ind + s + com, lexer=LEXER, returnline=True)
    assert_equal(exp, obs)


def test_subproc_toks_indent_ls_str():
    ind = ' '
    s = 'ls "wakka"'
@@ -121,6 +140,7 @@ def test_subproc_toks_indent_ls_str():
    obs = subproc_toks(ind + s + com, lexer=LEXER, returnline=True)
    assert_equal(exp, obs)


def test_subproc_toks_ls_l_semi_ls_first():
    lsdl = 'ls -l'
    ls = 'ls'
@@ -129,6 +149,7 @@ def test_subproc_toks_ls_l_semi_ls_first():
    obs = subproc_toks(s, lexer=LEXER, maxcol=6, returnline=True)
    assert_equal(exp, obs)


def test_subproc_toks_ls_l_semi_ls_second():
    lsdl = 'ls -l'
    ls = 'ls'
@@ -137,6 +158,7 @@ def test_subproc_toks_ls_l_semi_ls_second():
    obs = subproc_toks(s, lexer=LEXER, mincol=7, returnline=True)
    assert_equal(exp, obs)


def test_subproc_toks_hello_mom_first():
    fst = "echo 'hello'"
    sec = "echo 'mom'"
@@ -145,6 +167,7 @@ def test_subproc_toks_hello_mom_first():
    obs = subproc_toks(s, lexer=LEXER, maxcol=len(fst)+1, returnline=True)
    assert_equal(exp, obs)


def test_subproc_toks_hello_mom_second():
    fst = "echo 'hello'"
    sec = "echo 'mom'"
@@ -153,58 +176,69 @@ def test_subproc_toks_hello_mom_second():
    obs = subproc_toks(s, lexer=LEXER, mincol=len(fst), returnline=True)
    assert_equal(exp, obs)


def test_subproc_toks_comment():
    exp = None
    obs = subproc_toks('# I am a comment', lexer=LEXER, returnline=True)
    assert_equal(exp, obs)


def test_subproc_toks_not():
    exp = 'not ![echo mom]'
    obs = subproc_toks('not echo mom', lexer=LEXER, returnline=True)
    assert_equal(exp, obs)


def test_subproc_toks_paren():
    exp = '(![echo mom])'
    obs = subproc_toks('(echo mom)', lexer=LEXER, returnline=True)
    assert_equal(exp, obs)


def test_subproc_toks_paren_ws():
    exp = '(![echo mom]) '
    obs = subproc_toks('(echo mom) ', lexer=LEXER, returnline=True)
    assert_equal(exp, obs)


def test_subproc_toks_not_paren():
    exp = 'not (![echo mom])'
    obs = subproc_toks('not (echo mom)', lexer=LEXER, returnline=True)
    assert_equal(exp, obs)


def test_subproc_toks_and_paren():
    exp = 'True and (![echo mom])'
    obs = subproc_toks('True and (echo mom)', lexer=LEXER, returnline=True)
    assert_equal(exp, obs)


def test_subproc_toks_paren_and_paren():
    exp = '(![echo a]) and (echo b)'
    obs = subproc_toks('(echo a) and (echo b)', maxcol=9, lexer=LEXER, returnline=True)
    assert_equal(exp, obs)


def test_subproc_toks_semicolon_only():
    exp = None
    obs = subproc_toks(';', lexer=LEXER, returnline=True)
    assert_equal(exp, obs)


def test_subproc_toks_pyeval():
    s = 'echo @(1+1)'
    exp = '![{0}]'.format(s)
    obs = subproc_toks(s, lexer=LEXER, returnline=True)
    assert_equal(exp, obs)


def test_subproc_toks_twopyeval():
    s = 'echo @(1+1) @(40 + 2)'
    exp = '![{0}]'.format(s)
    obs = subproc_toks(s, lexer=LEXER, returnline=True)
    assert_equal(exp, obs)


def test_subproc_toks_pyeval_parens():
    s = 'echo @(1+1)'
    inp = '({0})'.format(s)
@@ -212,6 +246,7 @@ def test_subproc_toks_pyeval_parens():
    obs = subproc_toks(inp, lexer=LEXER, returnline=True)
    assert_equal(exp, obs)


def test_subproc_toks_twopyeval_parens():
    s = 'echo @(1+1) @(40+2)'
    inp = '({0})'.format(s)
@@ -219,12 +254,14 @@ def test_subproc_toks_twopyeval_parens():
    obs = subproc_toks(inp, lexer=LEXER, returnline=True)
    assert_equal(exp, obs)


def test_subproc_toks_pyeval_nested():
    s = 'echo @(min(1, 42))'
    exp = '![{0}]'.format(s)
    obs = subproc_toks(s, lexer=LEXER, returnline=True)
    assert_equal(exp, obs)


def test_subproc_toks_pyeval_nested_parens():
    s = 'echo @(min(1, 42))'
    inp = '({0})'.format(s)
@@ -232,18 +269,21 @@ def test_subproc_toks_pyeval_nested_parens():
    obs = subproc_toks(inp, lexer=LEXER, returnline=True)
    assert_equal(exp, obs)


def test_subproc_toks_capstdout():
    s = 'echo $(echo bat)'
    exp = '![{0}]'.format(s)
    obs = subproc_toks(s, lexer=LEXER, returnline=True)
    assert_equal(exp, obs)


def test_subproc_toks_capproc():
    s = 'echo !(echo bat)'
    exp = '![{0}]'.format(s)
    obs = subproc_toks(s, lexer=LEXER, returnline=True)
    assert_equal(exp, obs)


def test_subproc_toks_pyeval_redirect():
    s = 'echo @("foo") > bar'
    inp = '{0}'.format(s)
@@ -252,7 +292,6 @@ def test_subproc_toks_pyeval_redirect():
    assert_equal(exp, obs)



def test_subexpr_from_unbalanced_parens():
    cases = [
        ('f(x.', 'x.'),
@@ -263,26 +302,32 @@ def test_subexpr_from_unbalanced_parens():
        obs = subexpr_from_unbalanced(expr, '(', ')')
        yield assert_equal, exp, obs


def test_is_int():
    yield assert_true, is_int(42)
    yield assert_false, is_int('42')


def test_is_float():
    yield assert_true, is_float(42.0)
    yield assert_false, is_float('42.0')


def test_is_string():
    yield assert_true, is_string('42.0')
    yield assert_false, is_string(42.0)


def test_always_true():
    yield assert_true, always_true(42)
    yield assert_true, always_true('42')


def test_always_false():
    yield assert_false, always_false(42)
    yield assert_false, always_false('42')


def test_ensure_string():
    cases = [
        (42, '42'),
@@ -292,6 +337,7 @@ def test_ensure_string():
        obs = ensure_string(inp)
        yield assert_equal, exp, obs


def test_is_env_path():
    cases = [
        ('/home/wakka', False),
@@ -301,6 +347,7 @@ def test_is_env_path():
        obs = is_env_path(inp)
        yield assert_equal, exp, obs


def test_str_to_env_path():
    cases = [
        ('/home/wakka', ['/home/wakka']),
@@ -311,6 +358,7 @@ def test_str_to_env_path():
        obs = str_to_env_path(inp)
        yield assert_equal, exp, obs


def test_env_path_to_str():
    cases = [
        (['/home/wakka'], '/home/wakka'),
@@ -415,11 +463,12 @@ _startend = {c+s: s for c in _chars for s in _squote}

inners = "this is a string"


def test_partial_string():
    # single string at start
    yield assert_equal, check_for_partial_string('no strings here'), (None, None, None)
    yield assert_equal, check_for_partial_string(''), (None, None, None)
    for s,e in _startend.items():
    for s, e in _startend.items():
        _test = s + inners + e
        for l in _leaders:
            for f in _leaders:
@@ -441,5 +490,27 @@ def test_partial_string():
                yield assert_equal, _res, (len(l+_test+f+l2), None, s2)


def test_executables_in():
    expected = set()
    with TemporaryDirectory() as test_path:
        for i in range(random.randint(100, 200)):
            _type = random.choice(('none', 'file', 'file', 'directory'))
            if _type == 'none':
                continue
            executable = random.choice((True, True, False))
            if _type == 'file' and executable:
                expected.add(str(i))
            path = os.path.join(test_path, str(i))
            if _type == 'file':
                open(path, 'w').close()
            elif _type == 'directory':
                os.mkdir(path)
            if executable:
                os.chmod(path, stat.S_IXUSR | stat.S_IRUSR | stat.S_IWUSR)

        result = set(executables_in(test_path))
        assert_equal(expected, result)


if __name__ == '__main__':
    nose.runmodule()
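The new test above marks files executable by setting the owner permission bits explicitly. A standalone sketch of that pattern (hypothetical file name, assuming a POSIX filesystem; not part of the diff):

# Hypothetical sketch: create a file and give its owner read/write/execute,
# the same flags the new test passes to os.chmod
# (stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR == 0o700).
import os
import stat
import tempfile

with tempfile.TemporaryDirectory() as d:
    path = os.path.join(d, 'mytool')
    open(path, 'w').close()
    os.chmod(path, stat.S_IXUSR | stat.S_IRUSR | stat.S_IWUSR)
    assert os.access(path, os.X_OK)  # the same check executables_in() performs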
@@ -34,7 +34,6 @@ from collections import OrderedDict, Sequence, Set
# dependencies
from xonsh.platform import (has_prompt_toolkit, scandir, win_unicode_console,
                            DEFAULT_ENCODING, ON_LINUX, ON_WINDOWS)

if has_prompt_toolkit():
    import prompt_toolkit
else:
@@ -58,6 +57,7 @@ BEG_TOK_SKIPS = frozenset(['WS', 'INDENT', 'NOT', 'LPAREN'])
END_TOK_TYPES = frozenset(['SEMI', 'AND', 'OR', 'RPAREN'])
LPARENS = frozenset(['LPAREN', 'AT_LPAREN', 'BANG_LPAREN', 'DOLLAR_LPAREN', 'ATDOLLAR_LPAREN'])


def _is_not_lparen_and_rparen(lparens, rtok):
    """Tests if an RPAREN token is matched with something other than a plain old
    LPAREN type.
@@ -222,6 +222,7 @@ def indent(instr, nspaces=4, ntabs=0, flatten=False):
    else:
        return outstr


def get_sep():
    """ Returns the appropriate filepath separator char depending on OS and
    xonsh options set
@@ -231,7 +232,6 @@ def get_sep():
            os.sep)



def fallback(cond, backup):
    """Decorator for returning the object if cond is true and a backup if cond is false.
    """
@@ -285,6 +285,12 @@ class redirect_stderr(_RedirectStream):
    _stream = "stderr"


def executables_in(path):
    """Returns a generator of files in `path` that the user could execute. """
    return (x.name for x in scandir(path)
            if x.is_file() and os.access(x.path, os.X_OK))


def command_not_found(cmd):
    """Uses the debian/ubuntu command-not-found utility to suggest packages for a
    command that cannot currently be found.
@@ -309,20 +315,20 @@ def suggest_commands(cmd, env, aliases):
    max_sugg = env.get('SUGGEST_MAX_NUM')
    if max_sugg < 0:
        max_sugg = float('inf')

    cmd = cmd.lower()
    suggested = {}
    for a in builtins.aliases:
        if a not in suggested:
            if levenshtein(a.lower(), cmd, thresh) < thresh:
                suggested[a] = 'Alias'

    for d in filter(os.path.isdir, env.get('PATH')):
        for f in os.listdir(d):
            if f not in suggested \
                    and levenshtein(f.lower(), cmd, thresh) < thresh:
                fname = os.path.join(d, f)
                suggested[f] = 'Command ({0})'.format(fname)
    for alias in builtins.aliases:
        if alias not in suggested:
            if levenshtein(alias.lower(), cmd, thresh) < thresh:
                suggested[alias] = 'Alias'

    for path in filter(os.path.isdir, env.get('PATH')):
        for _file in executables_in(path):
            if _file not in suggested \
                    and levenshtein(_file.lower(), cmd, thresh) < thresh:
                suggested[_file] = 'Command ({0})'.format(os.path.join(path, _file))

    suggested = OrderedDict(
        sorted(suggested.items(),
               key=lambda x: suggestion_sort_helper(x[0].lower(), cmd)))
@@ -1073,8 +1079,7 @@ class CommandsCache(Set):
            return self._cmds_cache
        allcmds = set()
        for path in paths:
            allcmds |= set(x.name for x in scandir(path)
                           if x.is_file() and os.access(x.path, os.X_OK))
            allcmds |= set(executables_in(path))
        allcmds |= set(builtins.aliases)
        self._cmds_cache = frozenset(allcmds)
        return self._cmds_cache