Merge pull request #1862 from chronitis/first-class-paths

First class path objects
Gil Forsyth 2016-12-08 19:42:36 -05:00 committed by GitHub
commit 26a6414314
7 changed files with 95 additions and 6 deletions

View file

@@ -967,6 +967,24 @@ return type from a list of strings to a list of :class:`pathlib.Path` objects:

    [Path('a_link')]

Path Literals
-------------

Path objects can be instantiated directly using *p-string* syntax. They can be
converted back to plain strings with ``str()``, and this conversion is handled
implicitly in subprocess mode.

.. code-block:: xonshcon

    >>> mypath = p'/foo/bar'
    >>> mypath
    Path('/foo/bar')
    >>> mypath.stem
    'bar'
    >>> echo @(mypath)
    /foo/bar
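The conversion also works the other way around; a brief sketch of the ``str()``
round-trip, assuming standard :class:`pathlib.Path` semantics for the returned
object (output here is illustrative rather than captured from a live session):

.. code-block:: xonshcon

    >>> mypath = p'/foo/bar'
    >>> str(mypath)
    '/foo/bar'
    >>> p'/foo' / 'bar' == mypath
    True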
Help & Superhelp with ``?`` & ``??``
=====================================================
From IPython, xonsh allows you to inspect objects with question marks.

View file

@@ -0,0 +1,14 @@
**Added:**

* p-strings: ``p'/foo/bar'`` is short for ``pathlib.Path('/foo/bar')``
* byte strings: prefixes other than ``b'foo'`` (e.g. ``RB'foo'``) now work

**Changed:** None

**Deprecated:** None

**Removed:** None

**Fixed:** None

**Security:** None
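Both entries are easiest to see at an interactive prompt; a hedged sketch of what
they enable (the values shown are illustrative, not captured from this commit):

    >>> mypath = p'/foo/bar'
    >>> mypath.name
    'bar'
    >>> RB'foo'        # uppercase/reordered byte-string prefixes now lex
    b'foo'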

View file

@@ -189,6 +189,15 @@ def test_double_unicode_literal():

def test_single_bytes_literal():
    assert check_token("b'yo'", ['STRING', "b'yo'", 0])


def test_path_string_literal():
    assert check_token("p'/foo'", ['STRING', "p'/foo'", 0])
    assert check_token('p"/foo"', ['STRING', 'p"/foo"', 0])
    assert check_token("pr'/foo'", ['STRING', "pr'/foo'", 0])
    assert check_token('pr"/foo"', ['STRING', 'pr"/foo"', 0])
    assert check_token("rp'/foo'", ['STRING', "rp'/foo'", 0])
    assert check_token('rp"/foo"', ['STRING', 'rp"/foo"', 0])


def test_regex_globs():
    for i in ('.*', r'\d*', '.*#{1,2}'):
        for p in ('', 'r', 'g', '@somethingelse', 'p', 'pg'):

View file

@@ -86,6 +86,17 @@ def test_str_literal():

def test_bytes_literal():
    check_ast('b"hello"')
    check_ast('B"hello"')


def test_raw_literal():
    check_ast('r"hell\o"')
    check_ast('R"hell\o"')


def test_raw_bytes_literal():
    check_ast('br"hell\o"')
    check_ast('RB"hell\o"')
    check_ast('Br"hell\o"')
    check_ast('rB"hell\o"')


def test_unary_plus():
    check_ast('+1')

@@ -1489,6 +1500,14 @@ def test_async_await():

# Xonsh specific syntax
#

def test_path_literal():
    check_xonsh_ast({}, 'p"/foo"', False)
    check_xonsh_ast({}, 'pr"/foo"', False)
    check_xonsh_ast({}, 'rp"/foo"', False)
    check_xonsh_ast({}, 'pR"/foo"', False)
    check_xonsh_ast({}, 'Rp"/foo"', False)


def test_dollar_name():
    check_xonsh_ast({'WAKKA': 42}, '$WAKKA')

View file

@@ -138,6 +138,11 @@ def reglob(path, parts=None, i=None):
    return paths


def path_literal(s):
    s = expand_path(s)
    return pathlib.Path(s)


def regexsearch(s):
    s = expand_path(s)
    return reglob(s)
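For orientation, a rough standalone approximation of what ``path_literal`` does.
The name ``path_literal_sketch`` is chosen for the example, and
``os.path.expanduser``/``expandvars`` stand in for xonsh's ``expand_path``, which
is an assumption about its behaviour rather than a drop-in equivalent:

import os
import pathlib

def path_literal_sketch(s):
    # Rough stand-in for expand_path: expand ~ and $VARS, then wrap the
    # result in a pathlib.Path -- the object a p-string evaluates to.
    s = os.path.expandvars(os.path.expanduser(s))
    return pathlib.Path(s)

print(path_literal_sketch('~/projects'))   # e.g. /home/user/projects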
@@ -1140,6 +1145,7 @@ def load_builtins(execer=None, config=None, login=False, ctx=None):
    builtins.__xonsh_completers__ = xonsh.completers.init.default_completers()
    builtins.__xonsh_call_macro__ = call_macro
    builtins.__xonsh_enter_macro__ = enter_macro
    builtins.__xonsh_path_literal__ = path_literal
    # public built-ins
    builtins.XonshError = XonshError
    builtins.XonshBlockError = XonshBlockError
@@ -1207,6 +1213,7 @@ def unload_builtins():
        '__xonsh_completers__',
        '__xonsh_call_macro__',
        '__xonsh_enter_macro__',
        '__xonsh_path_literal__',
        'XonshError',
        'XonshBlockError',
        'XonshCalledProcessError',

View file

@@ -16,12 +16,14 @@ from xonsh import ast
from xonsh.ast import has_elts, xonsh_call
from xonsh.lexer import Lexer, LexToken
from xonsh.platform import PYTHON_VERSION_INFO
from xonsh.tokenize import SearchPath
from xonsh.tokenize import SearchPath, StringPrefix
from xonsh.lazyasd import LazyObject
from xonsh.parsers.context_check import check_contexts

RE_SEARCHPATH = LazyObject(lambda: re.compile(SearchPath), globals(),
                           'RE_SEARCHPATH')
RE_STRINGPREFIX = LazyObject(lambda: re.compile(StringPrefix), globals(),
                             'RE_STRINGPREFIX')


class Location(object):

@@ -1977,9 +1979,18 @@

    def p_string_literal(self, p):
        """string_literal : string_tok"""
        p1 = p[1]
        s = ast.literal_eval(p1.value)
        cls = ast.Bytes if p1.value.startswith('b') else ast.Str
        p[0] = cls(s=s, lineno=p1.lineno, col_offset=p1.lexpos)
        prefix = RE_STRINGPREFIX.match(p1.value).group()
        if 'p' in prefix:
            value_without_p = prefix.replace('p', '') + p1.value[len(prefix):]
            s = ast.Str(s=ast.literal_eval(value_without_p), lineno=p1.lineno,
                        col_offset=p1.lexpos)
            p[0] = xonsh_call('__xonsh_path_literal__', [s],
                              lineno=p1.lineno, col=p1.lexpos)
        else:
            s = ast.literal_eval(p1.value)
            is_bytes = 'b' in prefix or 'B' in prefix
            cls = ast.Bytes if is_bytes else ast.Str
            p[0] = cls(s=s, lineno=p1.lineno, col_offset=p1.lexpos)

    def p_string_literal_list(self, p):
        """string_literal_list : string_literal

View file

@@ -205,7 +205,7 @@ Floatnumber = group(Pointfloat, Expfloat)
Imagnumber = group(r'[0-9]+[jJ]', Floatnumber + r'[jJ]')
Number = group(Imagnumber, Floatnumber, Intnumber)

StringPrefix = r'(?:[bB][rR]?|[rR][bB]?|[uU])?'
StringPrefix = r'(?:[bBp][rR]?|[rR][bBp]?|[uU])?'

# Tail end of ' string.
Single = r"[^'\\]*(?:\\.[^'\\]*)*'"
@@ -275,8 +275,13 @@ endpats = {"'": Single, '"': Double,
           "RB'''": Single3, 'RB"""': Double3,
           "u'''": Single3, 'u"""': Double3,
           "U'''": Single3, 'U"""': Double3,
           "p'''": Single3, 'p"""': Double3,
           "pr'''": Single3, 'pr"""': Double3,
           "pR'''": Single3, 'pR"""': Double3,
           "rp'''": Single3, 'rp"""': Double3,
           "Rp'''": Single3, 'Rp"""': Double3,
           'r': None, 'R': None, 'b': None, 'B': None,
           'u': None, 'U': None}
           'u': None, 'U': None, 'p': None}
triple_quoted = {}
for t in ("'''", '"""',

@@ -287,6 +292,9 @@ for t in ("'''", '"""',
          "rb'''", 'rb"""', "rB'''", 'rB"""',
          "Rb'''", 'Rb"""', "RB'''", 'RB"""',
          "u'''", 'u"""', "U'''", 'U"""',
          "p'''", 'p"""', "pr'''", 'pr"""',
          "pR'''", 'pR"""', "rp'''", 'rp"""',
          "Rp'''", 'Rp"""',
          ):
    triple_quoted[t] = t

single_quoted = {}
@@ -298,6 +306,9 @@ for t in ("'", '"',
          "rb'", 'rb"', "rB'", 'rB"',
          "Rb'", 'Rb"', "RB'", 'RB"',
          "u'", 'u"', "U'", 'U"',
          "p'", 'p"', "pr'", 'pr"',
          "pR'", 'pR"', "rp'", 'rp"',
          "Rp'", 'Rp"',
          ):
    single_quoted[t] = t
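A small, hedged check of the widened ``StringPrefix`` pattern in plain Python;
the ``fullmatch`` anchoring is purely for the demonstration:

import re

StringPrefix = r'(?:[bBp][rR]?|[rR][bBp]?|[uU])?'
pattern = re.compile(StringPrefix)

# Prefixes the new pattern accepts, including the p/raw combinations tested above.
for prefix in ('p', 'pr', 'pR', 'rp', 'Rp', 'b', 'RB', 'u', ''):
    assert pattern.fullmatch(prefix), prefix

# Combinations the regex still rejects, such as mixing 'p' with 'b' or 'u'.
for prefix in ('pb', 'bp', 'up', 'pu'):
    assert pattern.fullmatch(prefix) is None, prefix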