add ?( token to lexer

adam j hartz 2016-03-02 21:38:05 -05:00
parent 0d3519f954
commit c7a61c3728


@@ -211,6 +211,10 @@ def handle_question(state, token, stream):
             n.string == '?' and n.start == token.end:
         state['last'] = n
         yield _new_token('DOUBLE_QUESTION', '??', token.start)
+    elif (n is not None and n.type == tokenize.OP
+          and n.string == '(' and n.start == token.end):
+        state['last'] = n
+        yield _new_token('QUESTION_LPAREN', '?(', token.start)
     else:
         state['last'] = token
         yield _new_token('QUESTION', '?', token.start)
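
The added branch follows the same lookahead pattern already used for DOUBLE_QUESTION: peek one token ahead in the stream, and if it is an OP token whose string is '(' and whose start position equals the end of the '?' (i.e. no whitespace between them), emit a single merged QUESTION_LPAREN token. Below is a minimal, self-contained sketch of that pattern. The token shape, the string-valued `type` field, and the driver code are assumptions made so the sketch runs on its own; the real lexer wraps tokens from Python's `tokenize` module and compares against `tokenize.OP`, and it re-dispatches the unconsumed lookahead token, which is not reproduced here.

```python
from collections import namedtuple

# Hypothetical token shape for illustration only; xonsh's real lexer uses
# tokens produced by Python's tokenize module.
Tok = namedtuple('Tok', ['type', 'string', 'start', 'end'])

def _new_token(type_, string, start):
    """Build a merged token (assumed helper mirroring the name in the diff)."""
    return Tok(type_, string, start, (start[0], start[1] + len(string)))

def handle_question(state, token, stream):
    """On '?', peek one token ahead and merge adjacent '??' or '?('."""
    n = next(stream, None)
    if n is not None and n.type == 'OP' and \
            n.string == '?' and n.start == token.end:
        state['last'] = n
        yield _new_token('DOUBLE_QUESTION', '??', token.start)
    elif (n is not None and n.type == 'OP'
          and n.string == '(' and n.start == token.end):
        state['last'] = n
        yield _new_token('QUESTION_LPAREN', '?(', token.start)
    else:
        state['last'] = token
        yield _new_token('QUESTION', '?', token.start)
        # The real lexer re-dispatches the unconsumed lookahead token here;
        # this sketch simply drops it.

# Example: '?' at column 4 followed immediately by '(' at column 5.
q = Tok('OP', '?', (1, 4), (1, 5))
lp = Tok('OP', '(', (1, 5), (1, 6))
state = {'last': None}
print(list(handle_question(state, q, iter([lp]))))
# -> [Tok(type='QUESTION_LPAREN', string='?(', start=(1, 4), end=(1, 6))]
```

The key design point is the position check `n.start == token.end`: it ensures the two characters are adjacent in the source, so `? (` with intervening whitespace still lexes as a plain QUESTION followed by a separate left parenthesis.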