Rework how the lexer identifies hat names following a ^

Reworking this code is a step toward getting rid of the SUB_NAME2 start
condition.

Signed-off-by: John Johansen <john.johansen@canonical.com>
Acked-by: Kees Cook <kees@ubuntu.com>
Author: John Johansen
Date:   2012-01-02 16:49:31 -08:00
Parent: 6ef6f605b0
Commit: ff98d79963
2 changed files with 6 additions and 7 deletions
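
In profile syntax a hat is introduced by a caret, e.g. "^child { ... }" nested
inside a profile, so the lexer treats the ^ as a token of its own and then reads
the name that follows it in a dedicated start condition. Below is a minimal flex
sketch of that approach, not the tree's actual file: the token values, the ID
pattern, and the driver main() are assumptions made for illustration only.

%option noyywrap stack

%x SUB_NAME

CARET		"^"
ID		[A-Za-z0-9_]+

%{
#include <stdio.h>
/* Stand-ins for the yacc-generated token values (assumption for this sketch). */
enum { TOK_CARET = 258, TOK_ID };
%}

%%
{CARET}		{ yy_push_state(SUB_NAME);	/* a hat name follows the ^ */
		  return TOK_CARET; }
<SUB_NAME>{ID}	{ yy_pop_state();		/* leave SUB_NAME once the name is read */
		  return TOK_ID; }
[ \t\n]+	;				/* skip whitespace between tokens */
.		;				/* ignore anything else in this sketch */
%%

int main(void)
{
	int tok;
	while ((tok = yylex()) != 0)
		printf("token %d  text \"%s\"\n", tok, yytext);
	return 0;
}

Fed "^child", this returns TOK_CARET and then TOK_ID with yytext "child"; the
{SEPARATOR}/TOK_SEP rule being replaced below did the same job under its old names.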

parser/parser_lexer.l

@@ -168,13 +168,12 @@ void include_filename(char *filename, int search)
 %}
-UP		"^"
+CARET		"^"
 OPEN_BRACE	\{
 CLOSE_BRACE	\}
 SLASH		\/
 COLON		:
 END_OF_RULE	[,]
-SEPARATOR	{UP}
 RANGE		-
 MODE_CHARS	([RrWwaLlMmkXx])|(([Pp]|[Cc])[Xx])|(([Pp]|[Cc])?([IiUu])[Xx])
 MODES		{MODE_CHARS}+
@@ -447,11 +446,11 @@ LT_EQUAL	<=
 {END_OF_RULE}	{ DUMP_PREPROCESS; return TOK_END_OF_RULE; }
-{SEPARATOR}	{
+{CARET}	{
 	DUMP_PREPROCESS;
-	PDEBUG("Matched a separator\n");
+	PDEBUG("Matched hat ^\n");
 	yy_push_state(SUB_NAME);
-	return TOK_SEP;
+	return TOK_CARET;
 	}
 {ARROW}	{
 	DUMP_PREPROCESS;

parser/parser_yacc.y

@@ -78,7 +78,7 @@ void add_local_entry(struct codomain *cod);
 %}
 %token TOK_ID
-%token TOK_SEP
+%token TOK_CARET
 %token TOK_OPEN
 %token TOK_CLOSE
 %token TOK_MODE
@@ -1009,7 +1009,7 @@ network_rule: TOK_NETWORK TOK_ID TOK_ID TOK_END_OF_RULE
 	$$ = entry;
 	}
-hat_start: TOK_SEP {}
+hat_start: TOK_CARET {}
 	| TOK_HAT {}
 file_mode: TOK_MODE