feat(aa): parse apparmor preamble files.

Alexandre Pujol 2024-05-27 18:55:21 +01:00
parent 2e043d4ec8
commit a99387c323
6 changed files with 710 additions and 8 deletions


@@ -18,6 +18,37 @@ type RuleBase struct {
Optional bool
}
func newRule(rule []string) RuleBase {
comment := ""
fileInherit, noNewPrivs, optional := false, false, false
idx := 0
for idx < len(rule) {
if rule[idx] == tokCOMMENT {
comment = " " + strings.Join(rule[idx+1:], " ")
break
}
idx++
}
// The comment always begins with a leading space (see above), so all
// three markers are matched as substrings.
switch {
case strings.Contains(comment, "file_inherit"):
fileInherit = true
comment = strings.Replace(comment, "file_inherit ", "", 1)
case strings.Contains(comment, "no new privs"):
noNewPrivs = true
comment = strings.Replace(comment, "no new privs ", "", 1)
case strings.Contains(comment, "optional:"):
optional = true
comment = strings.Replace(comment, "optional: ", "", 1)
}
return RuleBase{
Comment: comment,
NoNewPrivs: noNewPrivs,
FileInherit: fileInherit,
Optional: optional,
}
}
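// Illustrative example: metadata markers are carried in the trailing comment
// and stripped from it, e.g.
//
//	newRule([]string{"owner", "/tmp/foo", "rw", "#", "file_inherit", "dev"})
//	// => RuleBase{Comment: " dev", FileInherit: true}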
func newRuleFromLog(log map[string]string) RuleBase {
comment := ""
fileInherit, noNewPrivs, optional := false, false, false

pkg/aa/parse.go Normal file (+238)

@@ -0,0 +1,238 @@
// apparmor.d - Full set of apparmor profiles
// Copyright (C) 2021-2024 Alexandre Pujol <alexandre@pujol.io>
// SPDX-License-Identifier: GPL-2.0-only
package aa
import (
"fmt"
"slices"
"strings"
)
const (
tokARROW = "->"
tokEQUAL = "="
tokLESS = "<"
tokPLUS = "+"
tokCLOSEBRACE = '}'
tokCLOSEBRACKET = ']'
tokCLOSEPAREN = ')'
tokCOMMA = ','
tokOPENBRACE = '{'
tokOPENBRACKET = '['
tokOPENPAREN = '('
)
var (
newRuleMap = map[string]func([]string) (Rule, error){
tokCOMMENT: newComment,
tokABI: newAbi,
tokALIAS: newAlias,
tokINCLUDE: newInclude,
}
openBlocks = []rune{tokOPENPAREN, tokOPENBRACE, tokOPENBRACKET}
closeBlocks = []rune{tokCLOSEPAREN, tokCLOSEBRACE, tokCLOSEBRACKET}
)
// tokenize splits a raw input rule string into tokens on spaces (and on "="
// or "+" in variable assignments), ignoring separators that appear within
// quotes, brackets, braces, or parentheses.
//
// Example:
//
// `owner @{user_config_dirs}/powerdevilrc{,.@{rand6}} rwl -> @{user_config_dirs}/#@{int}`
//
// Returns:
//
// []string{"owner", "@{user_config_dirs}/powerdevilrc{,.@{rand6}}", "rwl", "->", "@{user_config_dirs}/#@{int}"}
func tokenize(str string) []string {
var currentToken strings.Builder
var isVariable bool
var quoted bool
blockStack := []rune{}
tokens := make([]string, 0, len(str)/2)
if len(str) > 2 && str[0:2] == tokVARIABLE {
isVariable = true
}
for _, r := range str {
switch {
case (r == ' ' || r == '\t') && len(blockStack) == 0 && !quoted:
// Split on space/tab if not in a block or quoted
if currentToken.Len() != 0 {
tokens = append(tokens, currentToken.String())
currentToken.Reset()
}
case (r == '=' || r == '+') && len(blockStack) == 0 && !quoted && isVariable:
// Handle variable assignment
if currentToken.Len() != 0 {
tokens = append(tokens, currentToken.String())
currentToken.Reset()
}
tokens = append(tokens, string(r))
case r == '"' && len(blockStack) == 0:
quoted = !quoted
currentToken.WriteRune(r)
case slices.Contains(openBlocks, r):
blockStack = append(blockStack, r)
currentToken.WriteRune(r)
case slices.Contains(closeBlocks, r):
// Only nesting depth is tracked; the closing delimiter is not
// checked against its matching opener.
if len(blockStack) > 0 {
blockStack = blockStack[:len(blockStack)-1]
} else {
panic(fmt.Sprintf("unbalanced block delimiters, unexpected %q in: %s", r, str))
}
currentToken.WriteRune(r)
default:
currentToken.WriteRune(r)
}
}
if currentToken.Len() != 0 {
tokens = append(tokens, currentToken.String())
}
return tokens
}
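// For example, variable assignment operators are split out on their own
// (the isVariable case above):
//
//	tokenize(`@{exec_path} += @{bin}/@{name}`)
//	// => []string{"@{exec_path}", "+", "=", "@{bin}/@{name}"}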
func tokenToSlice(token string) []string {
res := []string{}
token = strings.Trim(token, "()\n")
if strings.ContainsAny(token, ", ") {
var sep string
switch {
case strings.Contains(token, ","):
sep = ","
case strings.Contains(token, " "):
sep = " "
}
for _, v := range strings.Split(token, sep) {
res = append(res, strings.Trim(v, " "))
}
} else {
res = append(res, token)
}
return res
}
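// For example, comma and space separated value lists normalize the same way:
//
//	tokenToSlice("(rw, rbind)")
//	// => []string{"rw", "rbind"}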
func tokensStripComment(tokens []string) []string {
res := []string{}
for _, v := range tokens {
if v == tokCOMMENT {
break
}
res = append(res, v)
}
return res
}
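// For example:
//
//	tokensStripComment([]string{"@{bin}/foo", "rw", "#", "trailing", "note"})
//	// => []string{"@{bin}/foo", "rw"}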
// newRules converts a slice of tokenized rules into a slice of Rule.
func newRules(rules [][]string) (Rules, error) {
var err error
var r Rule
res := make(Rules, 0, len(rules))
for _, rule := range rules {
if len(rule) == 0 {
return nil, fmt.Errorf("empty rule")
}
if newRule, ok := newRuleMap[rule[0]]; ok {
r, err = newRule(rule)
if err != nil {
return nil, err
}
res = append(res, r)
} else if strings.HasPrefix(rule[0], tokVARIABLE) {
r, err = newVariable(rule)
if err != nil {
return nil, err
}
res = append(res, r)
} else {
return nil, fmt.Errorf("unrecognized rule: %s", rule)
}
}
return res, nil
}
func (f *AppArmorProfileFile) parsePreamble(input []string) error {
var err error
var r Rule
var rules Rules
tokenizedRules := [][]string{}
for _, line := range input {
if strings.HasPrefix(line, tokCOMMENT) {
r, err = newComment(strings.Split(line, " "))
if err != nil {
return err
}
rules = append(rules, r)
} else {
tokens := tokenize(line)
tokenizedRules = append(tokenizedRules, tokens)
}
}
rr, err := newRules(tokenizedRules)
if err != nil {
return err
}
f.Preamble = append(f.Preamble, rules...)
f.Preamble = append(f.Preamble, rr...)
return nil
}
// Parse an apparmor profile file.
//
// Only supports parsing of the apparmor file preamble and profile headers.
//
// Warning: this is purposely an incomplete, basic parser for apparmor
// profiles, aimed only at internal tooling. For "simplicity", it does not
// use antlr / participle. It is only used for experimental features in the
// apparmor.d project.
//
// It stops at the first profile header and does not support comma-continued
// multiline rules.
//
// Current use cases:
//
// - Parse include and tunables
// - Parse variables in the profile preamble and in tunable files
// - Parse (sub)profile headers to edit flags
func (f *AppArmorProfileFile) Parse(input string) error {
rawHeader := ""
rawPreamble := []string{}
done:
for _, line := range strings.Split(input, "\n") {
tmp := strings.TrimLeft(line, "\t ")
tmp = strings.TrimRight(tmp, ",")
switch {
case tmp == "":
continue
case strings.HasPrefix(tmp, tokPROFILE):
rawHeader = tmp
break done
default:
rawPreamble = append(rawPreamble, tmp)
}
}
if err := f.parsePreamble(rawPreamble); err != nil {
return err
}
if rawHeader != "" {
header, err := newHeader(tokenize(rawHeader))
if err != nil {
return err
}
profile := &Profile{Header: header}
f.Profiles = append(f.Profiles, profile)
}
return nil
}
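// Minimal usage sketch, on a hypothetical three-line profile:
//
//	f := &AppArmorProfileFile{}
//	err := f.Parse(`include <tunables/global>
//	@{exec_path} = @{bin}/foo
//	profile foo @{exec_path} {`)
//	// err == nil; f.Preamble holds the Include and the Variable,
//	// and f.Profiles[0].Header.Name == "foo".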

pkg/aa/parse_test.go Normal file (+281)

@@ -0,0 +1,281 @@
// apparmor.d - Full set of apparmor profiles
// Copyright (C) 2021-2024 Alexandre Pujol <alexandre@pujol.io>
// SPDX-License-Identifier: GPL-2.0-only
package aa
import (
"reflect"
"testing"
"github.com/roddhjav/apparmor.d/pkg/util"
)
func Test_tokenizeRule(t *testing.T) {
for _, tt := range testRules {
t.Run(tt.name, func(t *testing.T) {
if got := tokenize(tt.raw); !reflect.DeepEqual(got, tt.tokens) {
t.Errorf("tokenize() = %v, want %v", got, tt.tokens)
}
})
}
}
func Test_AppArmorProfileFile_Parse(t *testing.T) {
for _, tt := range testBlocks {
t.Run(tt.name, func(t *testing.T) {
got := &AppArmorProfileFile{}
if err := got.Parse(tt.raw); (err != nil) != tt.wParseErr {
t.Errorf("AppArmorProfileFile.Parse() error = %v, wantErr %v", err, tt.wParseErr)
}
if !reflect.DeepEqual(got, tt.apparmor) {
t.Errorf("AppArmorProfileFile.Parse() = |%v|, want |%v|", got, tt.apparmor)
}
})
}
}
var (
// Test cases for tokenize
testRules = []struct {
name string
raw string
tokens []string
}{
{
name: "empty",
raw: "",
tokens: []string{},
},
{
name: "abi",
raw: `abi <abi/4.0>`,
tokens: []string{"abi", "<abi/4.0>"},
},
{
name: "alias",
raw: `alias /mnt/usr -> /usr`,
tokens: []string{"alias", "/mnt/usr", "->", "/usr"},
},
{
name: "variable",
raw: `@{name} = torbrowser "tor browser"`,
tokens: []string{"@{name}", "=", "torbrowser", `"tor browser"`},
},
{
name: "variable-2",
raw: `@{exec_path} += @{bin}/@{name}`,
tokens: []string{"@{exec_path}", "+", "=", "@{bin}/@{name}"},
},
{
name: "variable-3",
raw: `@{empty}="dummy"`,
tokens: []string{"@{empty}", "=", `"dummy"`},
},
{
name: "variable-4",
raw: `@{XDG_PROJECTS_DIR}+="Git"`,
tokens: []string{"@{XDG_PROJECTS_DIR}", "+", "=", `"Git"`},
},
{
name: "header",
raw: `profile foo @{exec_path} xattrs=(security.tagged=allowed) flags=(complain attach_disconnected)`,
tokens: []string{"profile", "foo", "@{exec_path}", "xattrs=(security.tagged=allowed)", "flags=(complain attach_disconnected)"},
},
{
name: "include",
raw: `include <tunables/global>`,
tokens: []string{"include", "<tunables/global>"},
},
{
name: "include-if-exists",
raw: `include if exists "/etc/apparmor.d/dummy"`,
tokens: []string{"include", "if", "exists", `"/etc/apparmor.d/dummy"`},
},
{
name: "rlimit",
raw: `set rlimit nproc <= 200`,
tokens: []string{"set", "rlimit", "nproc", "<=", "200"},
},
{
name: "userns",
raw: `userns`,
tokens: []string{"userns"},
},
{
name: "capability",
raw: `capability dac_read_search`,
tokens: []string{"capability", "dac_read_search"},
},
{
name: "network",
raw: `network netlink raw`,
tokens: []string{"network", "netlink", "raw"},
},
{
name: "mount",
raw: `mount /{,**}`,
tokens: []string{"mount", "/{,**}"},
},
{
name: "mount-2",
raw: `mount options=(rw rbind) /tmp/newroot/ -> /tmp/newroot/`,
tokens: []string{"mount", "options=(rw rbind)", "/tmp/newroot/", "->", "/tmp/newroot/"},
},
{
name: "mount-3",
raw: `mount options=(rw silent rprivate) -> /oldroot/`,
tokens: []string{"mount", "options=(rw silent rprivate)", "->", "/oldroot/"},
},
{
name: "mount-4",
raw: `mount fstype=devpts options=(rw nosuid noexec) devpts -> /newroot/dev/pts/`,
tokens: []string{"mount", "fstype=devpts", "options=(rw nosuid noexec)", "devpts", "->", "/newroot/dev/pts/"},
},
{
name: "signal",
raw: `signal (receive) set=(cont, term,winch) peer=at-spi-bus-launcher`,
tokens: []string{"signal", "(receive)", "set=(cont, term,winch)", "peer=at-spi-bus-launcher"},
},
{
name: "unix",
raw: `unix (send receive) type=stream addr="@/tmp/.ICE[0-9]*-unix/19 5" peer=(label="@{p_systemd}", addr=none)`,
tokens: []string{"unix", "(send receive)", "type=stream", "addr=\"@/tmp/.ICE[0-9]*-unix/19 5\"", "peer=(label=\"@{p_systemd}\", addr=none)"},
},
{
name: "unix-2",
raw: ` unix (connect, receive, send)
type=stream
peer=(addr="@/tmp/ibus/dbus-????????")`,
tokens: []string{"unix", "(connect, receive, send)\n", "type=stream\n", `peer=(addr="@/tmp/ibus/dbus-????????")`},
},
{
name: "dbus",
raw: `dbus receive bus=system path=/org/freedesktop/DBus interface=org.freedesktop.DBus member=AddMatch peer=(name=:1.3, label=power-profiles-daemon)`,
tokens: []string{
"dbus", "receive", "bus=system",
"path=/org/freedesktop/DBus", "interface=org.freedesktop.DBus",
"member=AddMatch", "peer=(name=:1.3, label=power-profiles-daemon)",
},
},
{
name: "file-1",
raw: `owner @{user_config_dirs}/powerdevilrc{,.@{rand6}} rwl -> @{user_config_dirs}/#@{int}`,
tokens: []string{"owner", "@{user_config_dirs}/powerdevilrc{,.@{rand6}}", "rwl", "->", "@{user_config_dirs}/#@{int}"},
},
{
name: "file-2",
raw: `@{sys}/devices/@{pci}/class r`,
tokens: []string{"@{sys}/devices/@{pci}/class", "r"},
},
{
name: "file-3",
raw: `owner @{PROC}/@{pid}/task/@{tid}/comm rw`,
tokens: []string{"owner", "@{PROC}/@{pid}/task/@{tid}/comm", "rw"},
},
{
name: "file-4",
raw: `owner /{var/,}tmp/#@{int} rw`,
tokens: []string{"owner", "/{var/,}tmp/#@{int}", "rw"},
},
}
// Test cases for Parse
testBlocks = []struct {
name string
raw string
apparmor *AppArmorProfileFile
wParseErr bool
}{
{
name: "empty",
raw: "",
apparmor: &AppArmorProfileFile{},
wParseErr: false,
},
{
name: "comment",
raw: `
# IsLineRule comment
include <tunables/global> # comment included
@{lib_dirs} = @{lib}/@{name} /opt/@{name} # comment in variable`,
apparmor: &AppArmorProfileFile{
Preamble: Rules{
&Comment{RuleBase: RuleBase{IsLineRule: true, Comment: " IsLineRule comment"}},
&Include{
RuleBase: RuleBase{Comment: " comment included"},
Path: "tunables/global", IsMagic: true,
},
&Variable{
RuleBase: RuleBase{Comment: " comment in variable"},
Name: "lib_dirs", Define: true,
Values: []string{"@{lib}/@{name}", "/opt/@{name}"},
},
},
},
wParseErr: false,
},
{
name: "cornercases",
raw: `# Simple test
include <tunables/global>
# { commented block }
@{name} = {D,d}ummy
@{exec_path} = @{bin}/@{name}
alias /mnt/{,usr.sbin.}mount.cifs -> /sbin/mount.cifs,
@{coreutils} += gawk {,e,f}grep head
profile @{exec_path} {
`,
apparmor: &AppArmorProfileFile{
Preamble: Rules{
&Comment{RuleBase: RuleBase{IsLineRule: true, Comment: " Simple test"}},
&Comment{RuleBase: RuleBase{IsLineRule: true, Comment: " { commented block }"}},
&Include{IsMagic: true, Path: "tunables/global"},
&Variable{Name: "name", Values: []string{"{D,d}ummy"}, Define: true},
&Variable{Name: "exec_path", Values: []string{"@{bin}/@{name}"}, Define: true},
&Alias{Path: "/mnt/{,usr.sbin.}mount.cifs", RewrittenPath: "/sbin/mount.cifs"},
&Variable{Name: "coreutils", Values: []string{"gawk", "{,e,f}grep", "head"}, Define: false},
},
Profiles: []*Profile{
{
Header: Header{
Name: "@{exec_path}",
Attachments: []string{},
Attributes: map[string]string{},
Flags: []string{},
},
},
},
},
wParseErr: false,
},
{
name: "string.aa",
raw: util.MustReadFile(testData.Join("string.aa")),
apparmor: &AppArmorProfileFile{
Preamble: Rules{
&Comment{RuleBase: RuleBase{Comment: " Simple test profile for the AppArmorProfileFile.String() method", IsLineRule: true}},
&Abi{IsMagic: true, Path: "abi/4.0"},
&Alias{Path: "/mnt/usr", RewrittenPath: "/usr"},
&Include{IsMagic: true, Path: "tunables/global"},
&Variable{
Name: "exec_path", Define: true,
Values: []string{"@{bin}/foo", "@{lib}/foo"},
},
},
Profiles: []*Profile{
{
Header: Header{
Name: "foo",
Attachments: []string{"@{exec_path}"},
Attributes: map[string]string{"security.tagged": "allowed"},
Flags: []string{"complain", "attach_disconnected"},
},
},
},
},
wParseErr: false,
},
}
)


@@ -5,7 +5,9 @@
package aa
import (
"fmt"
"slices"
"strings"
)
const (
@@ -21,6 +23,12 @@ type Comment struct {
RuleBase
}
func newComment(rule []string) (Rule, error) {
base := newRule(rule)
base.IsLineRule = true
return &Comment{RuleBase: base}, nil
}
func (r *Comment) Validate() error {
return nil
}
@@ -55,6 +63,31 @@ type Abi struct {
IsMagic bool
}
func newAbi(rule []string) (Rule, error) {
var magic bool
if len(rule) > 0 && rule[0] == tokABI {
rule = rule[1:]
}
if len(rule) != 1 {
return nil, fmt.Errorf("invalid abi format: %s", rule)
}
path := rule[0]
switch {
case path[0] == '"':
magic = false
case path[0] == '<':
magic = true
default:
return nil, fmt.Errorf("invalid path %s in rule: %s", path, rule)
}
return &Abi{
RuleBase: newRule(rule),
Path: strings.Trim(path, "\"<>"),
IsMagic: magic,
}, nil
}
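// For example:
//
//	newAbi([]string{"abi", "<abi/4.0>"})
//	// => &Abi{IsMagic: true, Path: "abi/4.0"}, nil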
func (r *Abi) Validate() error {
return nil
}
@@ -90,6 +123,23 @@ type Alias struct {
RewrittenPath string
}
func newAlias(rule []string) (Rule, error) {
if len(rule) > 0 && rule[0] == tokALIAS {
rule = rule[1:]
}
if len(rule) != 3 {
return nil, fmt.Errorf("invalid alias format: %s", rule)
}
if rule[1] != tokARROW {
return nil, fmt.Errorf("invalid alias format, missing %s in: %s", tokARROW, rule)
}
return &Alias{
RuleBase: newRule(rule),
Path: rule[0],
RewrittenPath: rule[2],
}, nil
}
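// For example:
//
//	newAlias([]string{"alias", "/mnt/usr", "->", "/usr"})
//	// => &Alias{Path: "/mnt/usr", RewrittenPath: "/usr"}, nil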
func (r *Alias) Validate() error {
return nil
}
@@ -126,6 +176,41 @@ type Include struct {
IsMagic bool
}
func newInclude(rule []string) (Rule, error) {
var magic bool
var ifexists bool
if len(rule) > 0 && rule[0] == tokINCLUDE {
rule = rule[1:]
}
size := len(rule)
if size == 0 {
return nil, fmt.Errorf("invalid include format: %v", rule)
}
if size >= 3 && strings.Join(rule[:2], " ") == tokIFEXISTS {
ifexists = true
rule = rule[2:]
}
path := rule[0]
switch {
case path[0] == '"':
magic = false
case path[0] == '<':
magic = true
default:
return nil, fmt.Errorf("invalid path format: %v", path)
}
return &Include{
RuleBase: newRule(rule),
IfExists: ifexists,
Path: strings.Trim(path, "\"<>"),
IsMagic: magic,
}, nil
}
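// For example, the optional "if exists" prefix is consumed before the path:
//
//	newInclude([]string{"include", "if", "exists", "<tunables/global>"})
//	// => &Include{IfExists: true, IsMagic: true, Path: "tunables/global"}, nil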
func (r *Include) Validate() error {
return nil
}
@@ -165,6 +250,35 @@ type Variable struct {
Define bool
}
func newVariable(rule []string) (Rule, error) {
var define bool
var values []string
if len(rule) < 3 {
return nil, fmt.Errorf("invalid variable format: %v", rule)
}
name := strings.Trim(rule[0], tokVARIABLE+"}")
switch rule[1] {
case tokEQUAL:
define = true
values = tokensStripComment(rule[2:])
case tokPLUS:
if rule[2] != tokEQUAL {
return nil, fmt.Errorf("invalid operator in variable: %v", rule)
}
define = false
values = tokensStripComment(rule[3:])
default:
return nil, fmt.Errorf("invalid operator in variable: %v", rule)
}
return &Variable{
RuleBase: newRule(rule),
Name: name,
Values: values,
Define: define,
}, nil
}
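// For example, "=" defines a new variable, while "+" "=" (as produced by
// tokenize) appends to one:
//
//	newVariable([]string{"@{name}", "=", "torbrowser"})
//	// => &Variable{Name: "name", Define: true, Values: []string{"torbrowser"}}, nil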
func (r *Variable) Validate() error {
return nil
}


@@ -45,6 +45,51 @@ type Header struct {
Flags []string
}
func newHeader(rule []string) (Header, error) {
if len(rule) == 0 {
return Header{}, nil
}
if rule[len(rule)-1] == "{" {
rule = rule[:len(rule)-1]
}
if rule[0] == tokPROFILE {
rule = rule[1:]
}
toDelete := []int{}
flags := []string{}
attributes := make(map[string]string)
for idx, token := range rule {
if item, ok := strings.CutPrefix(token, tokFLAGS+"="); ok {
flags = tokenToSlice(item)
toDelete = append(toDelete, idx)
} else if item, ok := strings.CutPrefix(token, tokATTRIBUTES+"="); ok {
for _, m := range tokenToSlice(item) {
kv := strings.SplitN(m, "=", 2)
if len(kv) != 2 {
return Header{}, fmt.Errorf("invalid attribute: %s", m)
}
attributes[kv[0]] = kv[1]
}
toDelete = append(toDelete, idx)
}
}
for i := len(toDelete) - 1; i >= 0; i-- {
rule = slices.Delete(rule, toDelete[i], toDelete[i]+1)
}
name, attachments := "", []string{}
if len(rule) >= 1 {
name = rule[0]
if len(rule) > 1 {
attachments = rule[1:]
}
}
return Header{
Name: name,
Attachments: attachments,
Attributes: attributes,
Flags: flags,
}, nil
}
func (r *Profile) Validate() error {
if err := validateValues(r.Kind(), tokFLAGS, r.Flags); err != nil {
return fmt.Errorf("profile %s: %w", r.Name, err)


@@ -153,19 +153,12 @@ func validateValues(rule string, key string, values []string) error {
// Helper function to convert a string to a slice of rule values according to
// the rule requirements as defined in the requirements map.
func toValues(rule string, key string, input string) ([]string, error) {
- var sep string
req, ok := requirements[rule][key]
if !ok {
return nil, fmt.Errorf("unrecognized requirement '%s' for rule %s", key, rule)
}
- switch {
- case strings.Contains(input, ","):
- sep = ","
- case strings.Contains(input, " "):
- sep = " "
- }
- res := strings.Split(input, sep)
+ res := tokenToSlice(input)
for idx := range res {
res[idx] = strings.Trim(res[idx], `" `)
if !slices.Contains(req, res[idx]) {