apparmor/parser/libapparmor_re/hfa.h
John Johansen 2674a8b708 Split the nodeset used in computing the dfa into two sets, accepting and
non-accepting, and have the proto-state use them.

To reduce memory overhead each set gains its own "cache" that makes sure
there is only a single instance of each NodeSet generated.  And since
we have a cache abstraction, move the relevant stats into it.

Also refactor the code slightly to make the caches, work_queue, etc. DFA
member variables instead of passing them around as parameters.

The split + caching results in a small reduction in memory use, as the
cost of ProtoState + caching is less than the redundancy that is eliminated.
However, it also causes a small decrease in performance.

Sorry, I know this really should have been split into multiple patches,
but the patch evolved and I got lazy and decided not to bother
splitting it.

Signed-off-by: John Johansen <john.johansen@canonical.com>
Acked-by: Kees Cook <kees@ubuntu.com>
2011-12-15 05:14:37 -08:00


/*
 * (C) 2006, 2007 Andreas Gruenbacher <agruen@suse.de>
 * Copyright (c) 2003-2008 Novell, Inc. (All rights reserved)
 * Copyright 2009-2010 Canonical Ltd.
 *
 * The libapparmor library is licensed under the terms of the GNU
 * Lesser General Public License, version 2.1. Please see the file
 * COPYING.LGPL.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public License
 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
 *
 *
 * Base of implementation based on the Lexical Analysis chapter of:
 *    Alfred V. Aho, Ravi Sethi, Jeffrey D. Ullman:
 *    Compilers: Principles, Techniques, and Tools (The "Dragon Book"),
 *    Addison-Wesley, 1986.
 */
#ifndef __LIBAA_RE_HFA_H
#define __LIBAA_RE_HFA_H

#include <list>
#include <map>
#include <vector>

#include <stdint.h>

#include "expr-tree.h"

class State;
typedef map<uchar, State *> StateTrans;
typedef list<State *> Partition;

uint32_t accept_perms(NodeSet *state, uint32_t *audit_ctl, int *error);

/*
 * hashedNodeSet - a NodeSet paired with its precomputed hash, for
 * efficient set comparison
 */
class hashedNodeSet {
public:
	unsigned long hash;
	NodeSet *nodes;

	hashedNodeSet(NodeSet *n): nodes(n)
	{
		hash = hash_NodeSet(n);
	}

	bool operator<(hashedNodeSet const &rhs)const
	{
		if (hash == rhs.hash) {
			if (nodes->size() == rhs.nodes->size())
				return *nodes < *(rhs.nodes);
			else
				return nodes->size() < rhs.nodes->size();
		} else {
			return hash < rhs.hash;
		}
	}
};
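
/*
 * Note on the ordering above: operator< compares the cheapest keys first,
 * the precomputed hash, then the set size, and only falls back to an
 * element-wise NodeSet comparison when both of those tie.  This keeps
 * lookups in a set<hashedNodeSet> cheap in the common, non-colliding case.
 */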

class CacheStats {
public:
	unsigned long dup, sum, max;

	CacheStats(void): dup(0), sum(0), max(0) { };

	void clear(void) { dup = sum = max = 0; }
	virtual unsigned long size(void) const = 0;
};

class NodeCache: public CacheStats {
public:
	set<hashedNodeSet> cache;

	NodeCache(void): cache() { };
	~NodeCache() { clear(); };

	virtual unsigned long size(void) const { return cache.size(); }

	void clear()
	{
		for (set<hashedNodeSet>::iterator i = cache.begin();
		     i != cache.end(); i++) {
			delete i->nodes;
		}
		cache.clear();
		CacheStats::clear();
	}

	NodeSet *insert(NodeSet *nodes)
	{
		if (!nodes)
			return NULL;
		pair<set<hashedNodeSet>::iterator,bool> uniq;
		uniq = cache.insert(hashedNodeSet(nodes));
		if (uniq.second == false) {
			delete(nodes);
			dup++;
		} else {
			sum += nodes->size();
			if (nodes->size() > max)
				max = nodes->size();
		}
		return uniq.first->nodes;
	}
};
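
/*
 * Sketch of intended NodeCache use (hypothetical caller, names made up):
 * insert() takes ownership of the passed NodeSet and returns the canonical
 * cached instance, freeing the argument when an equal set is already cached.
 *
 *	NodeSet *nodes = new NodeSet(...);
 *	nodes = cache.insert(nodes);	// nodes now points at the single
 *					// shared copy; do not free it here
 */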

/*
 * ProtoState - the NodeSets and ancillary information used to create a state
 */
class ProtoState {
public:
	NodeSet *nnodes;
	NodeSet *anodes;

	ProtoState(NodeSet *n, NodeSet *a = NULL): nnodes(n), anodes(a) { };

	bool operator<(ProtoState const &rhs)const
	{
		if (nnodes == rhs.nnodes)
			return anodes < rhs.anodes;
		return nnodes < rhs.nnodes;
	}

	unsigned long size(void)
	{
		if (anodes)
			return nnodes->size() + anodes->size();
		return nnodes->size();
	}
};
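
/*
 * Note: ProtoState's operator< compares the NodeSet pointers, not the sets'
 * contents.  This only yields a usable ordering because the anodes and
 * nnodes caches guarantee a single canonical instance per distinct NodeSet,
 * so pointer identity implies set equality.
 */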

/*
 * State - individual DFA state information
 *   label: a unique label used to identify the state when pretty printing;
 *          the non-matching state is set up to have label == 0 and
 *          the start state is set up to have label == 1
 *   audit: the audit permission mask for the state
 *   accept: the accept permissions for the state
 *   trans: set of transitions from this state
 *   otherwise: the default state for transitions not in @trans
 *   partition: a temporary work variable used during dfa minimization.
 *          It can be replaced with a map, but that is slower and uses more
 *          memory.
 *   proto: a temporary work variable used during dfa creation.  It can
 *          be replaced by using the nodemap, but that is slower.
 */
class State {
public:
	State(int l, ProtoState &n, State *other) throw(int):
		label(l), audit(0), accept(0), trans()
	{
		int error;

		if (other)
			otherwise = other;
		else
			otherwise = this;

		proto = n;

		/* Compute permissions associated with the State. */
		accept = accept_perms(n.anodes, &audit, &error);
		if (error) {
			//cerr << "Failing on accept perms " << error << "\n";
			throw error;
		}
	};

	int label;
	uint32_t audit, accept;
	StateTrans trans;
	State *otherwise;

	/* temp storage for State construction */
	union {
		Partition *partition;
		ProtoState proto;
	};
};
ostream &operator<<(ostream &os, const State &state);

class NodeMap: public CacheStats
{
public:
	typedef map<ProtoState, State *>::iterator iterator;
	iterator begin() { return cache.begin(); }
	iterator end() { return cache.end(); }

	map<ProtoState, State *> cache;

	NodeMap(void): cache() { };
	~NodeMap() { clear(); };

	virtual unsigned long size(void) const { return cache.size(); }

	void clear()
	{
		cache.clear();
		CacheStats::clear();
	}

	pair<iterator,bool> insert(ProtoState &proto, State *state)
	{
		pair<iterator,bool> uniq;
		uniq = cache.insert(make_pair(proto, state));
		if (uniq.second == false) {
			dup++;
		} else {
			sum += proto.size();
			if (proto.size() > max)
				max = proto.size();
		}
		return uniq;
	}
};
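
/*
 * NodeMap caches the ProtoState -> State pairing built up while the DFA is
 * being constructed: insert() returns the existing pairing (and counts a
 * duplicate) when an equal ProtoState is already present, otherwise it
 * records the new State and updates the size statistics.
 */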

/* DFA - the deterministic finite automaton built from the expr tree */
class DFA {
	void dump_node_to_dfa(void);
	State *add_new_state(NodeSet *nodes, State *other);
	void update_state_transitions(State *state);

	/* temporary values used during computations */
	NodeCache anodes_cache;
	NodeCache nnodes_cache;
	NodeMap node_map;
	list<State *> work_queue;

public:
	DFA(Node *root, dfaflags_t flags);
	virtual ~DFA();

	void remove_unreachable(dfaflags_t flags);
	bool same_mappings(State *s1, State *s2);
	size_t hash_trans(State *s);
	void minimize(dfaflags_t flags);
	void dump(ostream &os);
	void dump_dot_graph(ostream &os);
	void dump_uniq_perms(const char *s);
	map<uchar, uchar> equivalence_classes(dfaflags_t flags);
	void apply_equivalence_classes(map<uchar, uchar> &eq);

	Node *root;
	State *nonmatching, *start;
	Partition states;
};
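
/*
 * A sketch of how the pieces above fit together (hypothetical caller; the
 * real driver in the parser may order things differently):
 *
 *	DFA dfa(root, flags);			// build states from the expr tree
 *	map<uchar, uchar> eq = dfa.equivalence_classes(flags);
 *	dfa.apply_equivalence_classes(eq);	// collapse equivalent input chars
 *	dfa.minimize(flags);			// merge equivalent states
 */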
void dump_equivalence_classes(ostream &os, map<uchar, uchar> &eq);
#endif /* __LIBAA_RE_HFA_H */