Mirror of https://gitlab.com/apparmor/apparmor.git
parser: Move nodeset caching into expr-tree.h
We need to rework permission type mapping to nodesets, which means the nodeset computations must move earlier in the DFA creation process instead of running as a post step of follow(). So move the nodeset caching into expr-tree.

Signed-off-by: John Johansen <john.johansen@canonical.com>
Acked-by: Steve Beattie <steve@nxnw.org>
Acked-by: Seth Arnold <seth.arnold@canonical.com>
This commit is contained in:
parent 831db5985f
commit 73c74d044d

2 changed files with 176 additions and 175 deletions
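The classes moved by this commit deduplicate NodeSets by comparing a precomputed hash first, falling back to set size, and only then to element-wise comparison. A minimal standalone sketch of that ordering strategy, using std::set<int> and a toy hash as stand-ins for the parser's NodeSet and hash_NodeSet() (all names below are illustrative, not the parser's API):

// Sketch of the hashedNodeSet comparison strategy: order by hash first,
// then by size, and only fall back to full element-wise comparison when
// both match. FakeNodeSet and fake_hash are stand-ins, not parser API.
#include <set>
#include <iostream>

typedef std::set<int> FakeNodeSet;

static unsigned long fake_hash(const FakeNodeSet *n)
{
	unsigned long h = 0;
	for (FakeNodeSet::const_iterator i = n->begin(); i != n->end(); ++i)
		h = h * 31 + *i;
	return h;
}

struct HashedSet {
	unsigned long hash;
	FakeNodeSet *nodes;

	HashedSet(FakeNodeSet *n): hash(fake_hash(n)), nodes(n) { }

	bool operator<(const HashedSet &rhs) const
	{
		if (hash != rhs.hash)
			return hash < rhs.hash;           /* cheap: compare hashes */
		if (nodes->size() != rhs.nodes->size())
			return nodes->size() < rhs.nodes->size(); /* cheaper than contents */
		return *nodes < *rhs.nodes;               /* full lexicographic compare */
	}
};

int main()
{
	FakeNodeSet a, b;
	a.insert(1); a.insert(2);
	b.insert(1); b.insert(2);
	std::set<HashedSet> cache;
	cache.insert(HashedSet(&a));
	cache.insert(HashedSet(&b));  /* same contents: treated as a duplicate */
	std::cout << cache.size() << "\n";  /* prints 1 */
	return 0;
}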
@@ -607,4 +607,180 @@ public:
	DenyMatchFlag(uint32_t flag, uint32_t quiet): MatchFlag(flag, quiet) {}
};


/*
 * hashedNodes - for efficient set comparison
 */
class hashedNodeSet {
public:
	unsigned long hash;
	NodeSet *nodes;

	hashedNodeSet(NodeSet *n): nodes(n)
	{
		hash = hash_NodeSet(n);
	}

	bool operator<(hashedNodeSet const &rhs)const
	{
		if (hash == rhs.hash) {
			if (nodes->size() == rhs.nodes->size())
				return *nodes < *(rhs.nodes);
			else
				return nodes->size() < rhs.nodes->size();
		} else {
			return hash < rhs.hash;
		}
	}
};


class hashedNodeVec {
public:
	typedef ImportantNode ** iterator;
	iterator begin() { return nodes; }
	iterator end() { iterator t = nodes ? &nodes[len] : NULL; return t; }

	unsigned long hash;
	unsigned long len;
	ImportantNode **nodes;

	hashedNodeVec(NodeSet *n)
	{
		hash = hash_NodeSet(n);
		len = n->size();
		nodes = new ImportantNode *[n->size()];

		unsigned int j = 0;
		for (NodeSet::iterator i = n->begin(); i != n->end(); i++, j++) {
			nodes[j] = *i;
		}
	}

	hashedNodeVec(NodeSet *n, unsigned long h): hash(h)
	{
		len = n->size();
		nodes = new ImportantNode *[n->size()];
		ImportantNode **j = nodes;
		for (NodeSet::iterator i = n->begin(); i != n->end(); i++) {
			*(j++) = *i;
		}
	}

	~hashedNodeVec()
	{
		delete nodes;
	}

	unsigned long size()const { return len; }

	bool operator<(hashedNodeVec const &rhs)const
	{
		if (hash == rhs.hash) {
			if (len == rhs.size()) {
				for (unsigned int i = 0; i < len; i++) {
					if (nodes[i] != rhs.nodes[i])
						return nodes[i] < rhs.nodes[i];
				}
				return false;
			}
			return len < rhs.size();
		}
		return hash < rhs.hash;
	}
};

class CacheStats {
public:
	unsigned long dup, sum, max;

	CacheStats(void): dup(0), sum(0), max(0) { };

	void clear(void) { dup = sum = max = 0; }
	virtual unsigned long size(void) const = 0;
};

class NodeCache: public CacheStats {
public:
	set<hashedNodeSet> cache;

	NodeCache(void): cache() { };
	~NodeCache() { clear(); };

	virtual unsigned long size(void) const { return cache.size(); }

	void clear()
	{
		for (set<hashedNodeSet>::iterator i = cache.begin();
		     i != cache.end(); i++) {
			delete i->nodes;
		}
		cache.clear();
		CacheStats::clear();
	}

	NodeSet *insert(NodeSet *nodes)
	{
		if (!nodes)
			return NULL;
		pair<set<hashedNodeSet>::iterator,bool> uniq;
		uniq = cache.insert(hashedNodeSet(nodes));
		if (uniq.second == false) {
			delete(nodes);
			dup++;
		} else {
			sum += nodes->size();
			if (nodes->size() > max)
				max = nodes->size();
		}
		return uniq.first->nodes;
	}
};

struct deref_less_than {
	bool operator()(hashedNodeVec * const &lhs, hashedNodeVec * const &rhs)const
	{
		return *lhs < *rhs;
	}
};

class NodeVecCache: public CacheStats {
public:
	set<hashedNodeVec *, deref_less_than> cache;

	NodeVecCache(void): cache() { };
	~NodeVecCache() { clear(); };

	virtual unsigned long size(void) const { return cache.size(); }

	void clear()
	{
		for (set<hashedNodeVec *>::iterator i = cache.begin();
		     i != cache.end(); i++) {
			delete *i;
		}
		cache.clear();
		CacheStats::clear();
	}

	hashedNodeVec *insert(NodeSet *nodes)
	{
		if (!nodes)
			return NULL;
		pair<set<hashedNodeVec *>::iterator,bool> uniq;
		hashedNodeVec *nv = new hashedNodeVec(nodes);
		uniq = cache.insert(nv);
		if (uniq.second == false) {
			delete nv;
			dup++;
		} else {
			sum += nodes->size();
			if (nodes->size() > max)
				max = nodes->size();
		}
		delete(nodes);
		return (*uniq.first);
	}
};

#endif /* __LIBAA_RE_EXPR */
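The insert() methods above centralize ownership: the caller hands over a heap-allocated NodeSet, and the cache either adopts it or deletes the duplicate and returns the canonical cached copy, so callers must continue through the returned pointer. A minimal sketch of that contract, again using stand-in types (FakeNodeSet, SimpleCache, and deref_compare are illustrative, not parser API):

// Sketch of the insert()/ownership contract the caches above implement:
// the caller passes a heap-allocated set and must continue through the
// returned pointer, because a duplicate argument is deleted on the spot.
#include <set>
#include <cassert>
#include <cstddef>

typedef std::set<int> FakeNodeSet;

struct deref_compare {
	bool operator()(FakeNodeSet *const &l, FakeNodeSet *const &r) const
	{
		return *l < *r;
	}
};

class SimpleCache {
public:
	std::set<FakeNodeSet *, deref_compare> cache;
	unsigned long dup;

	SimpleCache(): cache(), dup(0) { }
	~SimpleCache()
	{
		for (std::set<FakeNodeSet *, deref_compare>::iterator i =
			     cache.begin(); i != cache.end(); ++i)
			delete *i;
	}

	FakeNodeSet *insert(FakeNodeSet *nodes)
	{
		if (!nodes)
			return NULL;
		std::pair<std::set<FakeNodeSet *, deref_compare>::iterator,
			  bool> uniq = cache.insert(nodes);
		if (!uniq.second) {	/* already cached: drop the duplicate */
			delete nodes;
			dup++;
		}
		return *uniq.first;	/* canonical pointer either way */
	}
};

int main()
{
	SimpleCache c;
	FakeNodeSet *a = new FakeNodeSet;
	a->insert(7);
	FakeNodeSet *b = new FakeNodeSet;	/* same contents as *a */
	b->insert(7);
	FakeNodeSet *ca = c.insert(a);
	FakeNodeSet *cb = c.insert(b);		/* b is deleted inside insert() */
	assert(ca == cb && c.dup == 1);		/* one canonical copy survives */
	return 0;
}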
@@ -130,181 +130,6 @@ public:

	int accept_perms(NodeSet *state, perms_t &perms);

/*
 * hashedNodes - for efficient set comparison
 */
class hashedNodeSet {
public:
	unsigned long hash;
	NodeSet *nodes;

	hashedNodeSet(NodeSet *n): nodes(n)
	{
		hash = hash_NodeSet(n);
	}

	bool operator<(hashedNodeSet const &rhs)const
	{
		if (hash == rhs.hash) {
			if (nodes->size() == rhs.nodes->size())
				return *nodes < *(rhs.nodes);
			else
				return nodes->size() < rhs.nodes->size();
		} else {
			return hash < rhs.hash;
		}
	}
};


class hashedNodeVec {
public:
	typedef ImportantNode ** iterator;
	iterator begin() { return nodes; }
	iterator end() { iterator t = nodes ? &nodes[len] : NULL; return t; }

	unsigned long hash;
	unsigned long len;
	ImportantNode **nodes;

	hashedNodeVec(NodeSet *n)
	{
		hash = hash_NodeSet(n);
		len = n->size();
		nodes = new ImportantNode *[n->size()];

		unsigned int j = 0;
		for (NodeSet::iterator i = n->begin(); i != n->end(); i++, j++) {
			nodes[j] = *i;
		}
	}

	hashedNodeVec(NodeSet *n, unsigned long h): hash(h)
	{
		len = n->size();
		nodes = new ImportantNode *[n->size()];
		ImportantNode **j = nodes;
		for (NodeSet::iterator i = n->begin(); i != n->end(); i++) {
			*(j++) = *i;
		}
	}

	~hashedNodeVec()
	{
		delete nodes;
	}

	unsigned long size()const { return len; }

	bool operator<(hashedNodeVec const &rhs)const
	{
		if (hash == rhs.hash) {
			if (len == rhs.size()) {
				for (unsigned int i = 0; i < len; i++) {
					if (nodes[i] != rhs.nodes[i])
						return nodes[i] < rhs.nodes[i];
				}
				return false;
			}
			return len < rhs.size();
		}
		return hash < rhs.hash;
	}
};

class CacheStats {
public:
	unsigned long dup, sum, max;

	CacheStats(void): dup(0), sum(0), max(0) { };

	void clear(void) { dup = sum = max = 0; }
	virtual unsigned long size(void) const = 0;
};

class NodeCache: public CacheStats {
public:
	set<hashedNodeSet> cache;

	NodeCache(void): cache() { };
	~NodeCache() { clear(); };

	virtual unsigned long size(void) const { return cache.size(); }

	void clear()
	{
		for (set<hashedNodeSet>::iterator i = cache.begin();
		     i != cache.end(); i++) {
			delete i->nodes;
		}
		cache.clear();
		CacheStats::clear();
	}

	NodeSet *insert(NodeSet *nodes)
	{
		if (!nodes)
			return NULL;
		pair<set<hashedNodeSet>::iterator,bool> uniq;
		uniq = cache.insert(hashedNodeSet(nodes));
		if (uniq.second == false) {
			delete(nodes);
			dup++;
		} else {
			sum += nodes->size();
			if (nodes->size() > max)
				max = nodes->size();
		}
		return uniq.first->nodes;
	}
};

struct deref_less_than {
	bool operator()(hashedNodeVec * const &lhs, hashedNodeVec * const &rhs)const
	{
		return *lhs < *rhs;
	}
};

class NodeVecCache: public CacheStats {
public:
	set<hashedNodeVec *, deref_less_than> cache;

	NodeVecCache(void): cache() { };
	~NodeVecCache() { clear(); };

	virtual unsigned long size(void) const { return cache.size(); }

	void clear()
	{
		for (set<hashedNodeVec *>::iterator i = cache.begin();
		     i != cache.end(); i++) {
			delete *i;
		}
		cache.clear();
		CacheStats::clear();
	}

	hashedNodeVec *insert(NodeSet *nodes)
	{
		if (!nodes)
			return NULL;
		pair<set<hashedNodeVec *>::iterator,bool> uniq;
		hashedNodeVec *nv = new hashedNodeVec(nodes);
		uniq = cache.insert(nv);
		if (uniq.second == false) {
			delete nv;
			dup++;
		} else {
			sum += nodes->size();
			if (nodes->size() > max)
				max = nodes->size();
		}
		delete(nodes);
		return (*uniq.first);
	}
};

/*
 * ProtoState - NodeSet and ancillery information used to create a state
 */