initial commit
Parent: 0fbe15dc8a
Commit: 89ec2321b7
refpolicy/support/fc_sort.c (new file, 484 lines)
@@ -0,0 +1,484 @@
#include <stdio.h>
#include <stdlib.h>

/* file_context_node
 * A node used in a linked list of file contexts.
 * Each node contains the regular expression, the type and
 * the context, as well as information about the regular
 * expression. The regular expression data (meta, stem_len
 * and str_len) can be filled in by using the fc_fill_data
 * function after the regular expression has been loaded.
 * next points to the next node in the linked list.
 */
struct file_context_node {
    char* regex;
    char* type;
    char* context;
    int meta;
    int stem_len;
    int str_len;

    struct file_context_node* next;
};

/* file_context_bucket
 * A node used in a linked list of buckets that contain
 * file_context_nodes.
 * Each node contains a pointer to a file_context_node which
 * is the head of its linked list. This linked list is the
 * content of this bucket.
 * next points to the next bucket in the linked list.
 */
struct file_context_bucket {
    struct file_context_node* data;

    struct file_context_bucket* next;
};

/* fc_compare is used by fc_merge before it is defined below. */
int fc_compare( struct file_context_node* a, struct file_context_node* b );

/* fc_merge
 * Merges two sorted file context linked lists into one
 * sorted list.
 * Pass in two lists, a and b; fc_merge returns the head of the
 * merged list (the nodes of b are spliced into a, so callers
 * should only use the returned pointer afterwards).
 */
struct file_context_node* fc_merge( struct file_context_node* a, struct file_context_node* b )
{
    struct file_context_node* a_current;
    struct file_context_node* b_current;
    struct file_context_node* temp;
    struct file_context_node* jumpto;

    /* If a is an empty list, and b is not,
     * set a as b and proceed to the end. */
    if( !a && b )
        a = b;
    /* If b is an empty list, leave a as it is. */
    else if( !b ) { }
    else {
        /* Make it so that list a always has the lesser
         * first element. */
        if( fc_compare( a, b ) == 1 ) {
            temp = a;
            a = b;
            b = temp;
        }
        a_current = a;
        b_current = b;

        /* Merge by inserting b's nodes in between a's nodes. */
        while( a_current->next && b_current ) {
            jumpto = a_current->next;

            /* Insert b's nodes in between the current a node
             * and the next a node. */
            while( b_current && a_current->next &&
                   fc_compare( a_current->next, b_current ) != -1 ) {
                temp = a_current->next;
                a_current->next = b_current;
                b_current = b_current->next;
                a_current->next->next = temp;
                a_current = a_current->next;
            }

            /* Skip over the nodes inserted from b, to the
             * next node in the original a. */
            a_current = jumpto;
        }

        /* If there is anything left in b to be inserted,
         * put it on the end. */
        if( b_current ) {
            a_current->next = b_current;
        }
    }

    b = NULL;

    return a;
}

/* fc_merge_sort
 * Sorts file contexts from least specific to most specific.
 * The bucket linked list is passed in, and after fc_merge_sort
 * completes there is only one bucket left (pointed to by master)
 * containing a linked list of all the file contexts, in sorted
 * order.
 *
 * Explanation of the algorithm:
 * fc_merge_sort is an iterative implementation of merge sort.
 * At first, each bucket holds a linked list of file contexts
 * that is one element long.
 * On each pass, every second bucket is merged into the bucket
 * before it, which halves the number of buckets.
 * The passes repeat until there is only one bucket left,
 * containing the list of file contexts, sorted.
 */
void fc_merge_sort( struct file_context_bucket* master )
{
    int i;

    struct file_context_bucket* current;
    struct file_context_bucket* temp;

    struct file_context_node* ncurrent;
    struct file_context_node* ntemp;

    /* Loop until master is the only bucket left,
     * so that this stops when master contains
     * the sorted list. */
    while( master->next ) {
        current = master;

        /* This loop merges buckets two-by-two. */
        while( current ) {
            if( current->next ) {
                /* Merge the next bucket into the current one. */
                current->data = fc_merge( current->data, current->next->data );

                /* Remove the next bucket, which is now empty. */
                temp = current->next;
                current->next = current->next->next;
                free( temp );
            }
            current = current->next;
        }
    }
}
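
/* Worked example of the passes described above (B1..B5 are hypothetical
 * buckets, each initially holding one file context):
 *   pass 1: B2 is merged into B1 and B4 into B3, leaving B1 B3 B5;
 *   pass 2: B3 is merged into B1, leaving B1 B5;
 *   pass 3: B5 is merged into B1, and master now holds the fully
 *           sorted list. */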

/* fc_compare
 * Compares two file contexts' regular expressions and returns:
 *    -1 if a is less specific than b
 *     0 if a and b are equally specific
 *     1 if a is more specific than b
 * The comparison is based on the following rules,
 * in order from most important to least important, given a and b:
 *    If a is a regular expression and b is not,
 *     -> a is less specific than b.
 *    If a's stem length is shorter than b's stem length,
 *     -> a is less specific than b.
 *    If a's string length is shorter than b's string length,
 *     -> a is less specific than b.
 *    If a does not have a specified type and b does,
 *     -> a is less specific than b.
 */
int fc_compare( struct file_context_node* a, struct file_context_node* b )
{
    /* Check to see if either a or b has meta characters
     * and the other doesn't. */
    if( a->meta && !b->meta )
        return -1;
    if( b->meta && !a->meta )
        return 1;

    /* Check to see if either a or b has a shorter stem
     * length than the other. */
    if( a->stem_len < b->stem_len )
        return -1;
    if( b->stem_len < a->stem_len )
        return 1;

    /* Check to see if either a or b has a shorter string
     * length than the other. */
    if( a->str_len < b->str_len )
        return -1;
    if( b->str_len < a->str_len )
        return 1;

    /* Check to see if either a or b has a specified type
     * and the other doesn't. */
    if( !a->type && b->type )
        return -1;
    if( !b->type && a->type )
        return 1;

    /* If none of the above conditions were satisfied,
     * then a and b are equally specific. */
    return 0;
}

/* fc_fill_data
 * This processes a regular expression in a file context
 * and sets the data held in file_context_node, namely
 * meta, str_len and stem_len.
 * The following changes are made to fc_node after the
 * completion of the function:
 *    fc_node->meta     = 1 if the regex has a meta character,
 *                        0 if not.
 *    fc_node->str_len  = the string length of the regular
 *                        expression.
 *    fc_node->stem_len = the number of characters up until
 *                        the first meta character.
 */
void fc_fill_data( struct file_context_node* fc_node )
{
    int c = 0;

    fc_node->meta = 0;
    fc_node->stem_len = 0;
    fc_node->str_len = 0;

    /* Process until the string termination character
     * has been reached.
     * Note: this while loop has been adapted from
     * spec_hasMetaChars in matchpathcon.c from
     * libselinux-1.22. */
    while( fc_node->regex[c] != 0 ) {
        switch( fc_node->regex[c] ) {
            case '.':
            case '^':
            case '$':
            case '?':
            case '*':
            case '+':
            case '|':
            case '[':
            case '(':
            case '{':
                /* If a meta character is found,
                 * set meta to one. */
                fc_node->meta = 1;
                break;
            case '\\':
                /* If an escape character is found, skip the
                 * escaped character; the pair then counts as
                 * a single character by falling through to
                 * the default case. */
                c++;
                /* fall through */
            default:
                /* If no meta character has been found yet,
                 * add one to the stem length. */
                if( !fc_node->meta )
                    fc_node->stem_len++;
                break;
        }

        fc_node->str_len++;
        c++;
    }
}
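
/* Illustrative sketch of how fc_fill_data and fc_compare interact. The
 * regexes and the context below are made-up example values, and the
 * FC_SORT_EXAMPLE guard is only a suggestion for trying it out: compile
 * with -DFC_SORT_EXAMPLE and call fc_example() from main(). */
#ifdef FC_SORT_EXAMPLE
static void fc_example( void )
{
    struct file_context_node plain = { "/usr/bin/foo", NULL, "system_u:object_r:bin_t", 0, 0, 0, NULL };
    struct file_context_node glob  = { "/usr/bin(/.*)?", NULL, "system_u:object_r:bin_t", 0, 0, 0, NULL };

    fc_fill_data( &plain );   /* meta = 0, stem_len = 12, str_len = 12 */
    fc_fill_data( &glob );    /* meta = 1, stem_len = 8,  str_len = 14 */

    /* The wildcard entry is less specific, so it sorts first. */
    printf( "%d\n", fc_compare( &glob, &plain ) );   /* prints -1 */
}
#endif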

/* main
 * This program takes two arguments: the input filename and the
 * output filename. The input file should be syntactically correct.
 * Overall, main() reads in the input file, stores each file context
 * line, sorts the list, then writes it to the output file.
 */
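/* For illustration, an input line (made-up example) such as
 *
 *     /opt/myapp(/.*)?	-d	system_u:object_r:lib_t
 *
 * is split below into the regular expression "/opt/myapp(/.*)?", the
 * optional type "-d" and the context "system_u:object_r:lib_t" before
 * being linked into the list. */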
int main( int argc, char *argv[] )
{
    int i, j, lines;
    int start, finish;
    char* str;
    struct file_context_node* temp;
    struct file_context_node* head;
    struct file_context_node* current;
    struct file_context_node* array;
    struct file_context_bucket* master;
    struct file_context_bucket* bcurrent;

    FILE *path;
    char line_buf[ 127 ];

    /* Check for the correct number of command line arguments. */
    if( argc != 3 ) {
        printf( "Error: invalid number of command line arguments.\n" );
        return -1;
    }

    i = j = lines = 0;

    /* Allocate the head of the file_context linked list. */
    if( !( current = head = (struct file_context_node*)malloc( sizeof( struct file_context_node ) ) ) ) {
        printf( "Error: failure allocating memory.\n" );
        return -1;
    }
    /* So an empty input yields an empty list. */
    head->next = NULL;

    /* Make sure to have a terminating character, always. */
    line_buf[126] = 0;

    /* Open the input file. */
    if( !( path = fopen( argv[1], "r" ) ) ) {
        printf( "Error: failure opening input file for read.\n" );
        return -1;
    }

    /* Parse the file into a file_context linked list. */
    while( fgets( line_buf, 126, path ) != NULL ) {

        /* Get rid of whitespace from the front of the line. */
        i = 0;
        while( line_buf[i] && line_buf[i] <= 32 ) i++;

        /* Check that the line isn't empty and isn't a comment. */
        if( line_buf[i] && line_buf[i] != '#' ) {
            /* Allocate a new node. */
            temp = (struct file_context_node*)malloc( sizeof( struct file_context_node ) );
            if( !temp ) {
                printf( "Error: failure allocating memory.\n" );
                return -1;
            }
            temp->next = NULL;
            /* No type yet; it is filled in below only if the line has one. */
            temp->type = NULL;

            /* Parse out the regular expression from the line. */
            start = i;
            while( line_buf[i] > 32 ) i++;
            finish = i;

            /* Allocate a character array to hold the regular
             * expression. */
            temp->regex = (char*)malloc( sizeof( char ) * ( finish - start + 1 ) );
            if( !( temp->regex ) ) {
                printf( "Error: failure allocating memory.\n" );
                return -1;
            }
            temp->regex[0] = 0;

            /* Fill the regular expression array. */
            temp->regex[ ( finish - start ) ] = 0;
            for( j = 0; j < finish - start; j++ ) {
                temp->regex[j] = line_buf[j + start];
            }

            /* Get rid of whitespace after the regular
             * expression. */
            while( line_buf[i] <= 32 ) i++;

            /* Parse out the type from the line (if it
             * is there). */
            if( line_buf[i] == '-' ) {
                /* Allocate a character array to
                 * hold the type. */
                temp->type = (char*)malloc( sizeof( char ) * 3 );
                if( !( temp->type ) ) {
                    printf( "Error: failure allocating memory.\n" );
                    return -1;
                }

                /* Fill the type into the array. */
                temp->type[0] = line_buf[i];
                temp->type[1] = line_buf[i + 1];
                i += 2;
                temp->type[2] = 0;

                /* Get rid of whitespace after the type. */
                while( line_buf[i] <= 32 ) i++;
            }

            /* Parse out the context from the line. */
            start = i;
            while( line_buf[i] > 32 ) i++;
            finish = i;

            /* Allocate a character array to hold the context. */
            temp->context = (char*)malloc( sizeof( char ) * ( finish - start + 1 ) );
            if( !( temp->context ) ) {
                printf( "Error: failure allocating memory.\n" );
                return -1;
            }
            temp->context[0] = 0;

            /* Fill the context array. */
            temp->context[ ( finish - start ) ] = 0;
            for( j = 0; j < finish - start; j++ ) {
                temp->context[j] = line_buf[j + start];
            }

            /* Set all the data about the regular
             * expression. */
            fc_fill_data( temp );

            /* Link this file context at the end of
             * the linked list. */
            current->next = temp;
            current = current->next;
            lines++;
        }
    }
    fclose( path );

    /* Create the bucket linked list from the earlier linked list. */
    current = head->next;
    bcurrent = master = (struct file_context_bucket*)malloc( sizeof( struct file_context_bucket ) );
    if( !master ) {
        printf( "Error: failure allocating memory.\n" );
        return -1;
    }
    /* Initialize so that an empty or single-bucket list is handled correctly. */
    master->next = NULL;
    master->data = NULL;

    /* Go until all the nodes have been put in individual buckets. */
    while( current ) {
        /* Copy over the file context line into the bucket. */
        bcurrent->data = current;
        current = current->next;

        /* Detach the node in the bucket from the old list. */
        bcurrent->data->next = NULL;

        /* If there should be another bucket, put one at the end. */
        if( current ) {
            bcurrent->next = (struct file_context_bucket*)malloc( sizeof( struct file_context_bucket ) );
            if( !( bcurrent->next ) ) {
                printf( "Error: failure allocating memory.\n" );
                return -1;
            }

            /* Make sure the new bucket thinks it's the end of the
             * list. */
            bcurrent->next->next = NULL;

            bcurrent = bcurrent->next;
        }
    }

    /* Sort the bucket list. */
    fc_merge_sort( master );

    /* Open the output file. */
    if( !( path = fopen( argv[2], "w" ) ) ) {
        printf( "Error: failure opening output file for write.\n" );
        return -1;
    }

    /* Output the sorted file_context linked list to the output file. */
    current = master->data;
    while( current ) {
        /* Output the regular expression. */
        i = 0;
        while( current->regex[i] != 0 ) {
            fprintf( path, "%c", current->regex[i] );
            i++;
        }
        fprintf( path, "\t" );

        /* Output the type, if there is one. */
        if( current->type ) {
            i = 0;
            while( current->type[i] != 0 ) {
                fprintf( path, "%c", current->type[i] );
                i++;
            }
            fprintf( path, "\t" );
        }

        /* Output the context. */
        i = 0;
        while( current->context[i] != 0 ) {
            fprintf( path, "%c", current->context[i] );
            i++;
        }
        fprintf( path, "\n" );

        /* Remove the node. */
        temp = current;
        current = current->next;

        free( temp->regex );
        if( temp->type )
            free( temp->type );
        free( temp->context );
        free( temp );
    }
    free( master );

    fclose( path );

    return 0;
}

refpolicy/support/pyplate.py (new executable file, 391 lines)
@@ -0,0 +1,391 @@
"""PyPlate : a simple Python-based templating program
|
||||
|
||||
PyPlate parses a file and replaces directives (in double square brackets [[ ... ]])
|
||||
by various means using a given dictionary of variables. Arbitrary Python code
|
||||
can be run inside many of the directives, making this system highly flexible.
|
||||
|
||||
Usage:
|
||||
# Load and parse template file
|
||||
template = pyplate.Template("output") (filename or string)
|
||||
# Execute it with a dictionary of variables
|
||||
template.execute_file(output_stream, locals())
|
||||
|
||||
PyPlate defines the following directives:
|
||||
[[...]] evaluate the arbitrary Python expression and insert the
|
||||
result into the output
|
||||
|
||||
[[# ... #]] comment.
|
||||
|
||||
[[exec ...]] execute arbitrary Python code in the sandbox namespace
|
||||
|
||||
[[if ...]] conditional expressions with usual Python semantics
|
||||
[[elif ...]]
|
||||
[[else]]
|
||||
[[end]]
|
||||
|
||||
[[for ... in ...]] for-loop with usual Python semantics
|
||||
[[end]]
|
||||
|
||||
[[def ...(...)]] define a "function" out of other templating elements
|
||||
[[end]]
|
||||
|
||||
[[call ...]] call a templating function (not a regular Python function)
|
||||
"""

#
# Copyright (C) 2002 Michael Droettboom
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#

from __future__ import nested_scopes
import sys, string, re, cStringIO

re_directive = re.compile(r"\[\[(.*)\]\]")
re_for_loop = re.compile(r"for (.*) in (.*)")
re_if = re.compile(r"if (.*)")
re_elif = re.compile(r"elif (.*)")
re_def = re.compile(r"def (.*?)\((.*)\)")
re_call = re.compile(r"call (.*?)\((.*)\)")
re_exec = re.compile(r"exec (.*)")
re_comment = re.compile(r"#(.*)#")

############################################################
# Template parser
class ParserException(Exception):
    def __init__(self, lineno, s):
        Exception.__init__(self, "line %d: %s" % (lineno, s))

class Template:
    def __init__(self, filename=None):
        if filename != None:
            try:
                self.parse_file(filename)
            except:
                self.parse_string(filename)

    def parse_file(self, filename):
        file = open(filename, 'r')
        self.parse(file)
        file.close()

    def parse_string(self, template):
        file = cStringIO.StringIO(template)
        self.parse(file)
        file.close()

    def parse(self, file):
        self.file = file
        self.line = self.file.read()
        self.lineno = 0
        self.functions = {}
        self.tree = TopLevelTemplateNode(self)

    def parser_get(self):
        if self.line == '':
            return None
        return self.line

    def parser_eat(self, chars):
        self.lineno = self.lineno + self.line[:chars].count("\n")
        self.line = self.line[chars:]

    def parser_exception(self, s):
        raise ParserException(self.lineno, s)

    def execute_file(self, filename, data):
        file = open(filename, 'w')
        self.execute(file, data)
        file.close()

    def execute_string(self, data):
        s = cStringIO.StringIO()
        self.execute(s, data)
        return s.getvalue()

    def execute_stdout(self, data):
        self.execute(sys.stdout, data)

    def execute(self, stream=sys.stdout, data={}):
        self.tree.execute(stream, data)

    def __repr__(self):
        return repr(self.tree)


############################################################
# NODES
class TemplateNode:
    def __init__(self, parent, s):
        self.parent = parent
        self.s = s
        self.node_list = []
        while 1:
            new_node = TemplateNodeFactory(parent)
            if self.add_node(new_node):
                break

    def add_node(self, node):
        if node == 'end':
            return 1
        elif node != None:
            self.node_list.append(node)
        else:
            raise self.parent.parser_exception(
                "[[%s]] does not have a matching [[end]]" % self.s)

    def execute(self, stream, data):
        for node in self.node_list:
            node.execute(stream, data)

    def __repr__(self):
        r = "<" + self.__class__.__name__ + " "
        for i in self.node_list:
            r = r + repr(i)
        r = r + ">"
        return r

class TopLevelTemplateNode(TemplateNode):
    def __init__(self, parent):
        TemplateNode.__init__(self, parent, '')

    def add_node(self, node):
        if node != None:
            self.node_list.append(node)
        else:
            return 1

class ForTemplateNode(TemplateNode):
    def __init__(self, parent, s):
        TemplateNode.__init__(self, parent, s)
        match = re_for_loop.match(s)
        if match == None:
            raise self.parent.parser_exception(
                "[[%s]] is not a valid for-loop expression" % self.s)
        else:
            self.vars_temp = match.group(1).split(",")
            self.vars = []
            for v in self.vars_temp:
                self.vars.append(v.strip())
            print self.vars
            self.expression = match.group(2)

    def execute(self, stream, data):
        remember_vars = {}
        for var in self.vars:
            if data.has_key(var):
                remember_vars[var] = data[var]
        for list in eval(self.expression, globals(), data):
            # Unpack lists/tuples across the loop variables.
            if isinstance(list, (type(()), type([]))):
                for index in range(len(list)):
                    data[self.vars[index]] = list[index]
            else:
                data[self.vars[0]] = list
            TemplateNode.execute(self, stream, data)
        for key, value in remember_vars.items():
            data[key] = value

class IfTemplateNode(TemplateNode):
    def __init__(self, parent, s):
        self.else_node = None
        TemplateNode.__init__(self, parent, s)
        match = re_if.match(s)
        if match == None:
            raise self.parent.parser_exception(
                "[[%s]] is not a valid if expression" % self.s)
        else:
            self.expression = match.group(1)

    def add_node(self, node):
        if node == 'end':
            return 1
        elif isinstance(node, ElseTemplateNode):
            self.else_node = node
            return 1
        elif isinstance(node, ElifTemplateNode):
            self.else_node = node
            return 1
        elif node != None:
            self.node_list.append(node)
        else:
            raise self.parent.parser_exception(
                "[[%s]] does not have a matching [[end]]" % self.s)

    def execute(self, stream, data):
        if eval(self.expression, globals(), data):
            TemplateNode.execute(self, stream, data)
        elif self.else_node != None:
            self.else_node.execute(stream, data)

class ElifTemplateNode(IfTemplateNode):
    def __init__(self, parent, s):
        self.else_node = None
        TemplateNode.__init__(self, parent, s)
        match = re_elif.match(s)
        if match == None:
            self.parent.parser_exception(
                "[[%s]] is not a valid elif expression" % self.s)
        else:
            self.expression = match.group(1)

class ElseTemplateNode(TemplateNode):
    pass

class FunctionTemplateNode(TemplateNode):
    def __init__(self, parent, s):
        TemplateNode.__init__(self, parent, s)
        match = re_def.match(s)
        if match == None:
            self.parent.parser_exception(
                "[[%s]] is not a valid function definition" % self.s)
        self.function_name = match.group(1)
        self.vars_temp = match.group(2).split(",")
        self.vars = []
        for v in self.vars_temp:
            self.vars.append(v.strip())
        print self.vars
        self.parent.functions[self.function_name] = self

    def execute(self, stream, data):
        pass

    def call(self, args, stream, data):
        remember_vars = {}
        # Bind each positional argument to its parameter name.
        for index in range(len(self.vars)):
            var = self.vars[index]
            if data.has_key(var):
                remember_vars[var] = data[var]
            data[var] = args[index]
        TemplateNode.execute(self, stream, data)
        for key, value in remember_vars.items():
            data[key] = value

class LeafTemplateNode(TemplateNode):
    def __init__(self, parent, s):
        self.parent = parent
        self.s = s

    def execute(self, stream, data):
        stream.write(self.s)

    def __repr__(self):
        return "<" + self.__class__.__name__ + ">"

class CommentTemplateNode(LeafTemplateNode):
    def execute(self, stream, data):
        pass

class ExpressionTemplateNode(LeafTemplateNode):
    def execute(self, stream, data):
        stream.write(str(eval(self.s, globals(), data)))

class ExecTemplateNode(LeafTemplateNode):
    def __init__(self, parent, s):
        LeafTemplateNode.__init__(self, parent, s)
        match = re_exec.match(s)
        if match == None:
            self.parent.parser_exception(
                "[[%s]] is not a valid statement" % self.s)
        self.s = match.group(1)

    def execute(self, stream, data):
        exec(self.s, globals(), data)

class CallTemplateNode(LeafTemplateNode):
    def __init__(self, parent, s):
        LeafTemplateNode.__init__(self, parent, s)
        match = re_call.match(s)
        if match == None:
            self.parent.parser_exception(
                "[[%s]] is not a valid function call" % self.s)
        self.function_name = match.group(1)
        self.vars = "(" + match.group(2).strip() + ",)"

    def execute(self, stream, data):
        self.parent.functions[self.function_name].call(
            eval(self.vars, globals(), data), stream, data)

############################################################
# Node factory
template_factory_type_map = {
    'if'   : IfTemplateNode,
    'for'  : ForTemplateNode,
    'elif' : ElifTemplateNode,
    'else' : ElseTemplateNode,
    'def'  : FunctionTemplateNode,
    'call' : CallTemplateNode,
    'exec' : ExecTemplateNode }
template_factory_types = template_factory_type_map.keys()

def TemplateNodeFactory(parent):
    src = parent.parser_get()

    if src == None:
        return None
    match = re_directive.search(src)
    if match == None:
        parent.parser_eat(len(src))
        return LeafTemplateNode(parent, src)
    elif src == '' or match.start() != 0:
        parent.parser_eat(match.start())
        return LeafTemplateNode(parent, src[:match.start()])
    else:
        directive = match.group()[2:-2].strip()
        parent.parser_eat(match.end())
        if directive == 'end':
            return 'end'
        elif re_comment.match(directive):
            return CommentTemplateNode(parent, directive)
        else:
            for i in template_factory_types:
                if directive[0:len(i)] == i:
                    return template_factory_type_map[i](parent, directive)
            return ExpressionTemplateNode(parent, directive)

############################################################
# TESTING CODE
if __name__ == '__main__':
    combinations = (('OneBit', 'Float', 'GreyScale'),
                    ('GreyScale', 'RGB'))

    template = Template("""
[[# This is a comment #]]
[[# This example uses recursive template-function calls to generate feature combinations #]]
[[def switch(layer, args)]]
switch(m[[layer]].id) {
[[for option in combinations[layer]]]
[[exec current = option + '(m' + str(layer) + ')']]
case [[option]]:
[[if layer == layers - 1]]
function_call([[string.join(args + [current], ',')]]);
[[else]]
[[call switch(layer + 1, args + [current])]]
[[end]]
break;
[[end]]
}
[[end]]

PyObject *py_overload_resolution_[[function_name]](PyObject *args) {
[[call switch(0, [])]]
}
""")

    data = {'combinations' : combinations,
            'function_name' : 'threshold',
            'layers' : 2}
    template.execute(sys.stdout, data)

refpolicy/support/sedoctool.py (new executable file, 143 lines)
@@ -0,0 +1,143 @@
#!/usr/bin/python

# Author: Joshua Brindle <jbrindle@tresys.com>
#
# Copyright (C) 2003 - 2005 Tresys Technology, LLC
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, version 2.

"""
Generate tunable and module configuration files, and interface
documentation, from the policy XML.
"""

import sys
import getopt
import string
import pyplate
from xml.dom.ext import *
from xml.dom.ext.reader import Sax2

def read_policy_xml(filename):
    try:
        reader = Sax2.Reader()
        doc = reader.fromStream(filename)
    except:
        error("Error while parsing xml")

    return doc

def gen_tunable_conf(doc, file):
    for node in doc.getElementsByTagName("tunable"):
        s = string.split(node.firstChild.data, "\n")
        for line in s:
            file.write("# %s\n" % line)
        tun_name = tun_val = None
        for (name, value) in node.attributes.items():
            if name[1] == "name":
                tun_name = value.value
            elif name[1] == "dftval":
                tun_val = value.value

            if tun_name and tun_val:
                file.write("%s = %s\n\n" % (tun_name, tun_val))
                tun_name = tun_val = None
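
# Illustrative example (the tunable is made up): given policy XML containing
#
#     <tunable name="user_ping" dftval="false">Allow users to run ping.</tunable>
#
# gen_tunable_conf() writes
#
#     # Allow users to run ping.
#     user_ping = false
#
# followed by a blank line, to the tunable config file.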

def gen_module_conf(doc, file):
    for node in doc.getElementsByTagName("module"):
        for desc in node.getElementsByTagName("summary"):
            s = string.split(desc.firstChild.data, "\n")
            for line in s:
                file.write("# %s\n" % line)
        file.write("#\n")
        for (name, value) in node.attributes.items():
            if name[1] == "name":
                file.write("# %s\n\n" % value.value)

def gen_docs(doc, file):
    try:
        bodyfile = open("templates/header.html", "r")
        intfile = open("templates/interface.html", "r")
    except:
        error("Could not open templates")

    interface_buf = None
    interface_parameters = {}

    for node in doc.getElementsByTagName("module"):
        for interface in node.getElementsByTagName("interface"):
            interface_tpl = pyplate.Template(intfile.read())
            for i, v in interface.attributes.items():
                interface_name = v
            for desc in interface.getElementsByTagName("description"):
                interface_desc = desc.firstChild.data
            for desc in interface.getElementsByTagName("securitydesc"):
                if desc:
                    interface_secdesc = desc.firstChild.data
                else:
                    interface_secdesc = None

            for args in interface.getElementsByTagName("parameter"):
                paramdesc = args.firstChild.data
                for i, v in interface.attributes.items():
                    arg = { "name" : v,
                            "desc" : paramdesc }

def error(error):
    sys.stderr.write("%s exiting for: " % sys.argv[0])
    sys.stderr.write("%s\n" % error)
    sys.stderr.flush()
    sys.exit(1)

def usage():
    sys.stdout.write("%s [-tmd] -x <xmlfile>\n\n" % sys.argv[0])
    sys.stdout.write("Options:\n")
    sys.stdout.write("-t --tunables <file> -- write tunable config to <file>\n")
    sys.stdout.write("-m --modules <file> -- write module config to <file>\n")
    sys.stdout.write("-d --docs <dir> -- write interface documentation to <dir>\n")
    sys.stdout.write("-x --xml <file> -- filename to read xml data from\n")

try:
    opts, args = getopt.getopt(sys.argv[1:], "t:m:d:x:", ["tunables=", "modules=", "docs=", "xml="])
except getopt.GetoptError:
    usage()
    sys.exit(1)

tunables = modules = docs = xmlfile = None

for opt, val in opts:
    if opt in ("-t", "--tunables"):
        tunables = val
    if opt in ("-m", "--modules"):
        modules = val
    if opt in ("-d", "--docs"):
        docs = val
    if opt in ("-x", "--xml"):
        xmlfile = val

if xmlfile == None:
    usage()
    sys.exit(1)

doc = read_policy_xml(xmlfile)

if tunables:
    try:
        conf = open(tunables, 'w')
    except:
        error("Could not open tunables file for writing")
    gen_tunable_conf(doc, conf)
    conf.close()

if modules:
    try:
        conf = open(modules, 'w')
    except:
        error("Could not open modules file for writing")
    gen_module_conf(doc, conf)
    conf.close()

if docs:
    gen_docs(doc, sys.stdout)