Initial Contribution
This commit is contained in:
147
libc/kernel/tools/clean_header.py
Executable file
147
libc/kernel/tools/clean_header.py
Executable file
@@ -0,0 +1,147 @@
|
||||
#!/usr/bin/env python
|
||||
#
|
||||
|
||||
import sys, cpp, kernel, glob, os, re, getopt
|
||||
from defaults import *
|
||||
from utils import *
|
||||
|
||||
noUpdate = 1
|
||||
|
||||
def cleanupFile( path ):
    """Clean up one original kernel header.

    Reads the original header at 'path', runs the cleanup passes on it
    (macro optimization, #if 0/1 folding, removal of non-whitelisted
    statics/vars/funcs, comment and blank-line stripping, disclaimer
    insertion) and returns a (dst_path, text) pair:

        dst_path -- destination path for the clean header, either
                    "arch-<arch>/asm/..." for architecture-specific
                    headers or "common/..." otherwise
        text     -- the cleaned header content as a single string

    On a recoverable input error, panics when noUpdate is set (single
    file mode), otherwise prints a warning and returns (None, None).
    """
    # check the header path
    src_path = path

    if not os.path.exists(src_path):
        if noUpdate:
            panic( "file does not exist: '%s'\n" % path )
        # fixed typo: "exit" -> "exist"
        sys.stderr.write( "warning: file does not exist: %s\n" % path )
        return None, None

    if not os.path.isfile(src_path):
        if noUpdate:
            panic( "path is not a file: '%s'\n" % path )
        sys.stderr.write( "warning: not a file: %s\n" % path )
        return None, None

    # the file must live below the 'original' directory
    original_path = kernel_original_path
    if os.path.commonprefix( [ src_path, original_path ] ) != original_path:
        if noUpdate:
            panic( "file is not in 'original' directory: %s\n" % path )
        sys.stderr.write( "warning: file not in 'original' ignored: %s\n" % path )
        return None, None

    # compute the path relative to 'original'
    src_path = src_path[len(original_path):]
    if len(src_path) > 0 and src_path[0] == '/':
        src_path = src_path[1:]

    if len(src_path) == 0:
        panic( "oops, internal error, can't extract correct relative path" )

    # convert into destination path, extracting architecture if needed
    # and the corresponding list of known static functions
    #
    arch = None
    re_asm_arch = re.compile( r"asm-([\w\d_\+\.\-]+)(/.*)" )
    m = re_asm_arch.match(src_path)
    statics = kernel_known_generic_statics
    if m and m.group(1) != 'generic':
        dst_path = "arch-%s/asm/%s" % m.groups()
        arch     = m.group(1)
        statics  = statics.union( kernel_known_statics.get( arch, set() ) )
    else:
        dst_path = "common/" + src_path

    dst_path = os.path.normpath( original_path + "/../" + dst_path )

    # now, let's parse the file
    #
    # renamed local from 'list' to 'blocks' to avoid shadowing the builtin
    blocks = cpp.BlockParser().parseFile(path)
    if not blocks:
        sys.stderr.write( "error: can't parse '%s'" % path )
        sys.exit(1)

    # apply the cleanup passes in order
    blocks.optimizeMacros( kernel_known_macros )
    blocks.optimizeIf01()
    blocks.removeVarsAndFuncs( statics )
    blocks.removeComments()
    blocks.removeEmptyLines()
    blocks.insertDisclaimer( kernel.kernel_disclaimer )

    out = StringOutput()
    blocks.write(out)
    return dst_path, out.get()
|
||||
|
||||
|
||||
if __name__ == "__main__":

    def usage():
        """Print command-line help, then exit with status 1."""
        print """\
  usage:  %s [options] <header_path>

    options:
        -v    enable verbose mode

        -u    enabled update mode
            this will try to update the corresponding 'clean header'
            if the content has changed. with this, you can pass more
            than one file on the command-line

    <header_path> must be in a subdirectory of 'original'
""" % os.path.basename(sys.argv[0])
        sys.exit(1)

    try:
        optlist, args = getopt.getopt( sys.argv[1:], 'uv' )
    except:
        # unrecognized option
        sys.stderr.write( "error: unrecognized option\n" )
        usage()

    for opt, arg in optlist:
        if opt == '-u':
            # update mode: rewrite the clean headers in place
            noUpdate = 0
        elif opt == '-v':
            verbose = 1
            D_setlevel(1)

    if len(args) == 0:
        usage()

    if noUpdate:
        # default mode: clean each header and dump the result to stdout
        # (cleanupFile panics on bad input when noUpdate is set, so
        # newdata is always valid here)
        for path in args:
            dst_path, newdata = cleanupFile(path)
            print newdata

        sys.exit(0)

    # now let's update our files.
    b = BatchFileUpdater()

    for path in args:
        dst_path, newdata = cleanupFile(path)
        if not dst_path:
            continue

        b.readFile( dst_path )
        r = b.editFile( dst_path, newdata )
        # editFile result codes: 0 = unchanged, 1 = edited, other = new file
        if r == 0:
            r = "unchanged"
        elif r == 1:
            r = "edited"
        else:
            r = "added"

        print "cleaning: %-*s -> %-*s (%s)" % ( 35, path, 35, dst_path, r )

    # under the Android build system, go through perforce; otherwise
    # just write the files directly
    if os.environ.has_key("ANDROID_PRODUCT_OUT"):
        b.updateP4Files()
    else:
        b.updateFiles()

    sys.exit(0)
|
2159
libc/kernel/tools/cpp.py
Normal file
2159
libc/kernel/tools/cpp.py
Normal file
File diff suppressed because it is too large
Load Diff
83
libc/kernel/tools/defaults.py
Normal file
83
libc/kernel/tools/defaults.py
Normal file
@@ -0,0 +1,83 @@
|
||||
# this module contains all the defaults used by the generation of cleaned-up headers
|
||||
# for the Bionic C library
|
||||
#
|
||||
|
||||
import time, os, sys
|
||||
from utils import *
|
||||
|
||||
# the list of supported architectures
#
kernel_archs = [ 'arm', 'x86' ]

# the list of include directories that belong to the kernel
# tree. used when looking for sources...
#
kernel_dirs = [ "linux", "asm", "asm-generic", "mtd" ]

# path to the directory containing the original kernel headers,
# computed relative to the running script's location
#
kernel_original_path = os.path.normpath( find_program_dir() + '/../original' )

# a special value that is used to indicate that a given macro is known to be
# undefined during optimization
kCppUndefinedMacro = "<<<undefined>>>"

# this is the set of known macros we want to totally optimize out from the
# final headers; maps macro name to its forced value (or kCppUndefinedMacro)
kernel_known_macros = {
    "__KERNEL__": kCppUndefinedMacro,
    "__KERNEL_STRICT_NAMES":"1",
    "__CHECKER__": kCppUndefinedMacro,
    "__CHECK_ENDIAN__": kCppUndefinedMacro,
    }

# define to true if you want to remove all defined(CONFIG_FOO) tests
# from the clean headers. testing shows that this is not strictly necessary
# but just generates cleaner results
kernel_remove_config_macros = True

# this is the set of known static inline functions that we want to keep
# in the final ARM headers. this is only used to keep optimized byteswapping
# static functions and stuff like that.
kernel_known_arm_statics = set(
        [ "___arch__swab32",    # asm-arm/byteorder.h
        ]
    )

kernel_known_x86_statics = set(
        [ "___arch__swab32",    # asm-x86/byteorder.h
          "___arch__swab64",    # asm-x86/byteorder.h
        ]
    )

# statics kept for every architecture
kernel_known_generic_statics = set(
        [ "__invalid_size_argument_for_IOC",  # asm-generic/ioctl.h
          "__cmsg_nxthdr",                    # linux/socket.h
          "cmsg_nxthdr",                      # linux/socket.h
          "ipt_get_target",
        ]
    )

# this maps an architecture to the set of static inline functions that
# we want to keep in the final headers
#
kernel_known_statics = {
        "arm" : kernel_known_arm_statics,
        "x86" : kernel_known_x86_statics
    }

# this is the standard disclaimer inserted at the top of every
# generated clean header
#
kernel_disclaimer = """\
/****************************************************************************
 ****************************************************************************
 ***
 ***   This header was automatically generated from a Linux kernel header
 ***   of the same name, to make information necessary for userspace to
 ***   call into the kernel available to libc.  It contains only constants,
 ***   structures, and macros generated from the original header, and thus,
 ***   contains no copyrightable information.
 ***
 ****************************************************************************
 ****************************************************************************/
"""
|
175
libc/kernel/tools/find_headers.py
Executable file
175
libc/kernel/tools/find_headers.py
Executable file
@@ -0,0 +1,175 @@
|
||||
#!/usr/bin/env python
|
||||
#
|
||||
# this program is used to find source code that includes linux kernel headers directly
|
||||
# (e.g. with #include <linux/...> or #include <asm/...>)
|
||||
#
|
||||
# then it lists
|
||||
|
||||
import sys, cpp, glob, os, re, getopt, kernel
|
||||
from utils import *
|
||||
from defaults import *
|
||||
|
||||
program_dir = find_program_dir()

# defaults; may be overridden by -a / -d / -c below
wanted_archs   = kernel_archs
wanted_include = os.path.normpath(program_dir + '/../original')
wanted_config  = os.path.normpath(program_dir + '/../original/config')

def usage():
    """Print command-line help, then exit with status 1."""
    # NOTE(review): 'string' is only in scope because 'from utils import *'
    # re-exports the 'string' module that utils itself imports — fragile.
    print """\
  usage:  find_headers.py [options] (file|directory|@listfile)+

    options:
        -d <include-dir>   specify alternate kernel headers
                           'include' directory
                           ('%s' by default)

        -c <file>          specify alternate .config file
                           ('%s' by default)

        -a <archs>         used to specify an alternative list
                           of architectures to support
                           ('%s' by default)

        -v                 enable verbose mode

    this program is used to find all the kernel headers that are used
    by a set of source files or directories containing them. the search
    is recursive to find *all* required files.

""" % ( wanted_include, wanted_config, string.join(kernel_archs,",") )
    sys.exit(1)


try:
    optlist, args = getopt.getopt( sys.argv[1:], 'vc:d:a:' )
except:
    # unrecognized option
    print "error: unrecognized option"
    usage()

for opt, arg in optlist:
    if opt == '-a':
        wanted_archs = string.split(arg,',')
    elif opt == '-d':
        wanted_include = arg
    elif opt == '-c':
        wanted_config = arg
    elif opt == '-v':
        kernel.verboseSearch = 1
        kernel.verboseFind   = 1
        verbose = 1
    else:
        usage()

if len(args) < 1:
    usage()

# sanity-check the kernel include directory
kernel_root = wanted_include
if not os.path.exists(kernel_root):
    sys.stderr.write( "error: directory '%s' does not exist\n" % kernel_root )
    sys.exit(1)

if not os.path.isdir(kernel_root):
    sys.stderr.write( "error: '%s' is not a directory\n" % kernel_root )
    sys.exit(1)

if not os.path.isdir(kernel_root+"/linux"):
    sys.stderr.write( "error: '%s' does not have a 'linux' directory\n" % kernel_root )
    sys.exit(1)

# sanity-check the .config file
if not os.path.exists(wanted_config):
    sys.stderr.write( "error: file '%s' does not exist\n" % wanted_config )
    sys.exit(1)

if not os.path.isfile(wanted_config):
    sys.stderr.write( "error: '%s' is not a file\n" % wanted_config )
    sys.exit(1)

# find all architectures in the kernel tree (asm-<arch> subdirectories)
re_asm_ = re.compile(r"asm-(\w+)")
archs   = []
for dir in os.listdir(kernel_root):
    m = re_asm_.match(dir)
    if m:
        if verbose: print ">> found kernel arch '%s'" % m.group(1)
        archs.append(m.group(1))

# if we're using the 'kernel_headers' directory, there is only asm/
# and no other asm-<arch> directories (arm is assumed, which sucks)
#
in_kernel_headers = False
if len(archs) == 0:
    # this can happen when we're using the 'kernel_headers' directory
    if os.path.isdir(kernel_root+"/asm"):
        in_kernel_headers = True
        archs = [ "arm" ]

# if the user has specified some architectures with -a <archs> ensure that
# all those he wants are available from the kernel include tree
if wanted_archs != None:
    if in_kernel_headers and wanted_archs != [ "arm" ]:
        sys.stderr.write( "error: when parsing kernel_headers, 'arm' architecture only is supported at the moment\n" )
        sys.exit(1)
    missing = []
    for arch in wanted_archs:
        if arch not in archs:
            missing.append(arch)
    if len(missing) > 0:
        sys.stderr.write( "error: the following requested architectures are not in the kernel tree: " )
        for a in missing:
            sys.stderr.write( " %s" % a )
        sys.stderr.write( "\n" )
        sys.exit(1)

    archs = wanted_archs

# helper function used to walk the user files
def parse_file(path, parser):
    parser.parseFile(path)


# remove previous destination directory
#destdir = "/tmp/bionic-kernel-headers/"
#cleanup_dir(destdir)

# try to read the config file
try:
    cparser = kernel.ConfigParser()
    cparser.parseFile( wanted_config )
except:
    sys.stderr.write( "error: can't parse '%s'" % wanted_config )
    sys.exit(1)

kernel_config = cparser.getDefinitions()

# first, obtain the list of kernel files used by our clients
fparser = kernel.HeaderScanner()
walk_source_files( args, parse_file, fparser, excludes=["kernel_headers"] )
headers = fparser.getHeaders()
files   = fparser.getFiles()

# now recursively scan the kernel headers for additionnal sub-included headers
hparser = kernel.KernelHeaderFinder(headers,archs,kernel_root,kernel_config)
headers = hparser.scanForAllArchs()

if 0:  # just for debugging
    dumpHeaderUsers = False

    print "the following %d headers:" % len(headers)
    for h in sorted(headers):
        if dumpHeaderUsers:
            print "  %s (%s)" % (h, repr(hparser.getHeaderUsers(h)))
        else:
            print "  %s" % h

    print "are used by the following %d files:" % len(files)
    for f in sorted(files):
        print "  %s" % f

    sys.exit(0)

# normal output: one needed header path per line
for h in sorted(headers):
    print h

sys.exit(0)
|
63
libc/kernel/tools/find_users.py
Executable file
63
libc/kernel/tools/find_users.py
Executable file
@@ -0,0 +1,63 @@
|
||||
#!/usr/bin/env python
|
||||
#
|
||||
# this program is used to find source code that includes linux kernel headers directly
|
||||
# (e.g. with #include <linux/...> or #include <asm/...>)
|
||||
#
|
||||
# then it lists
|
||||
|
||||
import sys, cpp, glob, os, re, getopt
|
||||
import kernel
|
||||
from utils import *
|
||||
from defaults import *
|
||||
|
||||
|
||||
def usage():
    """Print command-line help, then exit with status 1."""
    print """\
  usage:  find_users.py [-v] (file|directory|@listfile)+

    this program is used to scan a list of files or directories for
    sources that include kernel headers directly. the program prints
    the list of said source files when it's done.

    when scanning directories, only files matching the following
    extension will be searched:  .c .cpp .S .h

    use -v to enable verbose output
"""
    sys.exit(1)


try:
    optlist, args = getopt.getopt( sys.argv[1:], 'v' )
except:
    # unrecognized option
    print "error: unrecognized option"
    usage()

for opt, arg in optlist:
    if opt == '-v':
        # turn on debug traces in the kernel module's scanner
        kernel.verboseSearch = 1
        kernel.verboseFind   = 1
    else:
        usage()

if len(args) < 1:
    usage()

# helper function used to walk the user files
def parse_file(path, parser):
    parser.parseFile(path)


# first, obtain the list of kernel files used by our clients
# avoid parsing the 'kernel_headers' directory itself since we
# use this program with the Android source tree by default.
#
fparser = kernel.HeaderScanner()
walk_source_files( args, parse_file, fparser, excludes=["kernel_headers","original"] )
files = fparser.getFiles()

# print one using source file per line, sorted
for f in sorted(files):
    print f

sys.exit(0)
|
314
libc/kernel/tools/kernel.py
Normal file
314
libc/kernel/tools/kernel.py
Normal file
@@ -0,0 +1,314 @@
|
||||
# this file contains definitions related to the Linux kernel itself
|
||||
#
|
||||
|
||||
# list here the macros that you know are always defined/undefined when including
|
||||
# the kernel headers
|
||||
#
|
||||
import sys, cpp, re, os.path, string, time
|
||||
from defaults import *
|
||||
|
||||
# module-level debug flags; the command-line tools set these to 1
# when their -v option is given
verboseSearch = 0   # trace each file as it is scanned
verboseFind   = 0   # trace each header usage as it is discovered
|
||||
|
||||
########################################################################
|
||||
########################################################################
|
||||
##### #####
|
||||
##### H E A D E R S C A N N E R #####
|
||||
##### #####
|
||||
########################################################################
|
||||
########################################################################
|
||||
|
||||
|
||||
class HeaderScanner:
    """a class used to non-recursively detect which Linux kernel headers are
       used by a given set of input source files"""

    # to use the HeaderScanner, do the following:
    #
    #    scanner = HeaderScanner()
    #    for path in <your list of files>:
    #        scanner.parseFile(path)
    #
    #    # get the set of Linux headers included by your files
    #    headers = scanner.getHeaders()
    #
    #    # get the set of of input files that do include Linux headers
    #    files = scanner.getFiles()
    #
    #    note that the result of getHeaders() is a set of strings, each one
    #    corresponding to a non-bracketed path name, e.g.:
    #
    #        set("linux/types","asm/types.h")
    #

    # the default algorithm is pretty smart and will analyze the input
    # files with a custom C pre-processor in order to optimize out macros,
    # get rid of comments, empty lines, etc..
    #
    # this avoids many annoying false positives... !!
    #

    # this regular expression is used to detect include paths that relate to
    # the kernel, by default, it selects one of:
    #    <linux/*>
    #    <asm/*>
    #    <asm-generic/*>
    #    <mtd/*>
    #
    re_combined = \
       re.compile( r"^.*<((%s)/[\d\w_\+\.\-/]*)>.*$" % string.join(kernel_dirs,"|") )

    def __init__(self,config={}):
        """initialize a HeaderScanner"""
        # NOTE(review): mutable default {} is shared between calls; it is
        # never mutated here (only rebound), so this is harmless in practice
        self.reset()
        self.config = config

    def reset(self,config={}):
        # clear all accumulated scan state
        self.files   = set()  # set of files being parsed for headers
        self.headers = {}     # maps headers to set of users
        self.config  = config

    def checkInclude(self,line,from_file):
        # record a kernel include directive found in 'line'; 'from_file'
        # is the source file being scanned (may be None/empty)
        m = HeaderScanner.re_combined.match(line)
        if not m: return

        header = m.group(1)
        if from_file:
            self.files.add(from_file)

        if not header in self.headers:
            self.headers[header] = set()

        if from_file:
            if verboseFind:
                print "=== %s uses %s" % (from_file, header)
            self.headers[header].add(from_file)

    def parseFile(self,path):
        """parse a given file for Linux headers"""
        if not os.path.exists(path):
            return

        # since tokenizing the file is very slow, we first try a quick grep
        # to see if this returns any meaningful results. only if this is true
        # do we do the tokenization"""
        try:
            f = open(path, "rt")
        except:
            print "!!! can't read '%s'" % path
            return

        # NOTE(review): 'f' is never explicitly closed; relies on the
        # interpreter reclaiming the handle
        hasIncludes = False
        for line in f:
            if HeaderScanner.re_combined.match(line):
                hasIncludes = True
                break

        if not hasIncludes:
            if verboseSearch: print "::: " + path
            return

        if verboseSearch: print "*** " + path

        # full tokenization pass with the custom pre-processor
        list = cpp.BlockParser().parseFile(path)
        if list:
            #list.removePrefixed("CONFIG_",self.config)
            list.optimizeMacros(kernel_known_macros)
            list.optimizeIf01()
            includes = list.findIncludes()
            for inc in includes:
                self.checkInclude(inc,path)

    def getHeaders(self):
        """return the set of all needed kernel headers"""
        return set(self.headers.keys())

    def getHeaderUsers(self,header):
        """return the set of all users for a given header"""
        # NOTE(review): for an unknown header .get() returns None and
        # set(None) raises TypeError — callers appear to only pass values
        # obtained from getHeaders(); confirm before relying on this
        return set(self.headers.get(header))

    def getAllUsers(self):
        """return a dictionary mapping heaaders to their user set"""
        return self.headers.copy()

    def getFiles(self):
        """returns the set of files that do include kernel headers"""
        return self.files.copy()
|
||||
|
||||
|
||||
##########################################################################
|
||||
##########################################################################
|
||||
##### #####
|
||||
##### H E A D E R F I N D E R #####
|
||||
##### #####
|
||||
##########################################################################
|
||||
##########################################################################
|
||||
|
||||
|
||||
class KernelHeaderFinder:
    """a class used to scan the kernel headers themselves."""

    # this is different
    # from a HeaderScanner because we need to translate the path returned by
    # HeaderScanner.getHeaders() into possibly architecture-specific ones.
    #
    # for example, <asm/XXXX.h> needs to be translated in <asm-ARCH/XXXX.h>
    # where ARCH is appropriately chosen

    # here's how to use this:
    #
    #    scanner = HeaderScanner()
    #    for path in <your list of user sources>:
    #        scanner.parseFile(path)
    #
    #    used_headers = scanner.getHeaders()
    #    finder = KernelHeaderFinder(used_headers, [ "arm", "x86" ],
    #                                "<kernel_include_path>")
    #    all_headers = finder.scanForAllArchs()
    #
    #   not that the result of scanForAllArchs() is a list of relative
    #   header paths that are not bracketed
    #

    def __init__(self,headers,archs,kernel_root,kernel_config):
        """init a KernelHeaderScanner,

            'headers' is a list or set of headers,
            'archs' is a list of architectures
            'kernel_root' is the path to the 'include' directory
             of your original kernel sources
        """

        # normalize kernel_root so a relative header path can be appended
        if len(kernel_root) > 0 and kernel_root[-1] != "/":
            kernel_root += "/"
        #print "using kernel_root %s" % kernel_root
        self.archs         = archs
        self.searched      = set(headers)
        self.kernel_root   = kernel_root
        self.kernel_config = kernel_config
        self.needed        = {}   # maps needed header path -> set of user files

    def setArch(self,arch=None):
        # select the architecture used by pathFromHeader()/pathToHeader();
        # None clears the selection
        if arch:
            self.prefix = "asm-%s/" % arch
            self.arch_headers = set()
        else:
            self.prefix = None
            self.arch_headers = set()

    def pathFromHeader(self,header):
        # translate "asm/foo.h" into "asm-<arch>/foo.h" for the current arch
        path = header
        if self.prefix and path.startswith("asm/"):
            path = "%s%s" % (self.prefix, path[4:])
        return path

    def pathToHeader(self,path):
        # inverse of pathFromHeader: map an arch-specific path back to "asm/..."
        if self.prefix and path.startswith(self.prefix):
            path = "asm/%s" % path[len(self.prefix):]
        return "%s" % path

    def setSearchedHeaders(self,headers):
        # replace the initial set of headers the scan starts from
        self.searched = set(headers)

    def scanForArch(self):
        # breadth-first scan for the currently selected architecture;
        # results accumulate into self.needed across calls
        fparser   = HeaderScanner(config=self.kernel_config)
        workqueue = []
        needed    = {}
        for h in self.searched:
            path = self.pathFromHeader(h)
            if not path in needed:
                needed[path] = set()
            workqueue.append(path)

        # process the workqueue by index so items appended during the
        # loop are also scanned
        i = 0
        while i < len(workqueue):
            path = workqueue[i]
            i   += 1
            fparser.parseFile(self.kernel_root + path)
            for used in fparser.getHeaders():
                path = self.pathFromHeader(used)
                if not path in needed:
                    needed[path] = set()
                    workqueue.append(path)
                for user in fparser.getHeaderUsers(used):
                    needed[path].add(user)

        # now copy the arch-specific headers into the global list
        for header in needed.keys():
            users = needed[header]
            if not header in self.needed:
                self.needed[header] = set()

            for user in users:
                self.needed[header].add(user)

    def scanForAllArchs(self):
        """scan for all architectures and return the set of all needed kernel headers"""
        for arch in self.archs:
            self.setArch(arch)
            self.scanForArch()

        return set(self.needed.keys())

    def getHeaderUsers(self,header):
        """return the set of all users for a given header"""
        return set(self.needed[header])

    def getArchHeaders(self,arch):
        """return the set of all <asm/...> headers required by a given architecture"""
        return set()  # XXX: TODO
|
||||
|
||||
#####################################################################################
|
||||
#####################################################################################
|
||||
##### #####
|
||||
##### C O N F I G P A R S E R #####
|
||||
##### #####
|
||||
#####################################################################################
|
||||
#####################################################################################
|
||||
|
||||
class ConfigParser:
    """a class used to parse the Linux kernel .config file"""
    # matches "CONFIG_FOO=value" lines; group 1 = name, group 2 = raw value
    re_CONFIG_ = re.compile(r"^(CONFIG_\w+)=(.*)$")

    def __init__(self):
        self.items = {}           # maps CONFIG_XXX name -> raw value string
        self.duplicates = False   # set when an option appears more than once

    def parseLine(self,line):
        # parse a single .config line; blanks and '#' comments are ignored
        line = string.strip(line)   # NOTE(review): Python-2-only string-module idiom

        # skip empty and comment lines
        if len(line) == 0 or line[0] == "#":
            return

        m = ConfigParser.re_CONFIG_.match(line)
        if not m: return

        name = m.group(1)
        value = m.group(2)

        if name in self.items:  # aarg, duplicate value
            self.duplicates = True

        # last occurrence wins
        self.items[name] = value

    def parseFile(self,path):
        """parse the .config file at 'path'; propagates I/O errors to the caller"""
        f = file(path, "r")   # NOTE(review): Python 2 'file' builtin
        for line in f:
            # strip trailing "\n" and an optional "\r" before it
            if len(line) > 0:
                if line[-1] == "\n":
                    line = line[:-1]
                    if len(line) > 0 and line[-1] == "\r":
                        line = line[:-1]
                self.parseLine(line)
        f.close()

    def getDefinitions(self):
        """retrieve a dictionary containing definitions for CONFIG_XXX"""
        return self.items.copy()

    def __repr__(self):
        return repr(self.items)

    def __str__(self):
        return str(self.items)
|
83
libc/kernel/tools/update_all.py
Executable file
83
libc/kernel/tools/update_all.py
Executable file
@@ -0,0 +1,83 @@
|
||||
#!/usr/bin/env python
|
||||
#
|
||||
import sys, cpp, kernel, glob, os, re, getopt, clean_header
|
||||
from defaults import *
|
||||
from utils import *
|
||||
|
||||
def usage():
    """Print command-line help, then exit with status 0."""
    print """\
  usage: %(progname)s

    this program is used to update all the auto-generated clean headers
    used by the Bionic C library. it assumes the following:

      - a set of source kernel headers is located in '../original',
        relative to the program's directory

      - the clean headers will be placed in '../arch-<arch>/asm',
        '../common/linux', '../common/asm-generic', etc..

      - if ANDROID_PRODUCT_OUT is defined in your environment, you're
        using the Android build system, and the program will issue
        p4 add / edit / delete commands to update the depot for you.
        (you'll need to p4 submit manually though)
""" % { "progname" : os.path.basename(sys.argv[0]) }
    sys.exit(0)

try:
    optlist, args = getopt.getopt( sys.argv[1:], '' )
except:
    # unrecognized option
    sys.stderr.write( "error: unrecognized option\n" )
    usage()

# this program accepts no options or arguments at all
if len(optlist) > 0 or len(args) > 0:
    usage()

progdir = find_program_dir()
original_dir = os.path.normpath( progdir + "/../original" )
if not os.path.isdir( original_dir ):
    panic( "required directory does not exists: %s\n" % original_dir )

# find all source files in 'original'
#
sources = []
for root, dirs, files in os.walk( original_dir ):
    for file in files:
        base, ext = os.path.splitext(file)
        if ext == ".h":
            sources.append( "%s/%s" % (root,file) )

b = BatchFileUpdater()

# pre-load the existing clean headers so edits/additions can be detected
for arch in kernel_archs:
    b.readDir( os.path.normpath( progdir + "/../arch-%s" % arch ) )

b.readDir( os.path.normpath( progdir + "/../common" ) )

#print "OLD " + repr(b.old_files)

for path in sources:
    dst_path, newdata = clean_header.cleanupFile(path)
    if not dst_path:
        continue

    b.readFile( dst_path )
    r = b.editFile( dst_path, newdata )
    # editFile result codes: 0 = unchanged, 1 = edited, other = new file
    if r == 0:
        r = "unchanged"
    elif r == 1:
        r = "edited"
    else:
        r = "added"

    print "cleaning: %-*s -> %-*s (%s)" % ( 35, path, 35, dst_path, r )

# under the Android build system, go through perforce; otherwise write directly
usePerforce = os.environ.has_key("ANDROID_PRODUCT_OUT")

if usePerforce:
    b.updateP4Files()
else:
    b.updateFiles()

sys.exit(0)
|
397
libc/kernel/tools/utils.py
Normal file
397
libc/kernel/tools/utils.py
Normal file
@@ -0,0 +1,397 @@
|
||||
# common python utility routines for the Bionic tool scripts
|
||||
|
||||
import sys, os, commands, string, commands
|
||||
|
||||
# basic debugging trace support
|
||||
# call D_setlevel to set the verbosity level
|
||||
# and D(), D2(), D3(), D4() to add traces
|
||||
#
|
||||
verbose = 0
|
||||
|
||||
def panic(msg):
    """Report a fatal error on stderr and abort the program with status 1."""
    prefix = find_program_name() + ": error: "
    for chunk in (prefix, msg):
        sys.stderr.write(chunk)
    sys.exit(1)
|
||||
|
||||
def D(msg):
    # level-1 debug trace: printed when verbosity > 0
    global verbose
    if verbose > 0:
        print msg

def D2(msg):
    # level-2 debug trace: printed when verbosity >= 2
    global verbose
    if verbose >= 2:
        print msg

def D3(msg):
    # level-3 debug trace: printed when verbosity >= 3
    global verbose
    if verbose >= 3:
        print msg

def D4(msg):
    # level-4 debug trace: printed when verbosity >= 4
    global verbose
    if verbose >= 4:
        print msg
|
||||
|
||||
def D_setlevel(level):
    # set the global verbosity level consulted by D() .. D4()
    global verbose
    verbose = level
|
||||
|
||||
|
||||
# other stuff
|
||||
#
|
||||
#
|
||||
def find_program_name():
    """Return the basename of the currently running script."""
    _, name = os.path.split(sys.argv[0])
    return name
|
||||
|
||||
def find_program_dir():
    """Return the directory component of the currently running script's path."""
    directory, _ = os.path.split(sys.argv[0])
    return directory
|
||||
|
||||
def find_file_from_upwards(from_path,target_file):
    """find a file in the current directory or its parents. if 'from_path' is None,
       seach from the current program's directory.

       Returns the path of the first match found while walking upwards,
       or None when the search reaches the top without finding one.
    """
    path = from_path
    if path == None:
        path = os.path.realpath(sys.argv[0])
        path = os.path.dirname(path)
        D("this script seems to be located in: %s" % path)

    while 1:
        D("probing "+path)
        if path == "":
            file = target_file
        else:
            file = path + "/" + target_file

        if os.path.isfile(file):
            D("found %s in %s" % (target_file, path))
            return file

        if path == "":
            return None

        # bug fix: os.path.dirname("/") == "/", so the original loop never
        # terminated once an absolute search reached the filesystem root.
        # stop when going up no longer changes the path.
        parent = os.path.dirname(path)
        if parent == path:
            return None
        path = parent
|
||||
|
||||
def find_bionic_root():
    """Locate the Bionic top-level directory.

    Searches upwards from this script's location for SYSCALLS.TXT and
    returns the directory that contains it, or None when not found.
    """
    marker = find_file_from_upwards(None, "SYSCALLS.TXT")
    if not marker:
        return None
    return os.path.dirname(marker)
|
||||
|
||||
def find_kernel_headers():
    """try to find the directory containing the kernel headers for this machine"""
    # NOTE(review): the 'commands' module is Python-2-only
    status, version = commands.getstatusoutput( "uname -r" )  # get Linux kernel version
    if status != 0:
        D("could not execute 'uname -r' command properly")
        return None

    # get rid of the "-xenU" suffix that is found in Xen virtual machines
    if len(version) > 5 and version[-5:] == "-xenU":
        version = version[:-5]

    # Debian/Ubuntu-style headers location
    path = "/usr/src/linux-headers-" + version
    D("probing %s for kernel headers" % (path+"/include"))
    ret = os.path.isdir( path )
    if ret:
        D("found kernel headers in: %s" % (path + "/include"))
        return path
    return None
|
||||
|
||||
|
||||
# parser for the SYSCALLS.TXT file
|
||||
#
|
||||
class SysCallsTxtParser:
    """Parser for the SYSCALLS.TXT syscall description file.

    Each non-comment line has the general form:

        return-type  func[:name] ( [params] )  number[,number2] | #number | stub

    Every successfully parsed line is appended to self.syscalls as a dict
    with the keys: "id", "id2", "name", "func", "params" and "decl".
    """

    def __init__(self):
        self.syscalls = []   # accumulated syscall descriptions
        self.lineno   = 0    # current input line, for error reporting

    def E(self, msg):
        # BUGFIX: this method was declared without 'self', so every call
        # through self.E() raised a TypeError instead of printing the error.
        print("%d: %s" % (self.lineno, msg))

    def parse_line(self, line):
        """Parse one syscall description line and record the result.

        Malformed lines are reported through self.E() and skipped."""
        pos_lparen = line.find('(')
        E = self.E
        if pos_lparen < 0:
            E("missing left parenthesis in '%s'" % line)
            return

        pos_rparen = line.rfind(')')
        if pos_rparen < 0 or pos_rparen <= pos_lparen:
            E("missing or misplaced right parenthesis in '%s'" % line)
            return

        return_type = line[:pos_lparen].strip().split()
        if len(return_type) < 2:
            E("missing return type in '%s'" % line)
            return

        syscall_func = return_type[-1]
        # works on Python 2 and 3, unlike the old string.join()
        return_type = ' '.join(return_type[:-1])

        # an optional "func:name" form lets the C entry point and the
        # syscall name differ
        pos_colon = syscall_func.find(':')
        if pos_colon < 0:
            syscall_name = syscall_func
        else:
            if pos_colon == 0 or pos_colon+1 >= len(syscall_func):
                E("misplaced colon in '%s'" % line)
                return
            syscall_name = syscall_func[pos_colon+1:]
            syscall_func = syscall_func[:pos_colon]

        if pos_rparen > pos_lparen+1:
            syscall_params = line[pos_lparen+1:pos_rparen].split(',')
            params = ','.join(syscall_params)
        else:
            syscall_params = []
            params = "void"

        # trailing field: "stub", or one or two syscall numbers, optionally
        # prefixed with '#'
        number = line[pos_rparen+1:].strip()
        if number == "stub":
            syscall_id  = -1
            syscall_id2 = -1
        else:
            try:
                if number[0] == '#':
                    number = number[1:].strip()
                numbers = number.split(',')
                syscall_id  = int(numbers[0])
                syscall_id2 = syscall_id
                if len(numbers) > 1:
                    syscall_id2 = int(numbers[1])
            except (ValueError, IndexError):
                # narrowed from a bare 'except:'; these are the only
                # exceptions the indexing/int() above can raise on bad input
                E("invalid syscall number in '%s'" % line)
                return

        t = { "id"     : syscall_id,
              "id2"    : syscall_id2,
              "name"   : syscall_name,
              "func"   : syscall_func,
              "params" : syscall_params,
              "decl"   : "%-15s %s (%s);" % (return_type, syscall_func, params) }

        self.syscalls.append(t)

    def parse_file(self, file_path):
        """Parse a whole SYSCALLS.TXT file, skipping blank lines and
        '#' comments."""
        fp = open(file_path)
        for line in fp:   # was fp.xreadlines(), which Python 3 removed
            self.lineno += 1
            line = line.strip()
            if not line: continue
            if line[0] == '#': continue
            self.parse_line(line)

        fp.close()
|
||||
|
||||
|
||||
class Output:
    """Minimal text sink that forwards everything to a file-like object
    (standard output by default)."""

    def __init__(self, out=sys.stdout):
        self.out = out

    def write(self, msg):
        """Emit 'msg' verbatim, without a trailing newline."""
        self.out.write(msg)

    def writeln(self, msg):
        """Emit 'msg' followed by a newline."""
        self.write(msg)
        self.write("\n")
|
||||
|
||||
class StringOutput:
    """Output-compatible sink that accumulates everything written to it
    into a single in-memory string."""

    def __init__(self):
        self.line = ""   # text accumulated so far

    def write(self, msg):
        self.line = self.line + msg
        D2("write '%s'" % msg)

    def writeln(self, msg):
        self.line = self.line + msg + '\n'
        D2("write '%s\\n'" % msg)

    def get(self):
        """Return everything written so far."""
        return self.line
|
||||
|
||||
|
||||
def create_file_path(path):
    """Create every missing parent directory of 'path', top-down.

    Only the directories leading to 'path' are created; the file itself
    is not touched."""
    ancestors = []
    while True:
        parent = os.path.dirname(path)
        #print "parent: %s <- %s" % (parent, path)
        if parent == "/" or parent == "":
            break
        ancestors.append(parent)
        path = parent

    # make the shallowest directories first so each mkdir has its parent
    for d in reversed(ancestors):
        #print "dir %s" % d
        if not os.path.isdir(d):
            os.mkdir(d)
|
||||
|
||||
def walk_source_files(paths, callback, args, excludes=None):
    """recursively walk a list of paths and files, only keeping the source files in directories

    'callback' is invoked as callback(filepath, args) for every plain file
    listed directly in 'paths', and for every .h/.c/.cpp/.S file found
    under the directories in 'paths'.  Directory names appearing in
    'excludes' are pruned from the walk."""
    # avoid the mutable-default-argument pitfall; None means "no exclusions"
    if excludes is None:
        excludes = []
    for path in paths:
        if not os.path.isdir(path):
            # a plain file is passed through unconditionally
            callback(path, args)
        else:
            for root, dirs, files in os.walk(path):
                #print "w-- %s (ex: %s)" % (repr((root,dirs)), repr(excludes))
                if len(excludes):
                    # prune in place over a copy so os.walk skips these dirs
                    for d in dirs[:]:
                        if d in excludes:
                            dirs.remove(d)
                for f in files:
                    r, ext = os.path.splitext(f)
                    if ext in [ ".h", ".c", ".cpp", ".S" ]:
                        callback( "%s/%s" % (root,f), args )
|
||||
|
||||
def cleanup_dir(path):
    """create a directory if needed, and ensure that it is totally empty
    by removing any existing content in it"""
    if not os.path.exists(path):
        os.mkdir(path)
        return

    # walk bottom-up so directories are already empty when we rmdir them
    for root, dirs, files in os.walk(path, topdown=False):
        # NOTE(review): os.walk() roots normally carry no trailing slash,
        # so this guard only fires when 'path' itself ends in
        # "kernel_headers/" — confirm this matches the caller's intent
        if root.endswith("kernel_headers/"):
            # skip 'kernel_headers'
            continue
        for entry in files:
            os.remove(os.path.join(root, entry))
        for entry in dirs:
            os.rmdir(os.path.join(root, entry))
|
||||
|
||||
def update_file( path, newdata ):
    """update a file on disk, only if its content has changed

    Returns 1 when the file was (re)written, 0 when nothing changed or
    the file could not be read/created."""
    if os.path.exists( path ):
        try:
            f = open( path, "r" )
            olddata = f.read()
            f.close()
        except:
            D("update_file: cannot read existing file '%s'" % path)
            return 0

        # BUGFIX: this comparison used to read 'oldata' (missing 'd'),
        # a NameError that crashed every update of an existing readable file
        if olddata == newdata:
            D2("update_file: no change to file '%s'" % path )
            return 0
    else:
        try:
            create_file_path(path)
        except:
            D("update_file: cannot create path to '%s'" % path)
            return 0

    f = open( path, "w" )
    f.write( newdata )
    f.close()

    return 1
|
||||
|
||||
|
||||
class BatchFileUpdater:
    """a class used to edit several files at once

    Typical use: record the current on-disk state with readFile()/readDir(),
    stage new content with editFile(), then apply everything with
    updateFiles() (plain filesystem) or updateP4Files() (Perforce)."""

    def __init__(self):
        self.old_files = set()   # paths that currently exist on disk
        self.new_files = set()   # paths that should exist after the update
        self.new_data  = {}      # destination path -> staged content

    def readFile(self,path):
        """record a single existing file"""
        #path = os.path.realpath(path)
        if os.path.exists(path):
            self.old_files.add(path)

    def readDir(self,path):
        """record every file found under 'path', recursively"""
        #path = os.path.realpath(path)
        for root, dirs, files in os.walk(path):
            for f in files:
                dst = "%s/%s" % (root,f)
                self.old_files.add(dst)

    def editFile(self,dst,data):
        """edit a destination file. if the file is not mapped from a source,
        it will be added. return 0 if the file content wasn't changed,
        1 if it was edited, or 2 if the file is new"""
        #dst = os.path.realpath(dst)
        result = 1
        if os.path.exists(dst):
            f = open(dst, "r")
            olddata = f.read()
            f.close()
            if olddata == data:
                # unchanged: drop it from the delete candidates
                self.old_files.remove(dst)
                return 0
        else:
            result = 2

        self.new_data[dst] = data
        self.new_files.add(dst)
        return result

    def getChanges(self):
        """determine changes, returns (adds, deletes, edits)"""
        adds    = set()
        edits   = set()
        deletes = set()

        for dst in self.new_files:
            if dst not in self.old_files:
                adds.add(dst)
            else:
                edits.add(dst)

        for dst in self.old_files:
            if dst not in self.new_files:
                deletes.add(dst)

        return (adds, deletes, edits)

    def _writeFile(self,dst,data=None):
        # internal helper: write 'data' (or the staged content) to 'dst',
        # creating parent directories as needed
        if not os.path.exists(os.path.dirname(dst)):
            create_file_path(dst)
        if data is None:
            data = self.new_data[dst]
        f = open(dst, "w")
        # BUGFIX: this used to write self.new_data[dst] unconditionally,
        # silently ignoring an explicitly supplied 'data' argument
        f.write(data)
        f.close()

    def updateFiles(self):
        """apply adds/edits/deletes directly on the filesystem"""
        adds, deletes, edits = self.getChanges()

        for dst in sorted(adds):
            self._writeFile(dst)

        for dst in sorted(edits):
            self._writeFile(dst)

        for dst in sorted(deletes):
            os.remove(dst)

    def updateP4Files(self):
        """same as updateFiles(), but also issue the matching 'p4' commands"""
        adds, deletes, edits = self.getChanges()

        if len(adds):
            files = " ".join(sorted(adds))
            D( "%d new files will be p4 add-ed" % len(adds) )
            for dst in adds:
                self._writeFile(dst)
            D2("P4 ADDS: %s" % files)
            # NOTE(review): the 'commands' module is Python 2 only; switch
            # to subprocess if this file ever moves to Python 3
            o = commands.getoutput( "p4 add " + files )
            D2( o )

        if len(edits):
            files = " ".join(sorted(edits))
            D( "%d files will be p4 edit-ed" % len(edits) )
            D2("P4 EDITS: %s" % files)
            o = commands.getoutput( "p4 edit " + files )
            D2( o )
            for dst in edits:
                self._writeFile(dst)

        if len(deletes):
            files = " ".join(sorted(deletes))
            D( "%d files will be p4 delete-d" % len(deletes) )
            D2("P4 DELETES: %s" % files)
            o = commands.getoutput( "p4 delete " + files )
            D2( o )
|
Reference in New Issue
Block a user