# a waf tool to add autoconf-like macros to the configure section
# and for SAMBA_ macros for building libraries, binaries etc
import Build, os, sys, Options, Utils, Task, re, fnmatch
from TaskGen import feature, before
from Configure import conf
from Logs import debug
import shlex
# TODO: make this a --option
LIB_PATH="shared"

@conf
def SET_TARGET_TYPE(ctx, target, value):
    '''set the target type of a target'''
    cache = LOCAL_CACHE(ctx, 'TARGET_TYPE')
    if target in cache:
        ASSERT(ctx, cache[target] == value,
               "Target '%s' re-defined as %s - was %s" % (target, value, cache[target]))
        debug("task_gen: Skipping duplicate target %s (curdir=%s)" % (target, ctx.curdir))
        return False
    LOCAL_CACHE_SET(ctx, 'TARGET_TYPE', target, value)
    debug("task_gen: Target '%s' created of type '%s' in %s" % (target, value, ctx.curdir))
    return True
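
# Illustrative sketch (not executed): how a build rule might use the target
# type cache; the target name 'mylib' is hypothetical.
#
#   if not SET_TARGET_TYPE(bld, 'mylib', 'SHARED_LIBRARY'):
#       return   # duplicate definition - the first registration wins
#   assert GET_TARGET_TYPE(bld, 'mylib') == 'SHARED_LIBRARY'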

def GET_TARGET_TYPE(ctx, target):
    '''get target type from cache'''
    cache = LOCAL_CACHE(ctx, 'TARGET_TYPE')
    if target not in cache:
        return None
    return cache[target]

######################################################
# this is used as a decorator to make functions only
# run once. Based on the idea from
# http://stackoverflow.com/questions/815110/is-there-a-decorator-to-simply-cache-function-return-values
runonce_ret = {}
def runonce(function):
    def runonce_wrapper(*args):
        # key the cache on the function as well as the arguments, so two
        # different decorated functions called with the same arguments
        # don't share cached results
        key = (function, args)
        if key in runonce_ret:
            return runonce_ret[key]
        ret = function(*args)
        runonce_ret[key] = ret
        return ret
    return runonce_wrapper
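
# Illustrative sketch (not executed): memoising an expensive configure probe.
# The function name and body are hypothetical.
#
#   @runonce
#   def CHECK_HOST_FEATURE(name):
#       return run_expensive_probe(name)
#
# Repeated calls with the same argument return the cached result instead of
# re-running the probe.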

def ADD_LD_LIBRARY_PATH(path):
    '''add something to LD_LIBRARY_PATH'''
    if 'LD_LIBRARY_PATH' in os.environ:
        oldpath = os.environ['LD_LIBRARY_PATH']
    else:
        oldpath = ''
    newpath = oldpath.split(':')
    if path not in newpath:
        newpath.append(path)
        os.environ['LD_LIBRARY_PATH'] = ':'.join(newpath)

def install_rpath(bld):
    '''the rpath value for installation'''
    bld.env['RPATH'] = []
    bld.env['RPATH_ST'] = []
    if bld.env.RPATH_ON_INSTALL:
        return ['-Wl,-rpath=%s/lib' % bld.env.PREFIX]
    return []

def build_rpath(bld):
    '''the rpath value for build'''
    rpath = os.path.normpath('%s/%s' % (bld.env['BUILD_DIRECTORY'], LIB_PATH))
    bld.env['RPATH'] = []
    bld.env['RPATH_ST'] = []
    if bld.env.RPATH_ON_BUILD:
        return ['-Wl,-rpath=%s' % rpath]
    ADD_LD_LIBRARY_PATH(rpath)
    return []

@conf
def LOCAL_CACHE(ctx, name):
    '''return a named build cache dictionary, used to store
       state inside other functions'''
    if name in ctx.env:
        return ctx.env[name]
    ctx.env[name] = {}
    return ctx.env[name]

@conf
def LOCAL_CACHE_SET(ctx, cachename, key, value):
    '''set a value in a local cache'''
    cache = LOCAL_CACHE(ctx, cachename)
    cache[key] = value

@conf
def ASSERT(ctx, expression, msg):
    '''a build assert call'''
    if not expression:
        sys.stderr.write("ERROR: %s\n" % msg)
        raise AssertionError
Build.BuildContext.ASSERT = ASSERT

def SUBDIR(bld, subdir, list):
    '''create a list of files by pre-pending each with a subdir name'''
    ret = ''
    for l in TO_LIST(list):
        ret = ret + os.path.normpath(os.path.join(subdir, l)) + ' '
    return ret
Build.BuildContext.SUBDIR = SUBDIR
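
# Illustrative sketch (not executed): from a wscript_build fragment; the
# paths are hypothetical.
#
#   srcs = bld.SUBDIR('lib/util', 'util.c util_file.c')
#   # -> 'lib/util/util.c lib/util/util_file.c '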

def dict_concat(d1, d2):
    '''concatenate two dictionaries d1 += d2, without overwriting
       keys that already exist in d1'''
    for t in d2:
        if t not in d1:
            d1[t] = d2[t]

def exec_command(self, cmd, **kw):
    '''this overrides the 'waf -v' debug output to be in a nice
    unix like format instead of a python list.
    Thanks to ita on #waf for this'''
    import Utils, Logs
    _cmd = cmd
    if isinstance(cmd, list):
        _cmd = ' '.join(cmd)
    debug('runner: %s' % _cmd)
    if self.log:
        self.log.write('%s\n' % cmd)
        kw['log'] = self.log
    try:
        if not kw.get('cwd', None):
            kw['cwd'] = self.cwd
    except AttributeError:
        self.cwd = kw['cwd'] = self.bldnode.abspath()
    return Utils.exec_command(cmd, **kw)
Build.BuildContext.exec_command = exec_command

def ADD_COMMAND(opt, name, function):
    '''add a new top level command to waf'''
    Utils.g_module.__dict__[name] = function
    opt.name = function
Options.Handler.ADD_COMMAND = ADD_COMMAND
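
# Illustrative sketch (not executed): registering an extra top-level command
# from set_options(); the command name and function are hypothetical.
#
#   def show_info(ctx):
#       '''print some build information'''
#       print 'hypothetical info command'
#   opt.ADD_COMMAND('show_info', show_info)
#
# 'waf show_info' then dispatches to the registered function.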

@feature('cc', 'cshlib', 'cprogram')
@before('apply_core', 'exec_rule')
def process_depends_on(self):
    '''The new depends_on attribute for build rules
       allows us to specify a dependency on output from
       a source generation rule'''
    if getattr(self, 'depends_on', None):
        lst = self.to_list(self.depends_on)
        for x in lst:
            y = self.bld.name_to_obj(x, self.env)
            self.bld.ASSERT(y is not None, "Failed to find dependency %s of %s" % (x, self.name))
            y.post()
            if getattr(y, 'more_includes', None):
                self.includes += " " + y.more_includes

os_path_relpath = getattr(os.path, 'relpath', None)
if os_path_relpath is None:
    # Python < 2.6 does not have os.path.relpath, provide a replacement
    # (imported from Python2.6.5~rc2)
    def os_path_relpath(path, start):
        """Return a relative version of a path"""
        start_list = os.path.abspath(start).split("/")
        path_list = os.path.abspath(path).split("/")

        # Work out how much of the filepath is shared by start and path.
        i = len(os.path.commonprefix([start_list, path_list]))

        rel_list = ['..'] * (len(start_list) - i) + path_list[i:]
        if not rel_list:
            return start
        return os.path.join(*rel_list)

def unique_list(seq):
    '''return a uniquified list in the same order as the existing list'''
    seen = {}
    result = []
    for item in seq:
        if item in seen:
            continue
        seen[item] = True
        result.append(item)
    return result

def TO_LIST(str):
    '''Split a list, preserving quoted strings and existing lists'''
    if str is None:
        return []
    if isinstance(str, list):
        return str
    lst = str.split()
    # the string may have had quotes in it, now we
    # check if we did have quotes, and use the slower shlex
    # if we need to
    for e in lst:
        if e[0] == '"':
            return shlex.split(str)
    return lst
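
# Illustrative examples (not executed) of TO_LIST behaviour:
#
#   TO_LIST('a b c')        # -> ['a', 'b', 'c']
#   TO_LIST('"a b" c')      # -> ['a b', 'c']   (falls back to shlex)
#   TO_LIST(['x', 'y'])     # -> ['x', 'y']     (lists pass through unchanged)
#   TO_LIST(None)           # -> []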

def subst_vars_error(string, env):
    '''substitute vars, throw an error if a variable is not defined'''
    lst = re.split(r'(\$\{\w+\})', string)
    out = []
    for v in lst:
        if re.match(r'\$\{\w+\}', v):
            vname = v[2:-1]
            if vname not in env:
                print "Failed to find variable %s in %s" % (vname, string)
                sys.exit(1)
            v = env[vname]
        out.append(v)
    return ''.join(out)
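
# Illustrative sketch (not executed): ${NAME} tokens are replaced from the
# supplied environment; an undefined variable aborts the build. A plain dict
# is used here just for illustration.
#
#   subst_vars_error('${PREFIX}/lib', {'PREFIX': '/usr/local'})
#   # -> '/usr/local/lib'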

@conf
def SUBST_ENV_VAR(ctx, varname):
    '''Substitute an environment variable for any embedded variables'''
    return subst_vars_error(ctx.env[varname], ctx.env)
Build.BuildContext.SUBST_ENV_VAR = SUBST_ENV_VAR

def ENFORCE_GROUP_ORDERING(bld):
    '''enforce group ordering for the project. This
       makes the group ordering apply only when you specify
       a target with --target'''
    if Options.options.compile_targets:
        @feature('*')
        def force_previous_groups(self):
            if getattr(self.bld, 'enforced_group_ordering', False) == True:
                return
            self.bld.enforced_group_ordering = True

            def group_name(g):
                tm = self.bld.task_manager
                return [x for x in tm.groups_names if id(tm.groups_names[x]) == id(g)][0]

            my_id = id(self)
            bld = self.bld
            stop = None
            for g in bld.task_manager.groups:
                for t in g.tasks_gen:
                    if id(t) == my_id:
                        stop = id(g)
                        debug('group: Forcing up to group %s for target %s',
                              group_name(g), self.name or self.target)
                        break
                if stop != None:
                    break
            if stop is None:
                return

            for g in bld.task_manager.groups:
                if id(g) == stop:
                    break
                debug('group: Forcing group %s', group_name(g))
                for t in g.tasks_gen:
                    if getattr(t, 'forced_groups', False) != True:
                        debug('group: Posting %s', t.name or t.target)
                        t.forced_groups = True
                        t.post()
Build.BuildContext.ENFORCE_GROUP_ORDERING = ENFORCE_GROUP_ORDERING

def recursive_dirlist(dir, relbase, pattern=None):
    '''recursive directory list'''
    ret = []
    for f in os.listdir(dir):
        f2 = dir + '/' + f
        if os.path.isdir(f2):
            # pass the pattern down so files in subdirectories are filtered too
            ret.extend(recursive_dirlist(f2, relbase, pattern=pattern))
        else:
            if pattern and not fnmatch.fnmatch(f, pattern):
                continue
            ret.append(os_path_relpath(f2, relbase))
    return ret

def mkdir_p(dir):
    '''like mkdir -p'''
    if os.path.isdir(dir):
        return
    mkdir_p(os.path.dirname(dir))
    os.mkdir(dir)

def SUBST_VARS_RECURSIVE(string, env):
    '''recursively expand variables'''
    if string is None:
        return string
    limit = 100
    while (string.find('${') != -1 and limit > 0):
        string = subst_vars_error(string, env)
        limit -= 1
    return string

@conf
def EXPAND_VARIABLES(ctx, varstr, vars=None):
    '''expand variables from a user supplied dictionary
    This is most useful when you pass vars=locals() to expand
    all your local variables in strings
    '''
    if isinstance(varstr, list):
        ret = []
        for s in varstr:
            ret.append(EXPAND_VARIABLES(ctx, s, vars=vars))
        return ret

    import Environment
    env = Environment.Environment()
    ret = varstr

    # substitute on the user supplied dict if available
    if vars is not None:
        for v in vars.keys():
            env[v] = vars[v]
        ret = SUBST_VARS_RECURSIVE(ret, env)

    # if anything is left, subst on the environment as well
    if ret.find('${') != -1:
        ret = SUBST_VARS_RECURSIVE(ret, ctx.env)

    # make sure there is nothing left. Also check for the common
    # typo of $( instead of ${
    if ret.find('${') != -1 or ret.find('$(') != -1:
        print('Failed to substitute all variables in varstr=%s' % ret)
        sys.exit(1)
    return ret
Build.BuildContext.EXPAND_VARIABLES = EXPAND_VARIABLES
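
# Illustrative sketch (not executed): expanding local variables in a string;
# the variable names and values are hypothetical.
#
#   version = '4.0'
#   path = bld.EXPAND_VARIABLES('samba-${version}/private', vars=locals())
#   # -> 'samba-4.0/private'
#
# With vars=None, variables are expanded from bld.env instead.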

def RUN_COMMAND(cmd,
                env=None,
                shell=False):
    '''run an external command, return exit code or signal'''
    if env:
        cmd = SUBST_VARS_RECURSIVE(cmd, env)

    status = os.system(cmd)
    if os.WIFEXITED(status):
        return os.WEXITSTATUS(status)
    if os.WIFSIGNALED(status):
        return - os.WTERMSIG(status)
    print "Unknown exit reason %d for command: %s" % (status, cmd)
    return -1
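
# Illustrative sketch (not executed): the command here is hypothetical.
#
#   status = RUN_COMMAND('${PREFIX}/sbin/sometool --version', env=LOAD_ENVIRONMENT())
#   # 0 on success, the command's exit code on failure,
#   # or -N if the command was killed by signal N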

# make sure we have md5. some systems don't have it
try:
    from hashlib import md5
except:
    try:
        import md5
    except:
        import Constants
        Constants.SIG_NIL = hash('abcd')

        class replace_md5(object):
            def __init__(self):
                self.val = None
            def update(self, val):
                self.val = hash((self.val, val))
            def digest(self):
                return str(self.val)
            def hexdigest(self):
                return self.digest().encode('hex')

        def replace_h_file(filename):
            '''hash a file using the replacement hash object'''
            f = open(filename, 'rb')
            m = replace_md5()
            # 'filename' is re-used as the read buffer: it is truthy on entry,
            # so the loop reads the file in 100000 byte chunks until EOF
            while (filename):
                filename = f.read(100000)
                m.update(filename)
            f.close()
            return m.digest()

        Utils.md5 = replace_md5
        Task.md5 = replace_md5
        Utils.h_file = replace_h_file

def LOAD_ENVIRONMENT():
    '''load the configuration environment, allowing access to env vars
       from new commands'''
    import Environment
    env = Environment.Environment()
    env.load('bin/c4che/default.cache.py')
    return env

def IS_NEWER(bld, file1, file2):
    '''return True if file1 is newer than file2'''
    t1 = os.stat(os.path.join(bld.curdir, file1)).st_mtime
    t2 = os.stat(os.path.join(bld.curdir, file2)).st_mtime
    return t1 > t2
Build.BuildContext.IS_NEWER = IS_NEWER