mirror of https://github.com/samba-team/samba.git synced 2025-01-11 05:18:09 +03:00

build: tidy up the wafsamba rules a bit

use python string conventions for function comments
Andrew Tridgell 2010-03-28 22:01:04 +11:00
parent d8b91f1120
commit b7ff06b904
9 changed files with 84 additions and 170 deletions
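
To make the conversion concrete, here is a minimal sketch of the convention change; the function name and comment text are invented for the example and are not taken from the diff below:

    # old style: a banner comment above the function
    #############################################
    # return True if the build is configured
    def IS_CONFIGURED(conf):
        return True

    # new style: the same text as a Python docstring inside the function
    def IS_CONFIGURED(conf):
        '''return True if the build is configured'''
        return True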

View File

@@ -5,15 +5,4 @@ obeyed
 TODO:
   - fix deps for --target see http://wiki.samba.org/index.php/Waf
   - cache project rules calculation
-  - make pidl rules depend on full pidl sources
-  - make script rules depend on the scripts
-  - add waf test
-  - s3 build
-  - merged build
-  - etags
-  - rest of old make targets
-  - better Makefile waf wrapper
-

View File

@@ -363,10 +363,9 @@ def CHECK_CFLAGS(conf, cflags):
                     msg="Checking compiler accepts %s" % cflags)
-#################################################
-# return True if a configuration option was found
 @conf
 def CONFIG_SET(conf, option):
+    '''return True if a configuration option was found'''
     return (option in conf.env) and (conf.env[option] != ())
 Build.BuildContext.CONFIG_SET = CONFIG_SET
@@ -414,21 +413,23 @@ def CHECK_LIB(conf, libs, mandatory=False, empty_decl=True):
     return ret
-###########################################################
-# check that the functions in 'list' are available in 'library'
-# if they are, then make that library available as a dependency
-#
-# if the library is not available and mandatory==True, then
-# raise an error.
-#
-# If the library is not available and mandatory==False, then
-# add the library to the list of dependencies to remove from
-# build rules
-#
-# optionally check for the functions first in libc
 @conf
 def CHECK_FUNCS_IN(conf, list, library, mandatory=False, checklibc=False,
                    headers=None, link=None, empty_decl=True):
+    """
+    check that the functions in 'list' are available in 'library'
+    if they are, then make that library available as a dependency
+    if the library is not available and mandatory==True, then
+    raise an error.
+    If the library is not available and mandatory==False, then
+    add the library to the list of dependencies to remove from
+    build rules
+    optionally check for the functions first in libc
+    """
     remaining = TO_LIST(list)
     liblist = TO_LIST(library)
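
For context, a hypothetical configure() fragment using the signature shown above could look like the following; the 'intl' library, the function names and the header are placeholders, not part of this commit:

    def configure(conf):
        # look for the functions in libc first, then fall back to libintl
        conf.CHECK_FUNCS_IN('dgettext gettext', 'intl',
                            checklibc=True, headers='libintl.h')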
@@ -466,16 +467,16 @@ def CHECK_FUNCS_IN(conf, list, library, mandatory=False, checklibc=False,
     return ret
 @conf
 def IN_LAUNCH_DIR(conf):
     '''return True if this rule is being run from the launch directory'''
     return os.path.realpath(conf.curdir) == os.path.realpath(Options.launch_dir)
-#################################################
-# write out config.h in the right directory
 @conf
 def SAMBA_CONFIG_H(conf, path=None):
+    '''write out config.h in the right directory'''
     # we don't want to produce a config.h in places like lib/replace
     # when we are building projects that depend on lib/replace
     if not IN_LAUNCH_DIR(conf):
@@ -495,10 +496,9 @@ def SAMBA_CONFIG_H(conf, path=None):
     conf.write_config_header(path)
-##############################################################
-# setup a configurable path
 @conf
 def CONFIG_PATH(conf, name, default):
+    '''setup a configurable path'''
     if not name in conf.env:
         if default[0] == '/':
             conf.env[name] = default
@@ -523,18 +523,18 @@ def ADD_CFLAGS(conf, flags, testflags=False):
     conf.env['EXTRA_CFLAGS'].extend(TO_LIST(flags))
-##############################################################
-# add some extra include directories to all builds
 @conf
 def ADD_EXTRA_INCLUDES(conf, includes):
+    '''add some extra include directories to all builds'''
     if not 'EXTRA_INCLUDES' in conf.env:
         conf.env['EXTRA_INCLUDES'] = []
     conf.env['EXTRA_INCLUDES'].extend(TO_LIST(includes))
-##############################################################
-# work out the current flags. local flags are added first
 def CURRENT_CFLAGS(bld, target, cflags):
+    '''work out the current flags. local flags are added first'''
     if not 'EXTRA_CFLAGS' in bld.env:
         list = []
     else:
@@ -546,8 +546,8 @@ def CURRENT_CFLAGS(bld, target, cflags):
 @conf
 def CHECK_CC_ENV(conf):
-    '''trim whitespaces from 'CC'.
-    The build farm sometimes puts a space at the start'''
+    """trim whitespaces from 'CC'.
+    The build farm sometimes puts a space at the start"""
     if os.environ.get('CC'):
         conf.env.CC = TO_LIST(os.environ.get('CC'))
         if len(conf.env.CC) == 1:

View File

@@ -3,8 +3,8 @@
 import Build
 from samba_utils import *
-# rule for heimdal prototype generation
 def HEIMDAL_AUTOPROTO(bld, header, source, options=None, group='prototypes'):
+    '''rule for heimdal prototype generation'''
     bld.SET_BUILD_GROUP(group)
     if options is None:
         options='-q -P comment -o'
@@ -17,13 +17,15 @@ def HEIMDAL_AUTOPROTO(bld, header, source, options=None, group='prototypes'):
     t.env.OPTIONS = options
 Build.BuildContext.HEIMDAL_AUTOPROTO = HEIMDAL_AUTOPROTO
-# rule for private heimdal prototype generation
 def HEIMDAL_AUTOPROTO_PRIVATE(bld, header, source):
+    '''rule for private heimdal prototype generation'''
     bld.HEIMDAL_AUTOPROTO(header, source, options='-q -P comment -p')
 Build.BuildContext.HEIMDAL_AUTOPROTO_PRIVATE = HEIMDAL_AUTOPROTO_PRIVATE
-# rule for samba prototype generation
 def SAMBA_AUTOPROTO(bld, header, source):
+    '''rule for samba prototype generation'''
     bld.SET_BUILD_GROUP('prototypes')
     bld(
         source = source,

View File

@@ -42,10 +42,15 @@ def BUNDLED_EXTENSION_DEFAULT(opt, extension, noextenion=''):
 Options.Handler.BUNDLED_EXTENSION_DEFAULT = BUNDLED_EXTENSION_DEFAULT
 @runonce
 @conf
 def CHECK_BUNDLED_SYSTEM(conf, libname, minversion='0.0.0',
                          checkfunctions=None, headers=None):
+    '''check if a library is available as a system library.
+    this first tries via pkg-config, then if that fails
+    tries by testing for a specified function in the specified lib
+    '''
     if 'ALL' in conf.env.BUNDLED_LIBS or libname in conf.env.BUNDLED_LIBS:
         return False
     found = 'FOUND_SYSTEMLIB_%s' % libname
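
As a usage note, CHECK_BUNDLED_SYSTEM is the configure-time entry point described by the new docstring; a hypothetical call might look like this, with a library name, version and function chosen purely for illustration and not taken from this commit:

    def configure(conf):
        # prefer a system zlib if it is new enough, otherwise use the bundled copy
        conf.CHECK_BUNDLED_SYSTEM('z', minversion='1.2.3',
                                  checkfunctions='zlibVersion',
                                  headers='zlib.h')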

View File

@@ -66,15 +66,6 @@ def build_dependencies(self):
    the full dependency list for a target until we have all of the targets declared.
    '''
-    # we need to link against:
-    #  1) any direct system libs
-    #  2) any indirect system libs that come from subsystem dependencies
-    #  3) any direct local libs
-    #  4) any indirect local libs that come from subsystem dependencies
-    #  5) any direct objects
-    #  6) any indirect objects that come from subsystem dependencies
     if self.samba_type in ['LIBRARY', 'BINARY', 'PYTHON']:
         self.uselib = list(self.final_syslibs)
         self.uselib_local = list(self.final_libs)
@@ -240,20 +231,6 @@ def check_duplicate_sources(bld, tgt_list):
     seen = set()
-    '''
-    # this was useful for finding problems with the autogenerated rules
-    for t in tgt_list:
-        base_list = set()
-        sources = TO_LIST(getattr(t, 'source', ''))
-        for s in sources:
-            bname = os.path.basename(s)
-            if bname in base_list:
-                print "Suspicious duplicate name %s in %s" % (bname, t.sname)
-                continue
-            base_list.add(bname)
-    '''
     for t in tgt_list:
         obj_sources = getattr(t, 'source', '')
         tpath = os_path_relpath(t.path.abspath(bld.env), t.env['BUILD_DIRECTORY'] + '/default')
@@ -638,6 +615,7 @@ def calculate_final_deps(bld, tgt_list, loops):
     debug('deps: removed duplicate dependencies')
 ######################################################################
 # this provides a way to save our dependency calculations between runs
 savedeps_version = 3

View File

@@ -13,7 +13,5 @@ def SAMBA_MKVERSION(bld, target):
             shell=True,
             on_results=True,
             before="cc")
-    # force this rule to be constructed now
-    t.post()
 Build.BuildContext.SAMBA_MKVERSION = SAMBA_MKVERSION

View File

@@ -121,6 +121,7 @@ def collect(self):
 def SAMBA_PIDL_TABLES(bld, name, target):
+    '''generate the pidl NDR tables file'''
     headers = bld.env.PIDL_HEADERS
     bld.SET_BUILD_GROUP('main')
     t = bld(

View File

@@ -11,20 +11,9 @@ import shlex
 LIB_PATH="shared"
-##########################################################
-# create a node with a new name, based on an existing node
-def NEW_NODE(node, name):
-    ret = node.parent.find_or_declare([name])
-    ASSERT(node, ret is not None, "Unable to find new target with name '%s' from '%s'" % (
-        name, node.name))
-    return ret
-#############################################################
-# set a value in a local cache
-# return False if it's already set
 @conf
 def SET_TARGET_TYPE(ctx, target, value):
+    '''set the target type of a target'''
     cache = LOCAL_CACHE(ctx, 'TARGET_TYPE')
     if target in cache:
         ASSERT(ctx, cache[target] == value,
@@ -50,14 +39,14 @@ def GET_TARGET_TYPE(ctx, target):
 # http://stackoverflow.com/questions/815110/is-there-a-decorator-to-simply-cache-function-return-values
 runonce_ret = {}
 def runonce(function):
-    def wrapper(*args):
+    def runonce_wrapper(*args):
         if args in runonce_ret:
             return runonce_ret[args]
         else:
             ret = function(*args)
             runonce_ret[args] = ret
             return ret
-    return wrapper
+    return runonce_wrapper
 def ADD_LD_LIBRARY_PATH(path):
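
The rename above only affects the inner wrapper; runonce itself is a small argument-keyed memoiser. A standalone sketch of the same pattern, independent of waf and with invented function names:

    runonce_ret = {}

    def runonce(function):
        def runonce_wrapper(*args):
            if args in runonce_ret:
                return runonce_ret[args]
            ret = function(*args)
            runonce_ret[args] = ret
            return ret
        return runonce_wrapper

    @runonce
    def expensive_probe(flag):
        print("probing %s" % flag)
        return True

    expensive_probe('-Wall')   # runs the body and caches the result
    expensive_probe('-Wall')   # returns the cached value, no second "probing" line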
@@ -71,6 +60,7 @@ def ADD_LD_LIBRARY_PATH(path):
         newpath.append(path)
         os.environ['LD_LIBRARY_PATH'] = ':'.join(newpath)
 def install_rpath(bld):
     '''the rpath value for installation'''
     bld.env['RPATH'] = []
@@ -91,54 +81,52 @@ def build_rpath(bld):
     return []
-#############################################################
-# return a named build cache dictionary, used to store
-# state inside the following functions
 @conf
 def LOCAL_CACHE(ctx, name):
+    '''return a named build cache dictionary, used to store
+       state inside other functions'''
     if name in ctx.env:
         return ctx.env[name]
     ctx.env[name] = {}
     return ctx.env[name]
-#############################################################
-# set a value in a local cache
 @conf
 def LOCAL_CACHE_SET(ctx, cachename, key, value):
+    '''set a value in a local cache'''
     cache = LOCAL_CACHE(ctx, cachename)
     cache[key] = value
-#############################################################
-# a build assert call
 @conf
 def ASSERT(ctx, expression, msg):
+    '''a build assert call'''
     if not expression:
         sys.stderr.write("ERROR: %s\n" % msg)
         raise AssertionError
 Build.BuildContext.ASSERT = ASSERT
-################################################################
-# create a list of files by pre-pending each with a subdir name
 def SUBDIR(bld, subdir, list):
+    '''create a list of files by pre-pending each with a subdir name'''
     ret = ''
     for l in TO_LIST(list):
         ret = ret + os.path.normpath(os.path.join(subdir, l)) + ' '
     return ret
 Build.BuildContext.SUBDIR = SUBDIR
-#######################################################
-# d1 += d2
 def dict_concat(d1, d2):
+    '''concatenate two dictionaries d1 += d2'''
     for t in d2:
         if t not in d1:
             d1[t] = d2[t]
-############################################################
-# this overrides the 'waf -v' debug output to be in a nice
-# unix like format instead of a python list.
-# Thanks to ita on #waf for this
 def exec_command(self, cmd, **kw):
+    '''this overrides the 'waf -v' debug output to be in a nice
+    unix like format instead of a python list.
+    Thanks to ita on #waf for this'''
     import Utils, Logs
     _cmd = cmd
     if isinstance(cmd, list):
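
Several of the helpers above (LOCAL_CACHE, LOCAL_CACHE_SET, ASSERT) are meant to be combined. A hypothetical build() fragment, assuming the helpers are in scope via 'from samba_utils import *' as the other wafsamba modules do; the cache and key names are invented:

    def build(bld):
        cache = LOCAL_CACHE(bld, 'EXAMPLE_STATE')            # invented cache name
        if 'initialised' not in cache:
            LOCAL_CACHE_SET(bld, 'EXAMPLE_STATE', 'initialised', True)
        bld.ASSERT('initialised' in cache, "cache entry should exist by now")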
@@ -156,9 +144,8 @@ def exec_command(self, cmd, **kw):
 Build.BuildContext.exec_command = exec_command
-##########################################################
-# add a new top level command to waf
 def ADD_COMMAND(opt, name, function):
+    '''add a new top level command to waf'''
     Utils.g_module.__dict__[name] = function
     opt.name = function
 Options.Handler.ADD_COMMAND = ADD_COMMAND
@@ -180,36 +167,6 @@ def process_depends_on(self):
             self.includes += " " + y.more_includes
-#@feature('cprogram', 'cc', 'cshlib')
-#@before('apply_core')
-#def process_generated_dependencies(self):
-#    '''Ensure that any dependent source generation happens
-#       before any task that requires the output'''
-#    if getattr(self , 'depends_on', None):
-#        lst = self.to_list(self.depends_on)
-#        for x in lst:
-#            y = self.bld.name_to_obj(x, self.env)
-#            y.post()
-#import TaskGen, Task
-#
-#old_post_run = Task.Task.post_run
-#def new_post_run(self):
-#    self.cached = True
-#    return old_post_run(self)
-#
-#for y in ['cc', 'cxx']:
-#    TaskGen.classes[y].post_run = new_post_run
-def ENABLE_MAGIC_ORDERING(bld):
-    '''enable automatic build order constraint calculation
-       see page 35 of the waf book'''
-    print "NOT Enabling magic ordering"
-    #bld.use_the_magic()
-Build.BuildContext.ENABLE_MAGIC_ORDERING = ENABLE_MAGIC_ORDERING
 os_path_relpath = getattr(os.path, 'relpath', None)
 if os_path_relpath is None:
     # Python < 2.6 does not have os.path.relpath, provide a replacement
@@ -238,6 +195,7 @@ def unique_list(seq):
         result.append(item)
     return result
 def TO_LIST(str):
     '''Split a list, preserving quoted strings and existing lists'''
     if str is None:
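
For reference, the behaviour described by the TO_LIST docstring, written as a hypothetical interaction (the exact quoting rules and the None handling are assumptions based on the docstring and the visible "if str is None" branch, not verified against the full function body):

    TO_LIST('foo bar baz')             # -> ['foo', 'bar', 'baz'] (split on whitespace)
    TO_LIST(['already', 'a', 'list'])  # an existing list is preserved
    TO_LIST(None)                      # handled by the "if str is None" branch above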
@@ -268,6 +226,7 @@ def subst_vars_error(string, env):
         out.append(v)
     return ''.join(out)
 @conf
 def SUBST_ENV_VAR(ctx, varname):
     '''Substitute an environment variable for any embedded variables'''
@@ -301,18 +260,6 @@ def ENFORCE_GROUP_ORDERING(bld):
                     t.post()
 Build.BuildContext.ENFORCE_GROUP_ORDERING = ENFORCE_GROUP_ORDERING
-# @feature('cc')
-# @before('apply_lib_vars')
-# def process_objects(self):
-#     if getattr(self, 'add_objects', None):
-#         lst = self.to_list(self.add_objects)
-#         for x in lst:
-#             y = self.name_to_obj(x)
-#             if not y:
-#                 raise Utils.WafError('object %r was not found in uselib_local (required by add_objects %r)' % (x, self.name))
-#             y.post()
-#             self.env.append_unique('INC_PATHS', y.env.INC_PATHS)
 def recursive_dirlist(dir, relbase):
     '''recursive directory list'''
@@ -333,6 +280,7 @@ def mkdir_p(dir):
         mkdir_p(os.path.dirname(dir))
     os.mkdir(dir)
 def SUBST_VARS_RECURSIVE(string, env):
     '''recursively expand variables'''
     if string is None:
@@ -343,6 +291,7 @@ def SUBST_VARS_RECURSIVE(string, env):
         limit -= 1
     return string
 @conf
 def EXPAND_VARIABLES(ctx, varstr, vars=None):
     '''expand variables from a user supplied dictionary

View File

@@ -24,10 +24,9 @@ LIB_PATH="shared"
 os.putenv('PYTHONUNBUFFERED', '1')
-#################################################################
-# create the samba build environment
 @conf
 def SAMBA_BUILD_ENV(conf):
+    '''create the samba build environment'''
     conf.env['BUILD_DIRECTORY'] = conf.blddir
     mkdir_p(os.path.join(conf.blddir, LIB_PATH))
     mkdir_p(os.path.join(conf.blddir, 'python/samba/dcerpc'))
@@ -48,9 +47,8 @@ def SAMBA_BUILD_ENV(conf):
-################################################################
-# add an init_function to the list for a subsystem
 def ADD_INIT_FUNCTION(bld, subsystem, target, init_function):
+    '''add an init_function to the list for a subsystem'''
     if init_function is None:
         return
     bld.ASSERT(subsystem is not None, "You must specify a subsystem for init_function '%s'" % init_function)
@@ -61,8 +59,8 @@ def ADD_INIT_FUNCTION(bld, subsystem, target, init_function):
 Build.BuildContext.ADD_INIT_FUNCTION = ADD_INIT_FUNCTION
 #################################################################
-# define a Samba library
 def SAMBA_LIBRARY(bld, libname, source,
                   deps='',
                   public_deps='',
@@ -83,6 +81,7 @@ def SAMBA_LIBRARY(bld, libname, source,
                   install=True,
                   bundled_extension=True,
                   enabled=True):
+    '''define a Samba library'''
     if not enabled:
         SET_TARGET_TYPE(bld, libname, 'DISABLED')
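
A hypothetical wscript_build fragment using a few of the keyword arguments visible in this hunk; the target name, sources and dependency are placeholders, not taken from this commit:

    def build(bld):
        bld.SAMBA_LIBRARY('example',
                          source='example.c helper.c',
                          deps='replace',        # placeholder dependency
                          enabled=True)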
@@ -202,7 +201,6 @@ Build.BuildContext.SAMBA_LIBRARY = SAMBA_LIBRARY
 #################################################################
-# define a Samba binary
 def SAMBA_BINARY(bld, binname, source,
                  deps='',
                  includes='',
@@ -223,6 +221,7 @@ def SAMBA_BINARY(bld, binname, source,
                  vars=None,
                  install=True,
                  install_path=None):
+    '''define a Samba binary'''
     if not SET_TARGET_TYPE(bld, binname, 'BINARY'):
         return
@@ -319,7 +318,6 @@ Build.BuildContext.SAMBA_BINARY = SAMBA_BINARY
 #################################################################
-# define a Samba module.
 def SAMBA_MODULE(bld, modname, source,
                  deps='',
                  includes='',
@@ -333,6 +331,7 @@ def SAMBA_MODULE(bld, modname, source,
                  local_include=True,
                  vars=None,
                  enabled=True):
+    '''define a Samba module.'''
     # we add the init function regardless of whether the module
     # is enabled or not, as we need to generate a null list if
@@ -386,7 +385,6 @@ Build.BuildContext.SAMBA_MODULE = SAMBA_MODULE
 #################################################################
-# define a Samba subsystem
 def SAMBA_SUBSYSTEM(bld, modname, source,
                     deps='',
                     public_deps='',
@@ -409,6 +407,7 @@ def SAMBA_SUBSYSTEM(bld, modname, source,
                     enabled=True,
                     vars=None,
                     needs_python=False):
+    '''define a Samba subsystem'''
     if not enabled:
         SET_TARGET_TYPE(bld, modname, 'DISABLED')
@@ -493,33 +492,22 @@ Build.BuildContext.SAMBA_GENERATOR = SAMBA_GENERATOR
-###############################################################
-# add a new set of build rules from a subdirectory
-# the @runonce decorator ensures we don't end up
-# with duplicate rules
 def BUILD_SUBDIR(bld, dir):
+    '''add a new set of build rules from a subdirectory'''
     path = os.path.normpath(bld.curdir + '/' + dir)
     cache = LOCAL_CACHE(bld, 'SUBDIR_LIST')
     if path in cache: return
     cache[path] = True
     debug("build: Processing subdirectory %s" % dir)
     bld.add_subdirs(dir)
 Build.BuildContext.BUILD_SUBDIR = BUILD_SUBDIR
-##########################################################
-# add a new top level command to waf
-def ADD_COMMAND(opt, name, function):
-    Utils.g_module.__dict__[name] = function
-    opt.name = function
-Options.Handler.ADD_COMMAND = ADD_COMMAND
-###########################################################
-# setup build groups used to ensure that the different build
-# phases happen consecutively
 @runonce
 def SETUP_BUILD_GROUPS(bld):
+    '''setup build groups used to ensure that the different build
+       phases happen consecutively'''
     bld.p_ln = bld.srcnode # we do want to see all targets!
     bld.env['USING_BUILD_GROUPS'] = True
     bld.add_group('setup')
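
For orientation, build groups make the build phases run consecutively, and a rule opts into a phase by selecting the group before declaring targets. A hypothetical fragment using the 'prototypes' group that the autoproto rules earlier in this commit already use; the source and target names are invented:

    def build(bld):
        bld.SET_BUILD_GROUP('prototypes')
        bld(source='example_proto.c', target='example_proto.h')  # placeholder rule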
@@ -534,30 +522,32 @@ def SETUP_BUILD_GROUPS(bld):
 Build.BuildContext.SETUP_BUILD_GROUPS = SETUP_BUILD_GROUPS
-###########################################################
-# set the current build group
 def SET_BUILD_GROUP(bld, group):
+    '''set the current build group'''
     if not 'USING_BUILD_GROUPS' in bld.env:
         return
     bld.set_group(group)
 Build.BuildContext.SET_BUILD_GROUP = SET_BUILD_GROUP
-def h_file(filename):
-    import stat
-    st = os.stat(filename)
-    if stat.S_ISDIR(st[stat.ST_MODE]): raise IOError('not a file')
-    m = Utils.md5()
-    m.update(str(st.st_mtime))
-    m.update(str(st.st_size))
-    m.update(filename)
-    return m.digest()
 @conf
 def ENABLE_TIMESTAMP_DEPENDENCIES(conf):
+    """use timestamps instead of file contents for deps
+    this currently doesn't work"""
+    def h_file(filename):
+        import stat
+        st = os.stat(filename)
+        if stat.S_ISDIR(st[stat.ST_MODE]): raise IOError('not a file')
+        m = Utils.md5()
+        m.update(str(st.st_mtime))
+        m.update(str(st.st_size))
+        m.update(filename)
+        return m.digest()
     Utils.h_file = h_file
 ##############################
 # handle the creation of links for libraries and binaries
 # note that we use a relative symlink path to allow the whole tree
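
If it worked, the hook above would presumably be switched on from a configure() stage in waf's usual @conf style; a one-line hypothetical sketch (as the docstring notes, the mechanism does not work yet):

    def configure(conf):
        conf.ENABLE_TIMESTAMP_DEPENDENCIES()   # opt in to mtime/size based hashing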
@@ -569,6 +559,7 @@ t.quiet = True
 @feature('symlink_lib')
 @after('apply_link')
 def symlink_lib(self):
+    '''symlink a shared lib'''
     tsk = self.create_task('symlink_lib', self.link_task.outputs[0])
     # calculat the link target and put it in the environment
@@ -598,6 +589,7 @@ t.quiet = True
 @feature('symlink_bin')
 @after('apply_link')
 def symlink_bin(self):
+    '''symlink a binary'''
     if Options.is_install:
         # we don't want to copy the install binary, as
         # that has the install rpath, not the build rpath