# a waf tool to add autoconf-like macros to the configure section
# and for SAMBA_ macros for building libraries, binaries etc

import Build, os, sys, Options, Utils, Task, re, fnmatch, Logs
from TaskGen import feature, before
from Configure import conf
from Logs import debug
import shlex

# TODO: make this a --option
LIB_PATH = 'shared'

@conf
def SET_TARGET_TYPE(ctx, target, value):
    '''set the target type of a target'''
    cache = LOCAL_CACHE(ctx, 'TARGET_TYPE')
    if target in cache and cache[target] != 'EMPTY':
        Logs.error("ERROR: Target '%s' in directory %s re-defined as %s - was %s" % (
            target, ctx.curdir, value, cache[target]))
        sys.exit(1)
    LOCAL_CACHE_SET(ctx, 'TARGET_TYPE', target, value)
    debug("task_gen: Target '%s' created of type '%s' in %s" % (target, value, ctx.curdir))
    return True

def GET_TARGET_TYPE(ctx, target):
    '''get target type from cache'''
    cache = LOCAL_CACHE(ctx, 'TARGET_TYPE')
    if target not in cache:
        return None
    return cache[target]

######################################################
# this is used as a decorator to make functions only
# run once. Based on the idea from
# http://stackoverflow.com/questions/815110/is-there-a-decorator-to-simply-cache-function-return-values
runonce_ret = {}
def runonce(function):
    def runonce_wrapper(*args):
        if args in runonce_ret:
            return runonce_ret[args]
        else:
            ret = function(*args)
            runonce_ret[args] = ret
            return ret
    return runonce_wrapper
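
# Illustrative sketch (not part of the build logic): @runonce memoises a
# function on its positional arguments, so repeated calls with the same
# arguments return the cached result instead of re-running the body:
#
#   @runonce
#   def expensive_probe(flag):    # hypothetical helper
#       ...
#   expensive_probe('-O2')        # runs the body
#   expensive_probe('-O2')        # served from runonce_ret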

def ADD_LD_LIBRARY_PATH(path):
    '''add something to LD_LIBRARY_PATH'''
    if 'LD_LIBRARY_PATH' in os.environ:
        oldpath = os.environ['LD_LIBRARY_PATH']
    else:
        oldpath = ''
    newpath = oldpath.split(':')
    if path not in newpath:
        newpath.append(path)
        os.environ['LD_LIBRARY_PATH'] = ':'.join(newpath)

def install_rpath(bld):
    '''the rpath value for installation'''
    bld.env['RPATH'] = []
    bld.env['RPATH_ST'] = []
    if bld.env.RPATH_ON_INSTALL:
        return ['-Wl,-rpath=%s/lib' % bld.env.PREFIX]
    return []

def build_rpath(bld):
    '''the rpath value for build'''
    rpath = os.path.normpath('%s/%s' % (bld.env.BUILD_DIRECTORY, LIB_PATH))
    bld.env['RPATH'] = []
    bld.env['RPATH_ST'] = []
    if bld.env.RPATH_ON_BUILD:
        return ['-Wl,-rpath=%s' % rpath]
    ADD_LD_LIBRARY_PATH(rpath)
    return []

@conf
def LOCAL_CACHE(ctx, name):
    '''return a named build cache dictionary, used to store
       state inside other functions'''
    if name in ctx.env:
        return ctx.env[name]
    ctx.env[name] = {}
    return ctx.env[name]

@conf
def LOCAL_CACHE_SET(ctx, cachename, key, value):
    '''set a value in a local cache'''
    cache = LOCAL_CACHE(ctx, cachename)
    cache[key] = value
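
# Usage sketch (hypothetical target name): LOCAL_CACHE returns the same
# dictionary on every call for a given name, so values set here are visible
# to later calls:
#
#   LOCAL_CACHE_SET(conf, 'TARGET_TYPE', 'mylib', 'LIBRARY')
#   LOCAL_CACHE(conf, 'TARGET_TYPE')['mylib']    # -> 'LIBRARY'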

@conf
def ASSERT(ctx, expression, msg):
    '''a build assert call'''
    if not expression:
        Logs.error("ERROR: %s\n" % msg)
        raise AssertionError
Build.BuildContext.ASSERT = ASSERT

def SUBDIR(bld, subdir, list):
    '''create a list of files by pre-pending each with a subdir name'''
    ret = ''
    for l in TO_LIST(list):
        ret = ret + os.path.normpath(os.path.join(subdir, l)) + ' '
    return ret
Build.BuildContext.SUBDIR = SUBDIR
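
# Example (illustrative): each source file is prefixed with the subdirectory
# and the result is returned as one space-separated string:
#
#   bld.SUBDIR('lib/util', 'debug.c signal.c')
#   # -> 'lib/util/debug.c lib/util/signal.c '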

def dict_concat(d1, d2):
    '''concatenate two dictionaries d1 += d2'''
    for t in d2:
        if t not in d1:
            d1[t] = d2[t]

def exec_command(self, cmd, **kw):
    '''this overrides the 'waf -v' debug output to be in a nice
    unix like format instead of a python list.
    Thanks to ita on #waf for this'''
    import Utils, Logs
    _cmd = cmd
    if isinstance(cmd, list):
        _cmd = ' '.join(cmd)
    debug('runner: %s' % _cmd)
    if self.log:
        self.log.write('%s\n' % cmd)
        kw['log'] = self.log
    try:
        if not kw.get('cwd', None):
            kw['cwd'] = self.cwd
    except AttributeError:
        self.cwd = kw['cwd'] = self.bldnode.abspath()
    return Utils.exec_command(cmd, **kw)
Build.BuildContext.exec_command = exec_command

def ADD_COMMAND(opt, name, function):
    '''add a new top level command to waf'''
    Utils.g_module.__dict__[name] = function
    opt.name = function
Options.Handler.ADD_COMMAND = ADD_COMMAND

@feature('cc', 'cshlib', 'cprogram')
@before('apply_core', 'exec_rule')
def process_depends_on(self):
    '''The new depends_on attribute for build rules
       allows us to specify a dependency on output from
       a source generation rule'''
    if getattr(self, 'depends_on', None):
        lst = self.to_list(self.depends_on)
        for x in lst:
            y = self.bld.name_to_obj(x, self.env)
            self.bld.ASSERT(y is not None, "Failed to find dependency %s of %s" % (x, self.name))
            y.post()
            if getattr(y, 'more_includes', None):
                self.includes += " " + y.more_includes

os_path_relpath = getattr(os.path, 'relpath', None)
if os_path_relpath is None:
    # Python < 2.6 does not have os.path.relpath, provide a replacement
    # (imported from Python2.6.5~rc2)
    def os_path_relpath(path, start):
        """Return a relative version of a path"""
        start_list = os.path.abspath(start).split("/")
        path_list = os.path.abspath(path).split("/")

        # Work out how much of the filepath is shared by start and path.
        i = len(os.path.commonprefix([start_list, path_list]))

        rel_list = ['..'] * (len(start_list)-i) + path_list[i:]
        if not rel_list:
            return start
        return os.path.join(*rel_list)

def unique_list(seq):
    '''return a uniquified list in the same order as the existing list'''
    seen = {}
    result = []
    for item in seq:
        if item in seen: continue
        seen[item] = True
        result.append(item)
    return result

def TO_LIST(str):
    '''Split a list, preserving quoted strings and existing lists'''
    if str is None:
        return []
    if isinstance(str, list):
        return str
    lst = str.split()
    # the string may have had quotes in it, now we
    # check if we did have quotes, and use the slower shlex
    # if we need to
    for e in lst:
        if e[0] == '"':
            return shlex.split(str)
    return lst
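
# Example (illustrative): TO_LIST accepts None, an existing list or a string;
# a quoted substring switches to the slower shlex splitter:
#
#   TO_LIST('a b c')        # -> ['a', 'b', 'c']
#   TO_LIST('"a b" c')      # -> ['a b', 'c']
#   TO_LIST(['a', 'b'])     # returned unchanged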

def subst_vars_error(string, env):
    '''substitute vars, throw an error if a variable is not defined'''
    lst = re.split('(\$\{\w+\})', string)
    out = []
    for v in lst:
        if re.match('\$\{\w+\}', v):
            vname = v[2:-1]
            if vname not in env:
                Logs.error("Failed to find variable %s in %s" % (vname, string))
                sys.exit(1)
            v = env[vname]
        out.append(v)
    return ''.join(out)
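
# Example (illustrative, assuming env['PREFIX'] is '/usr/local'):
#
#   subst_vars_error('${PREFIX}/lib', env)    # -> '/usr/local/lib'
#   # referencing an undefined variable is a fatal error rather than being
#   # silently left in place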

@conf
def SUBST_ENV_VAR(ctx, varname):
    '''Substitute an environment variable for any embedded variables'''
    return subst_vars_error(ctx.env[varname], ctx.env)
Build.BuildContext.SUBST_ENV_VAR = SUBST_ENV_VAR

def ENFORCE_GROUP_ORDERING(bld):
    '''enforce group ordering for the project. This
       makes the group ordering apply only when you specify
       a target with --target'''
    if Options.options.compile_targets:
        @feature('*')
        def force_previous_groups(self):
            if getattr(self.bld, 'enforced_group_ordering', False) == True:
                return
            self.bld.enforced_group_ordering = True

            def group_name(g):
                tm = self.bld.task_manager
                return [x for x in tm.groups_names if id(tm.groups_names[x]) == id(g)][0]

            my_id = id(self)
            bld = self.bld
            stop = None
            for g in bld.task_manager.groups:
                for t in g.tasks_gen:
                    if id(t) == my_id:
                        stop = id(g)
                        debug('group: Forcing up to group %s for target %s',
                              group_name(g), self.name or self.target)
                        break
                if stop != None:
                    break
            if stop is None:
                return

            for g in bld.task_manager.groups:
                if id(g) == stop:
                    break
                debug('group: Forcing group %s', group_name(g))
                for t in g.tasks_gen:
                    if getattr(t, 'forced_groups', False) != True:
                        debug('group: Posting %s', t.name or t.target)
                        t.forced_groups = True
                        t.post()
Build.BuildContext.ENFORCE_GROUP_ORDERING = ENFORCE_GROUP_ORDERING

def recursive_dirlist(dir, relbase, pattern=None):
    '''recursive directory list'''
    ret = []
    for f in os.listdir(dir):
        f2 = dir + '/' + f
        if os.path.isdir(f2):
            # pass the pattern down so filtering also applies in subdirectories
            ret.extend(recursive_dirlist(f2, relbase, pattern=pattern))
        else:
            if pattern and not fnmatch.fnmatch(f, pattern):
                continue
            ret.append(os_path_relpath(f2, relbase))
    return ret

def mkdir_p(dir):
    '''like mkdir -p'''
    if not dir:
        # nothing left to create (guards against infinite recursion on
        # relative paths, where dirname() eventually returns '')
        return
    if os.path.isdir(dir):
        return
    mkdir_p(os.path.dirname(dir))
    os.mkdir(dir)
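
# Example (illustrative): mkdir_p('bin/default/include') creates each missing
# ancestor directory in turn and is a no-op if the path already exists.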

def SUBST_VARS_RECURSIVE(string, env):
    '''recursively expand variables'''
    if string is None:
        return string
    limit = 100
    while (string.find('${') != -1 and limit > 0):
        string = subst_vars_error(string, env)
        limit -= 1
    return string

@conf
def EXPAND_VARIABLES(ctx, varstr, vars=None):
    '''expand variables from a user supplied dictionary
    This is most useful when you pass vars=locals() to expand
    all your local variables in strings
    '''
    if isinstance(varstr, list):
        ret = []
        for s in varstr:
            ret.append(EXPAND_VARIABLES(ctx, s, vars=vars))
        return ret

    import Environment
    env = Environment.Environment()
    ret = varstr
    # substitute on user supplied dict if available
    if vars is not None:
        for v in vars.keys():
            env[v] = vars[v]
        ret = SUBST_VARS_RECURSIVE(ret, env)

    # if anything is left, subst on the environment as well
    if ret.find('${') != -1:
        ret = SUBST_VARS_RECURSIVE(ret, ctx.env)
    # make sure there is nothing left. Also check for the common
    # typo of $( instead of ${
    if ret.find('${') != -1 or ret.find('$(') != -1:
        Logs.error('Failed to substitute all variables in varstr=%s' % ret)
        sys.exit(1)
    return ret
Build.BuildContext.EXPAND_VARIABLES = EXPAND_VARIABLES
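
# Usage sketch (hypothetical local variable): pass vars=locals() to expand
# your own locals; anything still unexpanded is retried against bld.env:
#
#   version = '4.0'
#   bld.EXPAND_VARIABLES('samba-${version}.tar', vars=locals())
#   # -> 'samba-4.0.tar'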

def RUN_COMMAND(cmd,
                env=None,
                shell=False):
    '''run an external command, return exit code or signal'''
    if env:
        cmd = SUBST_VARS_RECURSIVE(cmd, env)

    status = os.system(cmd)
    if os.WIFEXITED(status):
        return os.WEXITSTATUS(status)
    if os.WIFSIGNALED(status):
        return -os.WTERMSIG(status)
    Logs.error("Unknown exit reason %d for command: %s" % (status, cmd))
    return -1
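
# Example (illustrative, hypothetical command): the return value follows shell
# conventions, with negative values meaning death by signal:
#
#   ret = RUN_COMMAND('make -C docs', env=bld.env)
#   if ret != 0:
#       Logs.error('command failed with %d' % ret)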

# make sure we have md5. some systems don't have it
try:
    from hashlib import md5
except:
    try:
        import md5
    except:
        import Constants
        Constants.SIG_NIL = hash('abcd')

        class replace_md5(object):
            def __init__(self):
                self.val = None
            def update(self, val):
                self.val = hash((self.val, val))
            def digest(self):
                return str(self.val)
            def hexdigest(self):
                return self.digest().encode('hex')

        def replace_h_file(filename):
            f = open(filename, 'rb')
            m = replace_md5()
            while (filename):
                filename = f.read(100000)
                m.update(filename)
            f.close()
            return m.digest()
        Utils.md5 = replace_md5
        Task.md5 = replace_md5
        Utils.h_file = replace_h_file

def LOAD_ENVIRONMENT():
    '''load the configuration environment, allowing access to env vars
       from new commands'''
    import Environment
    env = Environment.Environment()
    env.load('.lock-wscript')
    env.load(env.blddir + '/c4che/default.cache.py')
    return env

def IS_NEWER(bld, file1, file2):
    '''return True if file1 is newer than file2'''
    t1 = os.stat(os.path.join(bld.curdir, file1)).st_mtime
    t2 = os.stat(os.path.join(bld.curdir, file2)).st_mtime
    return t1 > t2
Build.BuildContext.IS_NEWER = IS_NEWER

@conf
def RECURSE(ctx, directory):
    '''recurse into a directory, relative to the curdir or top level'''
    try:
        visited_dirs = ctx.visited_dirs
    except:
        visited_dirs = ctx.visited_dirs = set()
    d = os.path.join(ctx.curdir, directory)
    if os.path.exists(d):
        abspath = os.path.abspath(d)
    else:
        abspath = os.path.abspath(os.path.join(Utils.g_module.srcdir, directory))
    ctxclass = ctx.__class__.__name__
    key = ctxclass + ':' + abspath
    if key in visited_dirs:
        # already done it
        return
    visited_dirs.add(key)
    relpath = os_path_relpath(abspath, ctx.curdir)
    if ctxclass == 'Handler':
        return ctx.sub_options(relpath)
    if ctxclass == 'ConfigurationContext':
        return ctx.sub_config(relpath)
    if ctxclass == 'BuildContext':
        return ctx.add_subdirs(relpath)
    Logs.error('Unknown RECURSE context class', ctxclass)
    raise
Options.Handler.RECURSE = RECURSE
Build.BuildContext.RECURSE = RECURSE
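
# Usage sketch (hypothetical subdirectory): the same call works from option,
# configure and build contexts, and each directory is only visited once per
# context class:
#
#   def configure(conf):
#       conf.RECURSE('lib/replace')
#   def build(bld):
#       bld.RECURSE('lib/replace')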

def CHECK_MAKEFLAGS(bld):
    '''check for the MAKEFLAGS environment variable in case we are being
       called from a Makefile, and try to honor a few make command line flags'''
    if not 'WAF_MAKE' in os.environ:
        return
    makeflags = os.environ.get('MAKEFLAGS')
    jobs_set = False
    for opt in makeflags.split():
        # options can come either as -x or as x
        if opt[0:2] == 'V=':
            Options.options.verbose = Logs.verbose = int(opt[2:])
            if Logs.verbose > 0:
                Logs.zones = ['runner']
            if Logs.verbose > 2:
                Logs.zones = ['*']
        elif opt[0].isupper() and opt.find('=') != -1:
            loc = opt.find('=')
            setattr(Options.options, opt[0:loc], opt[loc+1:])
        elif opt[0] != '-':
            for v in opt:
                if v == 'j':
                    jobs_set = True
                elif v == 'k':
                    Options.options.keep = True
        elif opt == '-j':
            jobs_set = True
        elif opt == '-k':
            Options.options.keep = True
    if not jobs_set:
        # default to one job
        Options.options.jobs = 1
Build.BuildContext.CHECK_MAKEFLAGS = CHECK_MAKEFLAGS
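
# Example (illustrative): a Makefile wrapper that exports WAF_MAKE=1 can pass
# 'j' (parallel jobs), 'k' (keep going), 'V=1' (verbosity) and NAME=value
# assignments through MAKEFLAGS; they are mapped onto Options.options here.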

option_groups = {}

def option_group(opt, name):
    '''find or create an option group'''
    global option_groups
    if name in option_groups:
        return option_groups[name]
    gr = opt.add_option_group(name)
    option_groups[name] = gr
    return gr
Options.Handler.option_group = option_group
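
# Usage sketch (hypothetical option): group related options under one heading
# instead of adding them to the top-level list:
#
#   gr = opt.option_group('developer options')
#   gr.add_option('--picky-developer', action='store_true', default=False)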

def save_file(filename, contents, create_dir=False):
    '''save data to a file'''
    if create_dir:
        mkdir_p(os.path.dirname(filename))
    try:
        f = open(filename, 'w')
        f.write(contents)
        f.close()
    except:
        return False
    return True

def load_file(filename):
    '''return contents of a file'''
    try:
        f = open(filename, 'r')
        r = f.read()
        f.close()
    except:
        return None
    return r
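
# Example (illustrative, hypothetical file): both helpers swallow I/O errors
# and signal failure through their return value:
#
#   if not save_file('version.h', '#define VERSION "4.0"\n', create_dir=True):
#       Logs.error('failed to write version.h')
#   data = load_file('version.h')    # -> None if the file cannot be read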