1
0
mirror of https://github.com/samba-team/samba.git synced 2025-07-30 19:42:05 +03:00

wafsamba: Expand tabs.

This commit is contained in:
Jelmer Vernooij
2012-02-09 13:08:31 +01:00
parent 744ed53a62
commit 88a041aa21
10 changed files with 529 additions and 529 deletions

View File

@ -18,111 +18,111 @@ preprocessor_flag = '-MD'
@feature('cc')
@before('apply_core')
def add_mmd_cc(self):
    """Ensure the C compiler writes dependency files (adds '-MD' once)."""
    # get_flat() returns CCFLAGS as one string; only append the flag
    # if it is not already present, to keep the flag list idempotent.
    if self.env.get_flat('CCFLAGS').find(preprocessor_flag) < 0:
        self.env.append_value('CCFLAGS', preprocessor_flag)
@feature('cxx')
@before('apply_core')
def add_mmd_cxx(self):
    """Ensure the C++ compiler writes dependency files (adds '-MD' once)."""
    # same idempotent append as add_mmd_cc, but for CXXFLAGS
    if self.env.get_flat('CXXFLAGS').find(preprocessor_flag) < 0:
        self.env.append_value('CXXFLAGS', preprocessor_flag)
def scan(self):
    """Initial scanner: return whatever dependencies were recorded earlier.

    The real dependency information is produced by post_run() from the
    compiler's .d file; here we only return the cached node list (empty
    on the first build) plus an empty list of unresolved names.
    """
    nodes = self.generator.bld.node_deps.get(self.unique_id(), [])
    names = []
    return (nodes, names)
# match the '.o' suffix of an object file (used to derive the '.d' file name);
# raw strings avoid invalid-escape warnings on '\.' in modern Python
re_o = re.compile(r"\.o$")
# match paths of the form '../<rest>' (relative to the parent of the build dir)
re_src = re.compile(r"^(\.\.)[\\/](.*)$")
def post_run(self):
    """Read the compiler-generated .d file and record the real dependencies.

    Replaces the default Task.post_run for cc/cxx tasks: parses the
    Makefile-style dependency file written next to the object file
    (produced by -MD), resolves each path to a node and stores the
    result in bld.node_deps / bld.raw_deps.
    """
    # The following code is executed by threads, it is not safe, so a lock is needed...
    if getattr(self, 'cached', None):
        return Task.Task.post_run(self)

    name = self.outputs[0].abspath(self.env)
    name = re_o.sub('.d', name)
    txt = Utils.readf(name)
    #os.unlink(name)

    # join the backslash line continuations into one logical line
    txt = txt.replace('\\\n', '')

    # drop the 'target:' prefix, keep the whitespace-separated dep paths
    lst = txt.strip().split(':')
    val = ":".join(lst[1:])
    val = val.split()

    nodes = []
    bld = self.generator.bld

    f = re.compile(r"^(" + self.env.variant() + r"|\.\.)[\\/](.*)$")
    for x in val:
        if os.path.isabs(x):
            if not preproc.go_absolute:
                continue
            lock.acquire()
            try:
                node = bld.root.find_resource(x)
            finally:
                lock.release()
        else:
            g = re.search(re_src, x)
            if g:
                # path relative to the parent of the build directory
                x = g.group(2)
                lock.acquire()
                try:
                    node = bld.bldnode.parent.find_resource(x)
                finally:
                    lock.release()
            else:
                g = re.search(f, x)
                if g:
                    x = g.group(2)
                lock.acquire()
                try:
                    node = bld.srcnode.find_resource(x)
                finally:
                    lock.release()

        if id(node) == id(self.inputs[0]):
            # ignore the source file, it is already in the dependencies
            # this way, successful config tests may be retrieved from the cache
            continue

        if not node:
            raise ValueError('could not find %r for %r' % (x, self))
        else:
            nodes.append(node)
    Logs.debug('deps: real scanner for %s returned %s' % (str(self), str(nodes)))

    bld.node_deps[self.unique_id()] = nodes
    bld.raw_deps[self.unique_id()] = []

    # invalidate the cached signature so it is recomputed with the new deps
    try:
        del self.cache_sig
    except AttributeError:
        pass

    Task.Task.post_run(self)
import Constants, Utils
def sig_implicit_deps(self):
    """Compute the implicit-dependency signature, tolerating failures.

    If the normal computation raises (e.g. a recorded dependency no
    longer exists), return SIG_NIL so the task is simply considered
    out of date instead of aborting the build.
    """
    try:
        return Task.Task.sig_implicit_deps(self)
    except Utils.WafError:
        return Constants.SIG_NIL
# install the dependency-tracking overrides on the cc and cxx task classes
for name in 'cc cxx'.split():
    try:
        cls = Task.TaskBase.classes[name]
    except KeyError:
        # the corresponding tool was not loaded; nothing to patch
        pass
    else:
        cls.post_run = post_run
        cls.scan = scan
        cls.sig_implicit_deps = sig_implicit_deps

View File

@ -14,50 +14,50 @@ c_compiler['hpux'] = ['gcc', 'generic_cc']
@conftest
def find_generic_cc(conf):
    """Locate a C compiler: $CC from the env/environment, else 'cc' on PATH."""
    v = conf.env
    cc = None
    if v['CC']: cc = v['CC']
    elif 'CC' in conf.environ: cc = conf.environ['CC']
    if not cc: cc = conf.find_program('cc', var='CC')
    if not cc: conf.fatal('generic_cc was not found')
    cc = conf.cmd_to_list(cc)
    v['CC'] = cc
    v['CC_NAME'] = 'generic'
@conftest
def generic_cc_common_flags(conf):
    """Set the standard compile/link flag templates for a generic cc."""
    v = conf.env

    v['CC_SRC_F'] = ''
    v['CC_TGT_F'] = ['-c', '-o', '']
    v['CPPPATH_ST'] = '-I%s' # template for adding include paths

    # linker
    if not v['LINK_CC']: v['LINK_CC'] = v['CC']
    v['CCLNK_SRC_F'] = ''
    v['CCLNK_TGT_F'] = ['-o', '']

    v['LIB_ST'] = '-l%s' # template for adding libs
    v['LIBPATH_ST'] = '-L%s' # template for adding libpaths
    v['STATICLIB_ST'] = '-l%s'
    v['STATICLIBPATH_ST'] = '-L%s'
    v['CCDEFINES_ST'] = '-D%s'

    # v['SONAME_ST'] = '-Wl,-h -Wl,%s'
    # v['SHLIB_MARKER'] = '-Bdynamic'
    # v['STATICLIB_MARKER'] = '-Bstatic'

    # program
    v['program_PATTERN'] = '%s'

    # shared library
    # v['shlib_CCFLAGS'] = ['-Kpic', '-DPIC']
    # v['shlib_LINKFLAGS'] = ['-G']
    v['shlib_PATTERN'] = 'lib%s.so'

    # static lib
    # v['staticlib_LINKFLAGS'] = ['-Bstatic']
    # v['staticlib_PATTERN'] = 'lib%s.a'
detect = '''
find_generic_cc

View File

@ -13,57 +13,57 @@ c_compiler['irix'] = ['gcc', 'irixcc']
@conftest
def find_irixcc(conf):
    """Locate the IRIX MIPSpro cc and verify it runs; configure CC/CC_NAME."""
    v = conf.env
    cc = None
    if v['CC']: cc = v['CC']
    elif 'CC' in conf.environ: cc = conf.environ['CC']
    if not cc: cc = conf.find_program('cc', var='CC')
    if not cc: conf.fatal('irixcc was not found')
    cc = conf.cmd_to_list(cc)

    try:
        # MIPSpro cc prints nothing on '-version' when it is the real thing
        if Utils.cmd_output(cc + ['-version']) != '':
            conf.fatal('irixcc %r was not found' % cc)
    except ValueError:
        conf.fatal('irixcc -v could not be executed')

    v['CC'] = cc
    v['CC_NAME'] = 'irix'
@conftest
def irixcc_common_flags(conf):
    """Set the standard compile/link flag templates for IRIX cc."""
    v = conf.env

    v['CC_SRC_F'] = ''
    v['CC_TGT_F'] = ['-c', '-o', '']
    v['CPPPATH_ST'] = '-I%s' # template for adding include paths

    # linker
    if not v['LINK_CC']: v['LINK_CC'] = v['CC']
    v['CCLNK_SRC_F'] = ''
    v['CCLNK_TGT_F'] = ['-o', '']

    v['LIB_ST'] = '-l%s' # template for adding libs
    v['LIBPATH_ST'] = '-L%s' # template for adding libpaths
    v['STATICLIB_ST'] = '-l%s'
    v['STATICLIBPATH_ST'] = '-L%s'
    v['CCDEFINES_ST'] = '-D%s'

    # v['SONAME_ST'] = '-Wl,-h -Wl,%s'
    # v['SHLIB_MARKER'] = '-Bdynamic'
    # v['STATICLIB_MARKER'] = '-Bstatic'

    # program
    v['program_PATTERN'] = '%s'

    # shared library
    # v['shlib_CCFLAGS'] = ['-Kpic', '-DPIC']
    # v['shlib_LINKFLAGS'] = ['-G']
    v['shlib_PATTERN'] = 'lib%s.so'

    # static lib
    # v['staticlib_LINKFLAGS'] = ['-Bstatic']
    # v['staticlib_PATTERN'] = 'lib%s.a'
detect = '''
find_irixcc

View File

@ -22,195 +22,195 @@ GAP = 15
run_old = threading.Thread.run
def run(*args, **kwargs):
    """Wrapper for Thread.run: report uncaught exceptions via sys.excepthook
    instead of letting a consumer thread die silently."""
    try:
        run_old(*args, **kwargs)
    except (KeyboardInterrupt, SystemExit):
        # interrupts must propagate so the build can actually be stopped
        raise
    except:
        sys.excepthook(*sys.exc_info())
threading.Thread.run = run
class TaskConsumer(object):
    # class-level counter shared by all consumers -- presumably tracks how
    # many consumer threads exist; TODO confirm against the scheduler code
    consumers = 1
def process(tsk):
    """Run one task and hand it back to the master (tsk.master).

    Finished tasks are returned through the master's 'out' queue;
    failures are reported through master.error_handler(). The task's
    hasrun attribute is set to SUCCESS, CRASHED or EXCEPTION.
    """
    m = tsk.master
    if m.stop:
        # the build is being aborted: return the task without running it
        m.out.put(tsk)
        return

    try:
        tsk.generator.bld.printout(tsk.display())
        if tsk.__class__.stat: ret = tsk.__class__.stat(tsk)
        # actual call to task's run() function
        else: ret = tsk.call_run()
    except Exception:
        tsk.err_msg = Utils.ex_stack()
        tsk.hasrun = EXCEPTION

        # TODO cleanup
        m.error_handler(tsk)
        m.out.put(tsk)
        return

    if ret:
        # a non-zero return code from the task means it crashed
        tsk.err_code = ret
        tsk.hasrun = CRASHED
    else:
        try:
            tsk.post_run()
        except Utils.WafError:
            pass
        except Exception:
            tsk.err_msg = Utils.ex_stack()
            tsk.hasrun = EXCEPTION
        else:
            tsk.hasrun = SUCCESS
    if tsk.hasrun != SUCCESS:
        m.error_handler(tsk)

    m.out.put(tsk)
class Parallel(object):
    """
    keep the consumer threads busy, and avoid consuming cpu cycles
    when no more tasks can be added (end of the build, etc)
    """
    def __init__(self, bld, j=2):
        # number of consumers
        self.numjobs = j

        self.manager = bld.task_manager
        self.manager.current_group = 0

        self.total = self.manager.total()

        # tasks waiting to be processed - IMPORTANT
        self.outstanding = []
        self.maxjobs = MAXJOBS

        # tasks that are awaiting for another task to complete
        self.frozen = []

        # tasks returned by the consumers
        self.out = Queue(0)

        self.count = 0 # tasks not in the producer area

        self.processed = 1 # progress indicator

        self.stop = False # error condition to stop the build
        self.error = False # error flag

    def get_next(self):
        "override this method to schedule the tasks in a particular order"
        if not self.outstanding:
            return None
        return self.outstanding.pop(0)

    def postpone(self, tsk):
        "override this method to schedule the tasks in a particular order"
        # TODO consider using a deque instead
        # randomize insertion to avoid pathological re-try orderings
        if random.randint(0, 1):
            self.frozen.insert(0, tsk)
        else:
            self.frozen.append(tsk)

    def refill_task_list(self):
        "called to set the next group of tasks"

        # throttle: wait for consumers when too many tasks are in flight
        while self.count > self.numjobs + GAP or self.count >= self.maxjobs:
            self.get_out()

        while not self.outstanding:
            if self.count:
                self.get_out()

            if self.frozen:
                self.outstanding += self.frozen
                self.frozen = []
            elif not self.count:
                (jobs, tmp) = self.manager.get_next_set()
                if jobs is not None: self.maxjobs = jobs
                if tmp: self.outstanding += tmp
                break

    def get_out(self):
        "the tasks that are put to execute are all collected using get_out"
        ret = self.out.get()
        self.manager.add_finished(ret)
        if not self.stop and getattr(ret, 'more_tasks', None):
            # a task may spawn additional tasks once it has run
            self.outstanding += ret.more_tasks
            self.total += len(ret.more_tasks)
        self.count -= 1

    def error_handler(self, tsk):
        "by default, errors make the build stop (not thread safe so be careful)"
        if not Options.options.keep:
            self.stop = True
        self.error = True

    def start(self):
        "execute the tasks"

        while not self.stop:

            self.refill_task_list()

            # consider the next task
            tsk = self.get_next()
            if not tsk:
                if self.count:
                    # tasks may add new ones after they are run
                    continue
                else:
                    # no tasks to run, no tasks running, time to exit
                    break

            if tsk.hasrun:
                # if the task is marked as "run", just skip it
                self.processed += 1
                self.manager.add_finished(tsk)
                continue

            try:
                st = tsk.runnable_status()
            except Exception:
                self.processed += 1
                if self.stop and not Options.options.keep:
                    tsk.hasrun = SKIPPED
                    self.manager.add_finished(tsk)
                    continue
                self.error_handler(tsk)
                self.manager.add_finished(tsk)
                tsk.hasrun = EXCEPTION
                tsk.err_msg = Utils.ex_stack()
                continue

            if st == ASK_LATER:
                self.postpone(tsk)
            elif st == SKIP_ME:
                self.processed += 1
                tsk.hasrun = SKIPPED
                self.manager.add_finished(tsk)
            else:
                # run me: put the task in ready queue
                tsk.position = (self.processed, self.total)
                self.count += 1
                self.processed += 1
                tsk.master = self

                process(tsk)

        # self.count represents the tasks that have been made available to the consumer threads
        # collect all the tasks after an error else the message may be incomplete
        while self.error and self.count:
            self.get_out()

        #print loop
        assert (self.count == 0 or self.stop)
# enable nothreads

View File

@ -46,7 +46,7 @@ Build.BuildContext.SAMBA3_IS_ENABLED_MODULE = SAMBA3_IS_ENABLED_MODULE
def s3_fix_kwargs(bld, kwargs):
'''fix the build arguments for s3 build rules to include the
necessary includes, subdir and cflags options '''
necessary includes, subdir and cflags options '''
s3dir = os.path.join(bld.env.srcdir, 'source3')
s3reldir = os_path_relpath(s3dir, bld.curdir)
@ -92,23 +92,23 @@ def s3_fix_kwargs(bld, kwargs):
# these wrappers allow for mixing of S3 and S4 build rules in the one build

def SAMBA3_LIBRARY(bld, name, *args, **kwargs):
    '''source3 wrapper: fix up kwargs then delegate to SAMBA_LIBRARY'''
    s3_fix_kwargs(bld, kwargs)
    return bld.SAMBA_LIBRARY(name, *args, **kwargs)
Build.BuildContext.SAMBA3_LIBRARY = SAMBA3_LIBRARY

def SAMBA3_MODULE(bld, name, *args, **kwargs):
    '''source3 wrapper: fix up kwargs then delegate to SAMBA_MODULE'''
    s3_fix_kwargs(bld, kwargs)
    return bld.SAMBA_MODULE(name, *args, **kwargs)
Build.BuildContext.SAMBA3_MODULE = SAMBA3_MODULE

def SAMBA3_SUBSYSTEM(bld, name, *args, **kwargs):
    '''source3 wrapper: fix up kwargs then delegate to SAMBA_SUBSYSTEM'''
    s3_fix_kwargs(bld, kwargs)
    return bld.SAMBA_SUBSYSTEM(name, *args, **kwargs)
Build.BuildContext.SAMBA3_SUBSYSTEM = SAMBA3_SUBSYSTEM

def SAMBA3_BINARY(bld, name, *args, **kwargs):
    '''source3 wrapper: fix up kwargs then delegate to SAMBA_BINARY'''
    s3_fix_kwargs(bld, kwargs)
    return bld.SAMBA_BINARY(name, *args, **kwargs)
Build.BuildContext.SAMBA3_BINARY = SAMBA3_BINARY
def SAMBA3_PYTHON(bld, name, *args, **kwargs):

View File

@ -295,7 +295,7 @@ def CHECK_PERL_MANPAGE(conf, msg=None, section=None):
dest.write("""
use ExtUtils::MakeMaker;
WriteMakefile(
'NAME' => 'WafTest',
'NAME' => 'WafTest',
'EXE_FILES' => [ 'WafTest' ]
);
""")

View File

@ -12,50 +12,50 @@ import preproc, Task
@feature('cc', 'cxx')
@after('apply_type_vars', 'apply_lib_vars', 'apply_core')
def apply_incpaths(self):
    """Resolve include paths (uselib CPPPATH, self.includes) to nodes.

    Caches path -> node lookups in self.bld.kak to avoid repeating
    find_dir() calls across task generators. Resolved nodes are
    appended to env INC_PATHS.
    """
    lst = []

    # per-build lookup cache, created lazily on first use
    try:
        kak = self.bld.kak
    except AttributeError:
        kak = self.bld.kak = {}

    # TODO move the uselib processing out of here
    for lib in self.to_list(self.uselib):
        for path in self.env['CPPPATH_' + lib]:
            if path not in lst:
                lst.append(path)
    if preproc.go_absolute:
        for path in preproc.standard_includes:
            if path not in lst:
                lst.append(path)

    for path in self.to_list(self.includes):
        if path not in lst:
            if preproc.go_absolute or path[0] != '/': #os.path.isabs(path):
                lst.append(path)
            else:
                self.env.prepend_value('CPPPATH', path)

    for path in lst:
        node = None
        if path[0] == '/': # os.path.isabs(path):
            if preproc.go_absolute:
                node = self.bld.root.find_dir(path)
        elif path[0] == '#':
            # '#' prefix means relative to the source root
            node = self.bld.srcnode
            if len(path) > 1:
                try:
                    node = kak[path]
                except KeyError:
                    kak[path] = node = node.find_dir(path[1:])
        else:
            # relative to this task generator's directory; cache key
            # includes the directory id so equal strings do not collide
            try:
                node = kak[(self.path.id, path)]
            except KeyError:
                kak[(self.path.id, path)] = node = self.path.find_dir(path)

        if node:
            self.env.append_value('INC_PATHS', node)
@feature('cc')
@after('apply_incpaths')
@ -91,13 +91,13 @@ def apply_obj_vars_cc(self):
import Node, Environment
def vari(self):
    """Environment.variant override: there is only one variant, 'default'."""
    return "default"
Environment.Environment.variant = vari

def variant(self, env):
    """Node.variant override: source files get variant 0, everything
    else lives in the single 'default' variant."""
    if not env: return 0
    elif self.id & 3 == Node.FILE: return 0
    else: return "default"
Node.Node.variant = variant
@ -113,13 +113,13 @@ def create_task(self, name, src=None, tgt=None):
TaskGen.task_gen.create_task = create_task
def hash_constraints(self):
    """Return a hash of this task class's ordering constraints.

    Presumably used to group task classes that share identical
    scheduling constraints -- TODO confirm against the scheduler.
    """
    a = self.attr
    # renamed from 'sum' to avoid shadowing the builtin
    h = hash((str(a('before', '')),
        str(a('after', '')),
        str(a('ext_in', '')),
        str(a('ext_out', '')),
        self.__class__.maxjobs))
    return h
Task.TaskBase.hash_constraints = hash_constraints
@ -129,37 +129,37 @@ Task.TaskBase.hash_constraints = hash_constraints
# @extension(cc.EXT_CC)
# def c_hook(self, node):
# task = self.create_task('cc', node, node.change_ext('.o'))
# try:
# self.compiled_tasks.append(task)
# except AttributeError:
# raise Utils.WafError('Have you forgotten to set the feature "cc" on %s?' % str(self))
# task = self.create_task('cc', node, node.change_ext('.o'))
# try:
# self.compiled_tasks.append(task)
# except AttributeError:
# raise Utils.WafError('Have you forgotten to set the feature "cc" on %s?' % str(self))
# bld = self.bld
# try:
# dc = bld.dc
# except AttributeError:
# dc = bld.dc = {}
# bld = self.bld
# try:
# dc = bld.dc
# except AttributeError:
# dc = bld.dc = {}
# if task.outputs[0].id in dc:
# raise Utils.WafError('Samba, you are doing it wrong %r %s %s' % (task.outputs, task.generator, dc[task.outputs[0].id].generator))
# else:
# dc[task.outputs[0].id] = task
# if task.outputs[0].id in dc:
# raise Utils.WafError('Samba, you are doing it wrong %r %s %s' % (task.outputs, task.generator, dc[task.outputs[0].id].generator))
# else:
# dc[task.outputs[0].id] = task
# return task
# return task
def suncc_wrap(cls):
    '''work around a problem with cc on solaris not handling module aliases
    which have empty libs'''
    # only wrap a class once
    if getattr(cls, 'solaris_wrap', False):
        return
    cls.solaris_wrap = True
    oldrun = cls.run
    def run(self):
        if self.env.CC_NAME == "sun" and not self.inputs:
            # pass '-' so the sun linker accepts an empty input list
            self.env = self.env.copy()
            self.env.append_value('LINKFLAGS', '-')
        return oldrun(self)
    cls.run = run
suncc_wrap(Task.TaskBase.classes['cc_link'])

View File

@ -31,8 +31,8 @@ def SAMBA_PIDL(bld, pname, source,
'--client' : 'ndr_%s_c.c ndr_%s_c.h',
'--python' : 'py_%s.c',
'--tdr-parser' : 'tdr_%s.c tdr_%s.h',
'--dcom-proxy' : '%s_p.c',
'--com-header' : 'com_%s.h'
'--dcom-proxy' : '%s_p.c',
'--com-header' : 'com_%s.h'
}
table_header_idx = None

View File

@ -8,148 +8,148 @@ from Constants import *
from samba_utils import *
def run_task(t, k):
    '''run a single build task

    Raises Utils.WafError (naming the target k) when the task's run()
    returns a non-zero code.
    '''
    ret = t.run()
    if ret:
        raise Utils.WafError("Failed to build %s: %u" % (k, ret))
def run_named_build_task(cmd):
    '''run a named build task, matching the cmd name using fnmatch
    wildcards against inputs and outputs of all build tasks'''
    bld = fake_build_environment(info=False)
    found = False
    cwd_node = bld.root.find_dir(os.getcwd())
    top_node = bld.root.find_dir(bld.srcnode.abspath())

    cmd = os.path.normpath(cmd)

    # cope with builds of bin/*/*
    if os.path.islink(cmd):
        cmd = os_path_relpath(os.readlink(cmd), os.getcwd())

    # strip the default build-directory prefix
    if cmd[0:12] == "bin/default/":
        cmd = cmd[12:]

    for g in bld.task_manager.groups:
        for attr in ['outputs', 'inputs']:
            for t in g.tasks:
                s = getattr(t, attr, [])
                for k in s:
                    # try paths relative to both cwd and the source root
                    relpath1 = k.relpath_gen(cwd_node)
                    relpath2 = k.relpath_gen(top_node)
                    if (fnmatch.fnmatch(relpath1, cmd) or
                        fnmatch.fnmatch(relpath2, cmd)):
                        t.position = [0,0]
                        print(t.display())
                        run_task(t, k)
                        found = True

    if not found:
        raise Utils.WafError("Unable to find build target matching %s" % cmd)
def rewrite_compile_targets():
    '''cope with the bin/ form of compile target'''
    if not Options.options.compile_targets:
        return

    bld = fake_build_environment(info=False)
    targets = LOCAL_CACHE(bld, 'TARGET_TYPE')
    tlist = []

    for t in Options.options.compile_targets.split(','):
        if not os.path.islink(t):
            tlist.append(t)
            continue
        link = os.readlink(t)
        # renamed from 'list' to avoid shadowing the builtin
        parts = link.split('/')
        # try the basename first, then the last two path components
        for name in [parts[-1], '/'.join(parts[-2:])]:
            if name in targets:
                tlist.append(name)
                # NOTE(review): this 'continue' only advances the inner
                # candidate loop; a 'break' may have been intended --
                # behaviour kept as-is
                continue
    Options.options.compile_targets = ",".join(tlist)
def wildcard_main(missing_cmd_fn):
    '''this replaces main from Scripting, allowing us to override the
    behaviour for unknown commands

    If a unknown command is found, then missing_cmd_fn() is called with
    the name of the requested command
    '''
    Scripting.commands = Options.arg_line[:]

    # rewrite the compile targets to cope with the bin/xx form
    rewrite_compile_targets()

    while Scripting.commands:
        x = Scripting.commands.pop(0)

        ini = datetime.datetime.now()
        if x == 'configure':
            fun = Scripting.configure
        elif x == 'build':
            fun = Scripting.build
        else:
            fun = getattr(Utils.g_module, x, None)

        # this is the new addition on top of main from Scripting.py
        if not fun:
            missing_cmd_fn(x)
            break

        ctx = getattr(Utils.g_module, x + '_context', Utils.Context)()

        if x in ['init', 'shutdown', 'dist', 'distclean', 'distcheck']:
            # compatibility: these commands may or may not accept a context
            try:
                fun(ctx)
            except TypeError:
                fun()
        else:
            fun(ctx)

        ela = ''
        if not Options.options.progress_bar:
            ela = ' (%s)' % Utils.get_elapsed_time(ini)

        if x != 'init' and x != 'shutdown':
            Logs.info('%r finished successfully%s' % (x, ela))

        # always finish with a shutdown command
        if not Scripting.commands and x != 'shutdown':
            Scripting.commands.append('shutdown')
def fake_build_environment(info=True, flush=False):
    """create all the tasks for the project, but do not run the build
    return the build context in use"""
    bld = getattr(Utils.g_module, 'build_context', Utils.Context)()
    bld = Scripting.check_configured(bld)

    # make sure nothing gets installed as a side effect
    Options.commands['install'] = False
    Options.commands['uninstall'] = False
    Options.is_install = False

    bld.is_install = 0 # False

    try:
        proj = Environment.Environment(Options.lockfile)
    except IOError:
        raise Utils.WafError("Project not configured (run 'waf configure' first)")

    bld.load_dirs(proj[SRCDIR], proj[BLDDIR])
    bld.load_envs()

    if info:
        Logs.info("Waf: Entering directory `%s'" % bld.bldnode.abspath())
    bld.add_subdirs([os.path.split(Utils.g_module.root_path)[0]])

    bld.pre_build()
    if flush:
        bld.flush()
    return bld

View File

@ -13,57 +13,57 @@ c_compiler['osf1V'] = ['gcc', 'tru64cc']
@conftest
def find_tru64cc(conf):
    """Locate the Tru64 (OSF/1) cc and verify it runs; configure CC/CC_NAME."""
    v = conf.env
    cc = None
    if v['CC']: cc = v['CC']
    elif 'CC' in conf.environ: cc = conf.environ['CC']
    if not cc: cc = conf.find_program('cc', var='CC')
    if not cc: conf.fatal('tru64cc was not found')
    cc = conf.cmd_to_list(cc)

    try:
        # the real Tru64 cc prints version information on '-V'
        if not Utils.cmd_output(cc + ['-V']):
            conf.fatal('tru64cc %r was not found' % cc)
    except ValueError:
        conf.fatal('tru64cc -V could not be executed')

    v['CC'] = cc
    v['CC_NAME'] = 'tru64'
@conftest
def tru64cc_common_flags(conf):
    """Set the standard compile/link flag templates for Tru64 cc."""
    v = conf.env

    v['CC_SRC_F'] = ''
    v['CC_TGT_F'] = ['-c', '-o', '']
    v['CPPPATH_ST'] = '-I%s' # template for adding include paths

    # linker
    if not v['LINK_CC']: v['LINK_CC'] = v['CC']
    v['CCLNK_SRC_F'] = ''
    v['CCLNK_TGT_F'] = ['-o', '']

    v['LIB_ST'] = '-l%s' # template for adding libs
    v['LIBPATH_ST'] = '-L%s' # template for adding libpaths
    v['STATICLIB_ST'] = '-l%s'
    v['STATICLIBPATH_ST'] = '-L%s'
    v['CCDEFINES_ST'] = '-D%s'

    # v['SONAME_ST'] = '-Wl,-h -Wl,%s'
    # v['SHLIB_MARKER'] = '-Bdynamic'
    # v['STATICLIB_MARKER'] = '-Bstatic'

    # program
    v['program_PATTERN'] = '%s'

    # shared library
    # v['shlib_CCFLAGS'] = ['-Kpic', '-DPIC']
    v['shlib_LINKFLAGS'] = ['-shared']
    v['shlib_PATTERN'] = 'lib%s.so'

    # static lib
    # v['staticlib_LINKFLAGS'] = ['-Bstatic']
    # v['staticlib_PATTERN'] = 'lib%s.a'
detect = '''
find_tru64cc