Mirror of https://github.com/samba-team/samba.git
waf: upgrade to 2.0.18
This is required to get the new test_args parameter to conf.check, which
facilitates passing arguments to configuration test programs.

BUG: https://bugzilla.samba.org/show_bug.cgi?id=13846

Signed-off-by: Uri Simchoni <uri@samba.org>
Reviewed-by: Andrew Bartlett <abartlet@samba.org>
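
Editorial note, not part of the commit: a minimal sketch of how the new parameter can be used from a wscript. The fragment and the define name are illustrative only; test_args is forwarded to the test program's argv by the test_exec task (see the c_config.py hunk below).

	# Minimal sketch (hypothetical wscript): run the configuration test program
	# with command-line arguments via the new test_args parameter.
	def configure(conf):
		conf.check(fragment='''
			#include <stdio.h>
			int main(int argc, char **argv) {
				printf("%s", argv[1]);
				return 0;
			}
			''',
			execute=True,          # run the compiled test program
			test_args=['hello'],   # forwarded to the program's argv
			define_ret=True,       # capture stdout into the define
			define_name='TEST_GREETING',
			msg='Checking test_args support')
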
This commit is contained in:
parent 0afd655e80
commit 09e282ec81
buildtools/bin/waf | 2 (vendored)

@@ -32,7 +32,7 @@ POSSIBILITY OF SUCH DAMAGE.
import os, sys, inspect

-VERSION="2.0.17"
+VERSION="2.0.18"
REVISION="x"
GIT="x"
INSTALL="x"
@@ -38,7 +38,7 @@ LIB_PATH="shared"
os.environ['PYTHONUNBUFFERED'] = '1'

-if Context.HEXVERSION not in (0x2001100,):
+if Context.HEXVERSION not in (0x2001200,):
	Logs.error('''
Please use the version of waf that comes with Samba, not
a system installed version. See http://wiki.samba.org/index.php/Waf

third_party/waf/waflib/Configure.py | 20 (vendored)

@@ -524,7 +524,7 @@ def run_build(self, *k, **kw):
	Though this function returns *0* by default, the build may set an attribute named *retval* on the
	build context object to return a particular value. See :py:func:`waflib.Tools.c_config.test_exec_fun` for example.

-	This function also provides a limited cache. To use it, provide the following option::
+	This function also features a cache which can be enabled by the following option::

		def options(opt):
			opt.add_option('--confcache', dest='confcache', default=0,
@@ -535,10 +535,21 @@ def run_build(self, *k, **kw):
		$ waf configure --confcache

	"""
-	lst = [str(v) for (p, v) in kw.items() if p != 'env']
-	h = Utils.h_list(lst)
+	buf = []
+	for key in sorted(kw.keys()):
+		v = kw[key]
+		if hasattr(v, '__call__'):
+			buf.append(Utils.h_fun(v))
+		else:
+			buf.append(str(v))
+	h = Utils.h_list(buf)
	dir = self.bldnode.abspath() + os.sep + (not Utils.is_win32 and '.' or '') + 'conf_check_' + Utils.to_hex(h)
+
+	cachemode = kw.get('confcache', getattr(Options.options, 'confcache', None))
+
+	if not cachemode and os.path.exists(dir):
+		shutil.rmtree(dir)

	try:
		os.makedirs(dir)
	except OSError:
@@ -549,7 +560,6 @@ def run_build(self, *k, **kw):
	except OSError:
		self.fatal('cannot use the configuration test folder %r' % dir)

-	cachemode = getattr(Options.options, 'confcache', None)
	if cachemode == 1:
		try:
			proj = ConfigSet.ConfigSet(os.path.join(dir, 'cache_run_build'))
@@ -589,7 +599,7 @@ def run_build(self, *k, **kw):
		else:
			ret = getattr(bld, 'retval', 0)
	finally:
-		if cachemode == 1:
+		if cachemode:
			# cache the results each time
			proj = ConfigSet.ConfigSet()
			proj['cache_run_build'] = ret

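Editorial note, not part of the commit: with this change the cache key covers every keyword argument (callables are hashed with Utils.h_fun), and an individual check can control caching by passing confcache in its keyword arguments, which is exactly what c_tests.py does further down. A minimal sketch with illustrative check arguments:

	# Minimal sketch (hypothetical wscript): per-check cache control enabled by
	# the run_build() change above.
	def configure(conf):
		# cached when configuring with --confcache
		conf.check(header_name='stdint.h')
		# never cached, regardless of --confcache (same trick as check_endianness)
		conf.check(header_name='sys/random.h', mandatory=False, confcache=None)
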
third_party/waf/waflib/Context.py | 6 (vendored)

@@ -11,13 +11,13 @@ from waflib import Utils, Errors, Logs
import waflib.Node

# the following 3 constants are updated on each new release (do not touch)
-HEXVERSION=0x2001100
+HEXVERSION=0x2001200
"""Constant updated on new releases"""

-WAFVERSION="2.0.17"
+WAFVERSION="2.0.18"
"""Constant updated on new releases"""

-WAFREVISION="6bc6cb599c702e985780e9f705b291b812123693"
+WAFREVISION="314689b8994259a84f0de0aaef74d7ce91f541ad"
"""Git revision when the waf version is updated"""

ABI = 20

third_party/waf/waflib/Scripting.py | 7 (vendored)

@@ -332,7 +332,12 @@ def distclean(ctx):
		else:
			remove_and_log(env.out_dir, shutil.rmtree)

-		for k in (env.out_dir, env.top_dir, env.run_dir):
+		env_dirs = [env.out_dir]
+		if not ctx.options.no_lock_in_top:
+			env_dirs.append(env.top_dir)
+		if not ctx.options.no_lock_in_run:
+			env_dirs.append(env.run_dir)
+		for k in env_dirs:
			p = os.path.join(k, Options.lockfile)
			remove_and_log(p, os.remove)

third_party/waf/waflib/TaskGen.py | 2 (vendored)

@@ -905,7 +905,7 @@ def process_subst(self):
		# paranoid safety measure for the general case foo.in->foo.h with ambiguous dependencies
		for xt in HEADER_EXTS:
			if b.name.endswith(xt):
-				tsk.ext_in = tsk.ext_in + ['.h']
+				tsk.ext_out = tsk.ext_out + ['.h']
				break

		inst_to = getattr(self, 'install_path', None)

third_party/waf/waflib/Tools/asm.py | 37 (vendored)

@@ -34,9 +34,22 @@ Support for pure asm programs and libraries should also work::
		target = 'asmtest')
"""

-from waflib import Task
+import re
+from waflib import Errors, Logs, Task
from waflib.Tools.ccroot import link_task, stlink_task
from waflib.TaskGen import extension
+from waflib.Tools import c_preproc
+
+re_lines = re.compile(
+	'^[ \t]*(?:%)[ \t]*(ifdef|ifndef|if|else|elif|endif|include|import|define|undef)[ \t]*(.*)\r*$',
+	re.IGNORECASE | re.MULTILINE)
+
+class asm_parser(c_preproc.c_parser):
+	def filter_comments(self, node):
+		code = node.read()
+		code = c_preproc.re_nl.sub('', code)
+		code = c_preproc.re_cpp.sub(c_preproc.repl, code)
+		return re_lines.findall(code)

class asm(Task.Task):
	"""
@@ -45,6 +58,28 @@ class asm(Task.Task):
	color = 'BLUE'
	run_str = '${AS} ${ASFLAGS} ${ASMPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${AS_SRC_F}${SRC} ${AS_TGT_F}${TGT}'

+	def scan(self):
+		if self.env.ASM_NAME == 'gas':
+			return c_preproc.scan(self)
+			Logs.warn('There is no dependency scanner for Nasm!')
+			return [[], []]
+		elif self.env.ASM_NAME == 'nasm':
+			Logs.warn('The Nasm dependency scanner is incomplete!')
+
+		try:
+			incn = self.generator.includes_nodes
+		except AttributeError:
+			raise Errors.WafError('%r is missing the "asm" feature' % self.generator)
+
+		if c_preproc.go_absolute:
+			nodepaths = incn
+		else:
+			nodepaths = [x for x in incn if x.is_child_of(x.ctx.srcnode) or x.is_child_of(x.ctx.bldnode)]
+
+		tmp = asm_parser(nodepaths)
+		tmp.start(self.inputs[0], self.env)
+		return (tmp.nodes, tmp.names)

@extension('.s', '.S', '.asm', '.ASM', '.spp', '.SPP')
def asm_hook(self, node):
	"""

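Editorial aside on how the assembler pieces fit together (a hedged sketch, not taken from the commit; file names are illustrative): asm.py gains a dependency scanner, the gas/nasm tools record the assembler in ASM_NAME, and c_aliases.py (next hunk) lets plain .s/.S sources drive program and library targets.

	# Hedged sketch of a wscript using the updated asm support. conf.load('gas')
	# or conf.load('nasm') sets conf.env.ASM_NAME, which asm.scan() consults to
	# decide whether the c_preproc-based dependency scanner applies.
	def configure(conf):
		conf.load('gcc')
		conf.load('gas')	# or conf.load('nasm'); yasm is detected via AS --version

	def build(bld):
		# 'asm' is now sniffed from the .s extension and 'asmprogram' links it
		bld.program(source='main.c sum.s', target='asmtest')
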
third_party/waf/waflib/Tools/c_aliases.py | 6 (vendored)

@@ -47,10 +47,12 @@ def sniff_features(**kw):
		if x in exts:
			feats.append('cxx')
			break

	if 'c' in exts or 'vala' in exts or 'gs' in exts:
		feats.append('c')
+	if 's' in exts or 'S' in exts:
+		feats.append('asm')

	for x in 'f f90 F F90 for FOR'.split():
		if x in exts:
			feats.append('fc')
@@ -66,7 +68,7 @@ def sniff_features(**kw):
	if typ in ('program', 'shlib', 'stlib'):
		will_link = False
		for x in feats:
-			if x in ('cxx', 'd', 'fc', 'c'):
+			if x in ('cxx', 'd', 'fc', 'c', 'asm'):
				feats.append(x + typ)
				will_link = True
		if not will_link and not kw.get('features', []):

third_party/waf/waflib/Tools/c_config.py | 9 (vendored)

@@ -659,20 +659,21 @@ class test_exec(Task.Task):
	"""
	color = 'PINK'
	def run(self):
+		cmd = [self.inputs[0].abspath()] + getattr(self.generator, 'test_args', [])
		if getattr(self.generator, 'rpath', None):
			if getattr(self.generator, 'define_ret', False):
-				self.generator.bld.retval = self.generator.bld.cmd_and_log([self.inputs[0].abspath()])
+				self.generator.bld.retval = self.generator.bld.cmd_and_log(cmd)
			else:
-				self.generator.bld.retval = self.generator.bld.exec_command([self.inputs[0].abspath()])
+				self.generator.bld.retval = self.generator.bld.exec_command(cmd)
		else:
			env = self.env.env or {}
			env.update(dict(os.environ))
			for var in ('LD_LIBRARY_PATH', 'DYLD_LIBRARY_PATH', 'PATH'):
				env[var] = self.inputs[0].parent.abspath() + os.path.pathsep + env.get(var, '')
			if getattr(self.generator, 'define_ret', False):
-				self.generator.bld.retval = self.generator.bld.cmd_and_log([self.inputs[0].abspath()], env=env)
+				self.generator.bld.retval = self.generator.bld.cmd_and_log(cmd, env=env)
			else:
-				self.generator.bld.retval = self.generator.bld.exec_command([self.inputs[0].abspath()], env=env)
+				self.generator.bld.retval = self.generator.bld.exec_command(cmd, env=env)

@feature('test_exec')
@after_method('apply_link')

third_party/waf/waflib/Tools/c_tests.py | 3 (vendored)

@@ -224,6 +224,7 @@ def check_endianness(self):
	def check_msg(self):
		return tmp[0]
	self.check(fragment=ENDIAN_FRAGMENT, features='c grep_for_endianness',
-		msg='Checking for endianness', define='ENDIANNESS', tmp=tmp, okmsg=check_msg)
+		msg='Checking for endianness', define='ENDIANNESS', tmp=tmp,
+		okmsg=check_msg, confcache=None)
	return tmp[0]

third_party/waf/waflib/Tools/gas.py | 1 (vendored)

@@ -16,3 +16,4 @@ def configure(conf):
	conf.env.ASLNK_TGT_F = ['-o']
	conf.find_ar()
	conf.load('asm')
+	conf.env.ASM_NAME = 'gas'

third_party/waf/waflib/Tools/javaw.py | 2 (vendored)

@@ -246,7 +246,7 @@ def use_javac_files(self):
				self.javac_task.dep_nodes.extend(tg.jar_task.outputs)
			else:
				if hasattr(tg, 'outdir'):
-					base_node = tg.outdir.abspath()
+					base_node = tg.outdir
				else:
					base_node = tg.path.get_bld()

third_party/waf/waflib/Tools/nasm.py | 5 (vendored)

@@ -24,3 +24,8 @@ def configure(conf):
	conf.env.ASLNK_TGT_F = ['-o']
	conf.load('asm')
	conf.env.ASMPATH_ST = '-I%s' + os.sep
+	txt = conf.cmd_and_log(conf.env.AS + ['--version'])
+	if 'yasm' in txt.lower():
+		conf.env.ASM_NAME = 'yasm'
+	else:
+		conf.env.ASM_NAME = 'nasm'

third_party/waf/waflib/Tools/python.py | 27 (vendored)

@@ -79,14 +79,19 @@ def process_py(self, node):
	"""
	Add signature of .py file, so it will be byte-compiled when necessary
	"""
-	assert(hasattr(self, 'install_path')), 'add features="py"'
+	assert(hasattr(self, 'install_path')), 'add features="py" for target "%s" in "%s/wscript".' % (self.target, self.path.nice_path())
+	self.install_from = getattr(self, 'install_from', None)
+	relative_trick = getattr(self, 'relative_trick', True)
+	if self.install_from:
+		assert isinstance(self.install_from, Node.Node), \
+			'add features="py" for target "%s" in "%s/wscript" (%s).' % (self.target, self.path.nice_path(), type(self.install_from))

	# where to install the python file
	if self.install_path:
		if self.install_from:
-			self.add_install_files(install_to=self.install_path, install_from=node, cwd=self.install_from, relative_trick=True)
+			self.add_install_files(install_to=self.install_path, install_from=node, cwd=self.install_from, relative_trick=relative_trick)
		else:
-			self.add_install_files(install_to=self.install_path, install_from=node, relative_trick=True)
+			self.add_install_files(install_to=self.install_path, install_from=node, relative_trick=relative_trick)

	lst = []
	if self.env.PYC:
@@ -96,9 +101,11 @@ def process_py(self, node):

	if self.install_path:
		if self.install_from:
-			pyd = Utils.subst_vars("%s/%s" % (self.install_path, node.path_from(self.install_from)), self.env)
+			target_dir = node.path_from(self.install_from) if relative_trick else node.name
+			pyd = Utils.subst_vars("%s/%s" % (self.install_path, target_dir), self.env)
		else:
-			pyd = Utils.subst_vars("%s/%s" % (self.install_path, node.path_from(self.path)), self.env)
+			target_dir = node.path_from(self.path) if relative_trick else node.name
+			pyd = Utils.subst_vars("%s/%s" % (self.install_path, target_dir), self.env)
	else:
		pyd = node.abspath()
@@ -115,7 +122,7 @@ def process_py(self, node):
		tsk.pyd = pyd

	if self.install_path:
-		self.add_install_files(install_to=os.path.dirname(pyd), install_from=pyobj, cwd=node.parent.get_bld(), relative_trick=True)
+		self.add_install_files(install_to=os.path.dirname(pyd), install_from=pyobj, cwd=node.parent.get_bld(), relative_trick=relative_trick)

class pyc(Task.Task):
	"""
@@ -433,11 +440,11 @@ def check_python_headers(conf, features='pyembed pyext'):

	# Code using the Python API needs to be compiled with -fno-strict-aliasing
	if env.CC_NAME == 'gcc':
-		env.append_value('CFLAGS_PYEMBED', ['-fno-strict-aliasing'])
-		env.append_value('CFLAGS_PYEXT', ['-fno-strict-aliasing'])
+		env.append_unique('CFLAGS_PYEMBED', ['-fno-strict-aliasing'])
+		env.append_unique('CFLAGS_PYEXT', ['-fno-strict-aliasing'])
	if env.CXX_NAME == 'gcc':
-		env.append_value('CXXFLAGS_PYEMBED', ['-fno-strict-aliasing'])
-		env.append_value('CXXFLAGS_PYEXT', ['-fno-strict-aliasing'])
+		env.append_unique('CXXFLAGS_PYEMBED', ['-fno-strict-aliasing'])
+		env.append_unique('CXXFLAGS_PYEXT', ['-fno-strict-aliasing'])

	if env.CC_NAME == "msvc":
		from distutils.msvccompiler import MSVCCompiler

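Editorial note on the new relative_trick handling (a hedged sketch with made-up paths, not part of the commit): a 'py' task generator can now install its files flat instead of mirroring the source tree.

	# Hedged sketch: relative_trick defaults to True (previous behaviour, the
	# directory layout is preserved); False installs the .py files and their
	# byte-code directly into install_path.
	def build(bld):
		bld(features='py',
			source=bld.path.ant_glob('scripts/**/*.py'),
			install_path='${PREFIX}/lib/mytool',
			relative_trick=False)
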
third_party/waf/waflib/extras/doxygen.py | 11 (vendored)

@@ -85,6 +85,12 @@ class doxygen(Task.Task):
		if not getattr(self, 'pars', None):
			txt = self.inputs[0].read()
			self.pars = parse_doxy(txt)
+
+			# Override with any parameters passed to the task generator
+			if getattr(self.generator, 'pars', None):
+				for k, v in self.generator.pars.items():
+					self.pars[k] = v
+
			if self.pars.get('OUTPUT_DIRECTORY'):
				# Use the path parsed from the Doxyfile as an absolute path
				output_node = self.inputs[0].parent.get_bld().make_node(self.pars['OUTPUT_DIRECTORY'])
@@ -94,11 +100,6 @@ class doxygen(Task.Task):
			output_node.mkdir()
			self.pars['OUTPUT_DIRECTORY'] = output_node.abspath()

-			# Override with any parameters passed to the task generator
-			if getattr(self.generator, 'pars', None):
-				for k, v in self.generator.pars.items():
-					self.pars[k] = v
-
			self.doxy_inputs = getattr(self, 'doxy_inputs', [])
			if not self.pars.get('INPUT'):
				self.doxy_inputs.append(self.inputs[0].parent)

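The effect of moving this override block (an editorial sketch; attribute values are illustrative): parameters set on the task generator now take effect before OUTPUT_DIRECTORY is resolved, so a wscript can redirect doxygen output without editing the Doxyfile.

	# Hedged sketch of using the doxygen extra after this change; 'pars' entries
	# override the parsed Doxyfile before the output directory is created.
	def build(bld):
		bld(features='doxygen',
			doxyfile='Doxyfile',
			pars={'OUTPUT_DIRECTORY': 'api-docs'})
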
third_party/waf/waflib/extras/fast_partial.py | 28 (vendored)

@@ -18,6 +18,7 @@ Usage::
		opt.load('fast_partial')

Assumptions:
* Start with a clean build (run "waf distclean" after enabling)
+* Mostly for C/C++/Fortran targets with link tasks (object-only targets are not handled)
  try it in the folder generated by utils/genbench.py
* For full project builds: no --targets and no pruning from subfolders
@@ -131,12 +132,18 @@ class bld_proxy(object):
			data[x] = getattr(self, x)
		db = os.path.join(self.variant_dir, Context.DBFILE + self.store_key)

-		try:
-			waflib.Node.pickle_lock.acquire()
+		with waflib.Node.pickle_lock:
			waflib.Node.Nod3 = self.node_class
-			x = Build.cPickle.dumps(data, Build.PROTOCOL)
-		finally:
-			waflib.Node.pickle_lock.release()
+			try:
+				x = Build.cPickle.dumps(data, Build.PROTOCOL)
+			except Build.cPickle.PicklingError:
+				root = data['root']
+				for node_deps in data['node_deps'].values():
+					for idx, node in enumerate(node_deps):
+						# there may be more cross-context Node objects to fix,
+						# but this should be the main source
+						node_deps[idx] = root.find_node(node.abspath())
+				x = Build.cPickle.dumps(data, Build.PROTOCOL)

		Logs.debug('rev_use: storing %s', db)
		Utils.writef(db + '.tmp', x, m='wb')
@@ -393,12 +400,17 @@ def is_stale(self):
		Logs.debug('rev_use: must post %r because this is a clean build')
		return True

-	# 3. check if the configuration changed
-	if os.stat(self.bld.bldnode.find_node('c4che/build.config.py').abspath()).st_mtime > dbstat:
+	# 3.a check if the configuration exists
+	cache_node = self.bld.bldnode.find_node('c4che/build.config.py')
+	if not cache_node:
+		return True
+
+	# 3.b check if the configuration changed
+	if os.stat(cache_node.abspath()).st_mtime > dbstat:
		Logs.debug('rev_use: must post %r because the configuration has changed', self.name)
		return True

-	# 3.a any tstamp data?
+	# 3.c any tstamp data?
	try:
		f_deps = self.bld.f_deps
	except AttributeError:

third_party/waf/waflib/extras/genpybind.py | 194 (vendored, new file)

@@ -0,0 +1,194 @@
import os
import pipes
import subprocess
import sys

from waflib import Logs, Task, Context
from waflib.Tools.c_preproc import scan as scan_impl
# ^-- Note: waflib.extras.gccdeps.scan does not work for us,
# due to its current implementation:
# The -MD flag is injected into the {C,CXX}FLAGS environment variable and
# dependencies are read out in a separate step after compiling by reading
# the .d file saved alongside the object file.
# As the genpybind task refers to a header file that is never compiled itself,
# gccdeps will not be able to extract the list of dependencies.

from waflib.TaskGen import feature, before_method


def join_args(args):
    return " ".join(pipes.quote(arg) for arg in args)


def configure(cfg):
    cfg.load("compiler_cxx")
    cfg.load("python")
    cfg.check_python_version(minver=(2, 7))
    if not cfg.env.LLVM_CONFIG:
        cfg.find_program("llvm-config", var="LLVM_CONFIG")
    if not cfg.env.GENPYBIND:
        cfg.find_program("genpybind", var="GENPYBIND")

    # find clang reasource dir for builtin headers
    cfg.env.GENPYBIND_RESOURCE_DIR = os.path.join(
        cfg.cmd_and_log(cfg.env.LLVM_CONFIG + ["--libdir"]).strip(),
        "clang",
        cfg.cmd_and_log(cfg.env.LLVM_CONFIG + ["--version"]).strip())
    if os.path.exists(cfg.env.GENPYBIND_RESOURCE_DIR):
        cfg.msg("Checking clang resource dir", cfg.env.GENPYBIND_RESOURCE_DIR)
    else:
        cfg.fatal("Clang resource dir not found")


@feature("genpybind")
@before_method("process_source")
def generate_genpybind_source(self):
    """
    Run genpybind on the headers provided in `source` and compile/link the
    generated code instead. This works by generating the code on the fly and
    swapping the source node before `process_source` is run.
    """
    # name of module defaults to name of target
    module = getattr(self, "module", self.target)

    # create temporary source file in build directory to hold generated code
    out = "genpybind-%s.%d.cpp" % (module, self.idx)
    out = self.path.get_bld().find_or_declare(out)

    task = self.create_task("genpybind", self.to_nodes(self.source), out)
    # used to detect whether CFLAGS or CXXFLAGS should be passed to genpybind
    task.features = self.features
    task.module = module
    # can be used to select definitions to include in the current module
    # (when header files are shared by more than one module)
    task.genpybind_tags = self.to_list(getattr(self, "genpybind_tags", []))
    # additional include directories
    task.includes = self.to_list(getattr(self, "includes", []))
    task.genpybind = self.env.GENPYBIND

    # Tell waf to compile/link the generated code instead of the headers
    # originally passed-in via the `source` parameter. (see `process_source`)
    self.source = [out]


class genpybind(Task.Task): # pylint: disable=invalid-name
    """
    Runs genpybind on headers provided as input to this task.
    Generated code will be written to the first (and only) output node.
    """
    quiet = True
    color = "PINK"
    scan = scan_impl

    @staticmethod
    def keyword():
        return "Analyzing"

    def run(self):
        if not self.inputs:
            return

        args = self.find_genpybind() + self._arguments(
            resource_dir=self.env.GENPYBIND_RESOURCE_DIR)

        output = self.run_genpybind(args)

        # For debugging / log output
        pasteable_command = join_args(args)

        # write generated code to file in build directory
        # (will be compiled during process_source stage)
        (output_node,) = self.outputs
        output_node.write("// {}\n{}\n".format(
            pasteable_command.replace("\n", "\n// "), output))

    def find_genpybind(self):
        return self.genpybind

    def run_genpybind(self, args):
        bld = self.generator.bld

        kwargs = dict(cwd=bld.variant_dir)
        if hasattr(bld, "log_command"):
            bld.log_command(args, kwargs)
        else:
            Logs.debug("runner: {!r}".format(args))
        proc = subprocess.Popen(
            args, stdout=subprocess.PIPE, stderr=subprocess.PIPE, **kwargs)
        stdout, stderr = proc.communicate()

        if not isinstance(stdout, str):
            stdout = stdout.decode(sys.stdout.encoding, errors="replace")
        if not isinstance(stderr, str):
            stderr = stderr.decode(sys.stderr.encoding, errors="replace")

        if proc.returncode != 0:
            bld.fatal(
                "genpybind returned {code} during the following call:"
                "\n{command}\n\n{stdout}\n\n{stderr}".format(
                    code=proc.returncode,
                    command=join_args(args),
                    stdout=stdout,
                    stderr=stderr,
                ))

        if stderr.strip():
            Logs.debug("non-fatal warnings during genpybind run:\n{}".format(stderr))

        return stdout

    def _include_paths(self):
        return self.generator.to_incnodes(self.includes + self.env.INCLUDES)

    def _inputs_as_relative_includes(self):
        include_paths = self._include_paths()
        relative_includes = []
        for node in self.inputs:
            for inc in include_paths:
                if node.is_child_of(inc):
                    relative_includes.append(node.path_from(inc))
                    break
            else:
                self.generator.bld.fatal("could not resolve {}".format(node))
        return relative_includes

    def _arguments(self, genpybind_parse=None, resource_dir=None):
        args = []
        relative_includes = self._inputs_as_relative_includes()
        is_cxx = "cxx" in self.features

        # options for genpybind
        args.extend(["--genpybind-module", self.module])
        if self.genpybind_tags:
            args.extend(["--genpybind-tag"] + self.genpybind_tags)
        if relative_includes:
            args.extend(["--genpybind-include"] + relative_includes)
        if genpybind_parse:
            args.extend(["--genpybind-parse", genpybind_parse])

        args.append("--")

        # headers to be processed by genpybind
        args.extend(node.abspath() for node in self.inputs)

        args.append("--")

        # options for clang/genpybind-parse
        args.append("-D__GENPYBIND__")
        args.append("-xc++" if is_cxx else "-xc")
        has_std_argument = False
        for flag in self.env["CXXFLAGS" if is_cxx else "CFLAGS"]:
            flag = flag.replace("-std=gnu", "-std=c")
            if flag.startswith("-std=c"):
                has_std_argument = True
            args.append(flag)
        if not has_std_argument:
            args.append("-std=c++14")
        args.extend("-I{}".format(n.abspath()) for n in self._include_paths())
        args.extend("-D{}".format(p) for p in self.env.DEFINES)

        # point to clang resource dir, if specified
        if resource_dir:
            args.append("-resource-dir={}".format(resource_dir))

        return args

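A hedged usage sketch for the new extra (target and source names are illustrative, not part of the commit): the 'genpybind' feature generates binding code from a header and hands it to the normal C++/pyext build.

	# Hedged sketch: build a Python extension from bindings generated out of a
	# header; 'module' defaults to the target name, 'genpybind_tags' is optional.
	def configure(conf):
		conf.load('compiler_cxx python genpybind')
		conf.check_python_headers()

	def build(bld):
		bld(features='genpybind cxx cxxshlib pyext',
			source='mylib.h',
			target='pymylib',
			genpybind_tags='pymylib',
			use='mylib')
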
third_party/waf/waflib/extras/local_rpath.py | 8 (vendored)

@@ -2,18 +2,20 @@
# encoding: utf-8
# Thomas Nagy, 2011 (ita)

+import copy
from waflib.TaskGen import after_method, feature

@after_method('propagate_uselib_vars')
@feature('cprogram', 'cshlib', 'cxxprogram', 'cxxshlib', 'fcprogram', 'fcshlib')
def add_rpath_stuff(self):
-	all = self.to_list(getattr(self, 'use', []))
+	all = copy.copy(self.to_list(getattr(self, 'use', [])))
	while all:
		name = all.pop()
		try:
			tg = self.bld.get_tgen_by_name(name)
		except:
			continue
-		self.env.append_value('RPATH', tg.link_task.outputs[0].parent.abspath())
-		all.extend(self.to_list(getattr(tg, 'use', [])))
+		if hasattr(tg, 'link_task'):
+			self.env.append_value('RPATH', tg.link_task.outputs[0].parent.abspath())
+			all.extend(self.to_list(getattr(tg, 'use', [])))

third_party/waf/waflib/extras/objcopy.py | 9 (vendored)

@@ -15,7 +15,7 @@ objcopy_flags Additional flags passed to objcopy.
"""

from waflib.Utils import def_attrs
-from waflib import Task
+from waflib import Task, Options
from waflib.TaskGen import feature, after_method

class objcopy(Task.Task):
@@ -46,5 +46,8 @@ def map_objcopy(self):
		self.add_install_files(install_to=self.objcopy_install_path, install_from=task.outputs[0])

def configure(ctx):
-	ctx.find_program('objcopy', var='OBJCOPY', mandatory=True)
+	program_name = 'objcopy'
+	prefix = getattr(Options.options, 'cross_prefix', None)
+	if prefix:
+		program_name = '{}-{}'.format(prefix, program_name)
+	ctx.find_program(program_name, var='OBJCOPY', mandatory=True)

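Editorial note (hedged): the lookup falls back to plain 'objcopy' unless a cross_prefix option is available at configure time; a project that does not already define such an option could add one along these lines (illustrative only).

	# Hedged sketch: a project-defined option the updated objcopy tool will pick
	# up; with --cross-prefix=arm-none-eabi it looks for arm-none-eabi-objcopy.
	def options(opt):
		opt.add_option('--cross-prefix', dest='cross_prefix', default=None,
			help='prefix for cross-toolchain binaries (e.g. arm-none-eabi)')
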