1
0
mirror of https://github.com/samba-team/samba.git synced 2025-03-27 22:50:26 +03:00

third_party:waf: update to upstream 2.0.4 release

Update third_party/waf/ to 2.0.4 to bring us closer to Python 3

This change requires a number of changes in buildtools/ too.

Signed-off-by: Alexander Bokovoy <ab@samba.org>
Reviewed-by: Andrew Bartlett <abartlet@samba.org>
This commit is contained in:
Alexander Bokovoy 2018-01-31 11:48:43 +02:00 committed by Andrew Bartlett
parent faef275069
commit 4e65b33c1d
219 changed files with 9916 additions and 4585 deletions

33
buildtools/bin/waf vendored
View File

@ -1,7 +1,7 @@
#!/usr/bin/env python
# encoding: ISO8859-1
# Thomas Nagy, 2005-2015
# encoding: latin-1
# Thomas Nagy, 2005-2018
#
"""
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
@ -32,16 +32,18 @@ POSSIBILITY OF SUCH DAMAGE.
import os, sys, inspect
VERSION="1.9.10"
VERSION="2.0.4"
REVISION="x"
GIT="x"
INSTALL=''
C1='#>'
C2='#6'
C3='#4'
INSTALL="x"
C1='x'
C2='x'
C3='x'
cwd = os.getcwd()
join = os.path.join
if sys.hexversion<0x206000f:
raise ImportError('Python >= 2.6 is required to create the waf file')
WAF='waf'
def b(x):
@ -129,15 +131,16 @@ def test(dir):
pass
def find_lib():
return os.path.abspath(os.path.join(os.path.dirname(__file__), '../../third_party/waf'))
path = '../../third_party/waf'
paths = [path, path+'/waflib']
return [os.path.abspath(os.path.join(os.path.dirname(__file__), x)) for x in paths]
wafdir = find_lib()
sys.path.insert(0, wafdir)
for p in wafdir:
sys.path.insert(0, p)
if __name__ == '__main__':
# TODO: remove these when possible
from waflib.extras import compat15
#import extras.compat15#PRELUDE
import sys
from waflib.Tools import ccroot, c, ar, compiler_c, gcc
@ -147,7 +150,7 @@ if __name__ == '__main__':
sys.modules['compiler_cc'] = compiler_c
sys.modules['gcc'] = gcc
from waflib import Options
from waflib import Options
Options.lockfile = os.environ.get('WAFLOCK', '.lock-wscript')
if os.path.isfile(Options.lockfile) and os.stat(Options.lockfile).st_size == 0:
os.environ['NOCLIMB'] = "1"
@ -160,5 +163,5 @@ if __name__ == '__main__':
Task.classes['cc_link'] = o
from waflib import Scripting
Scripting.waf_entry_point(cwd, VERSION, wafdir)
Scripting.waf_entry_point(cwd, VERSION, wafdir[0])

164
buildtools/bin/waf-1.9 Executable file
View File

@ -0,0 +1,164 @@
#!/usr/bin/env python
# encoding: ISO8859-1
# Thomas Nagy, 2005-2015
"""
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
3. The name of the author may not be used to endorse or promote products
derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR
IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT,
INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING
IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
"""
import os, sys, inspect
# waf-light bootstrap constants. The 'x' placeholders are presumably
# substituted when a full waf binary is generated — TODO confirm against
# upstream waf-light.
VERSION="1.9.10"
REVISION="x"
GIT="x"
INSTALL=''
C1='#>'   # escape token for '\n' in the embedded archive payload (see unpack_wafdir)
C2='#6'   # escape token for '\r'
C3='#4'   # escape token for '\x00'
cwd = os.getcwd()      # remembered so unpack_wafdir can restore the CWD
join = os.path.join    # shorthand used throughout this script
WAF='waf'
if sys.hexversion > 0x300000f:
	# Python 3: source text is str, but the embedded archive payload is bytes.
	WAF = 'waf3'
	def b(x):
		"""Encode *x* (str) to bytes for comparison against file contents."""
		return x.encode()
else:
	def b(x):
		"""Python 2: str is already bytes, so return *x* unchanged."""
		return x
def err(m):
	"""Print *m* as a red ANSI error message and terminate with exit code 1."""
	text = '\033[91mError: %s\033[0m' % m
	print(text)
	sys.exit(1)
def unpack_wafdir(dir, src):
	"""Extract the waf library archive embedded in *src* into *dir*/waflib.

	The waf-light self-extractor stores a bzip2 tarball between '#==>' and
	'#<==' marker lines, with newline/CR/NUL bytes escaped via the C1/C2/C3
	tokens. On failure the target directory is removed and err() aborts the
	process. NOTE(review): this function changes the process CWD while
	working and restores it to the module-level `cwd` at the end.
	"""
	f = open(src,'rb')
	c = 'corrupt archive (%d)'
	# Scan for the '#==>' marker; the single line after it is the payload.
	while 1:
		line = f.readline()
		if not line: err('run waf-light from a folder containing waflib')
		if line == b('#==>\n'):
			txt = f.readline()
			if not txt: err(c % 1)
			if f.readline() != b('#<==\n'): err(c % 2)
			break
	if not txt: err(c % 3)
	# Strip the leading '#' and trailing newline, then undo the escaping
	# applied when the archive was embedded (C1 -> \n, C2 -> \r, C3 -> \x00).
	txt = txt[1:-1].replace(b(C1), b('\n')).replace(b(C2), b('\r')).replace(b(C3), b('\x00'))
	import shutil, tarfile
	try: shutil.rmtree(dir)
	except OSError: pass
	try:
		for x in ('Tools', 'extras'):
			os.makedirs(join(dir, 'waflib', x))
	except OSError:
		err("Cannot unpack waf lib into %s\nMove waf in a writable directory" % dir)
	os.chdir(dir)
	tmp = 't.bz2'
	t = open(tmp,'wb')
	try: t.write(txt)
	finally: t.close()
	try:
		t = tarfile.open(tmp)
	except:
		# Python built without bz2 support: fall back to the external
		# bunzip2 tool and open the decompressed tar instead.
		try:
			os.system('bunzip2 t.bz2')
			t = tarfile.open('t')
			tmp = 't'
		except:
			os.chdir(cwd)
			try: shutil.rmtree(dir)
			except OSError: pass
			err("Waf cannot be unpacked, check that bzip2 support is present")
	try:
		for x in t: t.extract(x)
	finally:
		t.close()
	for x in ('Tools', 'extras'):
		os.chmod(join('waflib',x), 493)  # 493 == 0o755
	if sys.hexversion<0x300000f:
		sys.path = [join(dir, 'waflib')] + sys.path
		import fixpy2
		# presumably rewrites the unpacked sources for old Pythons — see fixpy2
		fixpy2.fixdir(dir)
	os.remove(tmp)
	os.chdir(cwd)
	# Best-effort: on Windows, mark the unpacked directory hidden
	# (file attribute 2); silently ignored everywhere else.
	try: dir = unicode(dir, 'mbcs')
	except: pass
	try:
		from ctypes import windll
		windll.kernel32.SetFileAttributesW(dir, 2)
	except:
		pass
def test(dir):
	"""Return the absolute path of *dir* when it contains a 'waflib' folder.

	Falls through to an implicit None when the stat fails, i.e. *dir* is
	not a usable waf library location.
	"""
	try:
		os.stat(join(dir, 'waflib'))
	except OSError:
		return None
	return os.path.abspath(dir)
def find_lib():
	"""Return the absolute path of the bundled waf library, resolved as
	../../third_party/waf relative to this script's own location."""
	here = os.path.dirname(__file__)
	return os.path.abspath(os.path.join(here, '../../third_party/waf'))
# Locate the bundled waf library and make it importable.
wafdir = find_lib()
sys.path.insert(0, wafdir)
if __name__ == '__main__':
	# TODO: remove these when possible
	# Compatibility shims: alias the waflib tool modules under their
	# pre-1.6 top-level names so legacy wscripts keep importing them.
	from waflib.extras import compat15
	import sys
	from waflib.Tools import ccroot, c, ar, compiler_c, gcc
	sys.modules['cc'] = c
	sys.modules['ccroot'] = ccroot
	sys.modules['ar'] = ar
	sys.modules['compiler_cc'] = compiler_c
	sys.modules['gcc'] = gcc
	from waflib import Options
	Options.lockfile = os.environ.get('WAFLOCK', '.lock-wscript')
	# An empty lockfile triggers NOCLIMB — presumably stops waf from
	# climbing to a parent directory; confirm in waflib.Scripting.
	if os.path.isfile(Options.lockfile) and os.stat(Options.lockfile).st_size == 0:
		os.environ['NOCLIMB'] = "1"
	# there is a single top-level, but libraries must build independently
	os.environ['NO_LOCK_IN_TOP'] = "1"
	from waflib import Task
	# Register a stub class for 'cc_link' tasks with display = None.
	class o(object):
		display = None
	Task.classes['cc_link'] = o
	from waflib import Scripting
	Scripting.waf_entry_point(cwd, VERSION, wafdir)

View File

@ -1,7 +1,9 @@
# handle substitution of variables in .in files
import re, os
import Build, sys, Logs
import sys
import re
import os
from waflib import Build, Logs
from samba_utils import SUBST_VARS_RECURSIVE
def subst_at_vars(task):

View File

@ -3,11 +3,11 @@
# based on suncc.py from waf
import os, optparse
import Utils, Options, Configure
import ccroot, ar
from Configure import conftest
from waflib import Utils, Options, Configure
from waflib.Tools import ccroot, ar
from waflib.Configure import conftest
from compiler_cc import c_compiler
from waflib.Tools.compiler_c import c_compiler
c_compiler['default'] = ['gcc', 'generic_cc']
c_compiler['hpux'] = ['gcc', 'generic_cc']

View File

@ -2,10 +2,10 @@
# based on suncc.py from waf
import os, optparse, sys
import Utils, Options, Configure
import ccroot, ar
from Configure import conftest
import gcc
from waflib import Utils, Options, Configure
from waflib.Tools import ccroot, ar
from waflib.Configure import conftest
from waflib.Tools import gcc
@conftest
@ -38,7 +38,7 @@ def gcc_modifier_hpux(conf):
gcc.gcc_modifier_hpux = gcc_modifier_hpux
from TaskGen import feature, after
from waflib.TaskGen import feature, after
@feature('cprogram', 'cshlib')
@after('apply_link', 'apply_lib_vars', 'apply_obj_vars')
def hpux_addfullpath(self):

View File

@ -3,11 +3,11 @@
# based on suncc.py from waf
import os, optparse
import Utils, Options, Configure
import ccroot, ar
from Configure import conftest
from waflib import Utils, Options, Configure
from waflib.Tools import ccroot, ar
from waflib.Configure import conftest
from compiler_cc import c_compiler
from waflib.Tools.compiler_c import c_compiler
c_compiler['irix'] = ['gcc', 'irixcc']

View File

@ -13,8 +13,8 @@
import sys, random, threading
try: from Queue import Queue
except ImportError: from queue import Queue
import Utils, Options
from Constants import EXCEPTION, CRASHED, MAXJOBS, ASK_LATER, SKIPPED, SKIP_ME, SUCCESS
from waflib import Utils, Options, Errors
from waflib.TaskGen import EXCEPTION, CRASHED, MAXJOBS, ASK_LATER, SKIPPED, SKIP_ME, SUCCESS
GAP = 15
@ -58,7 +58,7 @@ def process(tsk):
else:
try:
tsk.post_run()
except Utils.WafError:
except Errors.WafError:
pass
except Exception:
tsk.err_msg = Utils.ex_stack()

View File

@ -1,7 +1,7 @@
# handle substitution of variables in pc files
import os, re, sys
import Build, Logs
from waflib import Build, Logs
from samba_utils import SUBST_VARS_RECURSIVE, TO_LIST
def subst_at_vars(task):

View File

@ -1,11 +1,12 @@
# a waf tool to add autoconf-like macros to the configure section
# and for SAMBA_ macros for building libraries, binaries etc
import Options, Build, os
import os
from waflib import Options, Build
from samba_utils import os_path_relpath, TO_LIST, samba_add_onoff_option
from samba_autoconf import library_flags
Options.Handler.SAMBA3_ADD_OPTION = samba_add_onoff_option
Options.OptionsContext.SAMBA3_ADD_OPTION = samba_add_onoff_option
def SAMBA3_IS_STATIC_MODULE(bld, module):
'''Check whether module is in static list'''
@ -32,7 +33,7 @@ def s3_fix_kwargs(bld, kwargs):
'''fix the build arguments for s3 build rules to include the
necessary includes, subdir and cflags options '''
s3dir = os.path.join(bld.env.srcdir, 'source3')
s3reldir = os_path_relpath(s3dir, bld.curdir)
s3reldir = os_path_relpath(s3dir, bld.path.abspath())
# the extra_includes list is relative to the source3 directory
extra_includes = [ '.', 'include', 'lib' ]

View File

@ -1,7 +1,13 @@
# functions for handling ABI checking of libraries
import Options, Utils, os, Logs, samba_utils, sys, Task, fnmatch, re, Build
from TaskGen import feature, before, after
import os
import sys
import re
import fnmatch
from waflib import Options, Utils, Logs, Task, Build, Errors
from waflib.TaskGen import feature, before, after
import samba_utils
# these type maps cope with platform specific names for common types
# please add new type mappings into the list below
@ -87,7 +93,7 @@ def abi_check_task(self):
old_sigs = samba_utils.load_file(sig_file)
if old_sigs is None or Options.options.ABI_UPDATE:
if not save_sigs(sig_file, parsed_sigs):
raise Utils.WafError('Failed to save ABI file "%s"' % sig_file)
raise Errors.WafError('Failed to save ABI file "%s"' % sig_file)
Logs.warn('Generated ABI signatures %s' % sig_file)
return
@ -112,10 +118,10 @@ def abi_check_task(self):
got_error = True
if got_error:
raise Utils.WafError('ABI for %s has changed - please fix library version then build with --abi-update\nSee http://wiki.samba.org/index.php/Waf#ABI_Checking for more information\nIf you have not changed any ABI, and your platform always gives this error, please configure with --abi-check-disable to skip this check' % libname)
raise Errors.WafError('ABI for %s has changed - please fix library version then build with --abi-update\nSee http://wiki.samba.org/index.php/Waf#ABI_Checking for more information\nIf you have not changed any ABI, and your platform always gives this error, please configure with --abi-check-disable to skip this check' % libname)
t = Task.task_type_from_func('abi_check', abi_check_task, color='BLUE', ext_in='.bin')
t = Task.task_factory('abi_check', abi_check_task, color='BLUE', ext_in='.bin')
t.quiet = True
# allow "waf --abi-check" to force re-checking the ABI
if '--abi-check' in sys.argv:

View File

@ -1,9 +1,10 @@
# a waf tool to add autoconf-like macros to the configure section
import os, sys
import Build, Options, preproc, Logs
from Configure import conf
from TaskGen import feature
from waflib import Build, Options, Logs, Context
from waflib.Configure import conf
from waflib.TaskGen import feature
from waflib.Tools import c_preproc as preproc
from samba_utils import TO_LIST, GET_TARGET_TYPE, SET_TARGET_TYPE, unique_list, mkdir_p
missing_headers = set()
@ -44,11 +45,11 @@ def COMPOUND_START(conf, msg):
if v != [] and v != 0:
conf.env.in_compound = v + 1
return
conf.check_message_1(msg)
conf.saved_check_message_1 = conf.check_message_1
conf.check_message_1 = null_check_message_1
conf.saved_check_message_2 = conf.check_message_2
conf.check_message_2 = null_check_message_2
conf.start_msg(msg)
conf.saved_check_message_1 = conf.start_msg
conf.start_msg = null_check_message_1
conf.saved_check_message_2 = conf.end_msg
conf.end_msg = null_check_message_2
conf.env.in_compound = 1
@ -58,9 +59,9 @@ def COMPOUND_END(conf, result):
conf.env.in_compound -= 1
if conf.env.in_compound != 0:
return
conf.check_message_1 = conf.saved_check_message_1
conf.check_message_2 = conf.saved_check_message_2
p = conf.check_message_2
conf.start_msg = conf.saved_check_message_1
conf.end_msg = conf.saved_check_message_2
p = conf.end_msg
if result is True:
p('ok')
elif not result:
@ -404,7 +405,7 @@ def CHECK_CODE(conf, code, define,
cflags.append(extra_cflags)
if local_include:
cflags.append('-I%s' % conf.curdir)
cflags.append('-I%s' % conf.path.abspath())
if not link:
type='nolink'
@ -663,8 +664,8 @@ def CHECK_FUNCS_IN(conf, list, library, mandatory=False, checklibc=False,
@conf
def IN_LAUNCH_DIR(conf):
'''return True if this rule is being run from the launch directory'''
return os.path.realpath(conf.curdir) == os.path.realpath(Options.launch_dir)
Options.Handler.IN_LAUNCH_DIR = IN_LAUNCH_DIR
return os.path.realpath(conf.path.abspath()) == os.path.realpath(Context.launch_dir)
Options.OptionsContext.IN_LAUNCH_DIR = IN_LAUNCH_DIR
@conf
@ -899,7 +900,7 @@ def SETUP_CONFIGURE_CACHE(conf, enable):
# when -C is chosen, we will use a private cache and will
not look into system includes. This roughly matches what
# autoconf does with -C
cache_path = os.path.join(conf.blddir, '.confcache')
cache_path = os.path.join(conf.bldnode.abspath(), '.confcache')
mkdir_p(cache_path)
Options.cache_global = os.environ['WAFCACHE'] = cache_path
else:

View File

@ -1,13 +1,13 @@
# waf build tool for building automatic prototypes from C source
import os
import Build
from waflib import Build
from samba_utils import SET_TARGET_TYPE, os_path_relpath
def SAMBA_AUTOPROTO(bld, header, source):
'''rule for samba prototype generation'''
bld.SET_BUILD_GROUP('prototypes')
relpath = os_path_relpath(bld.curdir, bld.srcnode.abspath())
relpath = os_path_relpath(bld.path.abspath(), bld.srcnode.abspath())
name = os.path.join(relpath, header)
SET_TARGET_TYPE(bld, name, 'PROTOTYPE')
t = bld(

View File

@ -1,8 +1,8 @@
# functions to support bundled libraries
import sys
import Build, Options, Logs
from Configure import conf
from waflib import Build, Options, Logs
from waflib.Configure import conf
from samba_utils import TO_LIST
def PRIVATE_NAME(bld, name, private_extension, private_library):
@ -51,19 +51,19 @@ Build.BuildContext.BUILTIN_LIBRARY = BUILTIN_LIBRARY
def BUILTIN_DEFAULT(opt, builtins):
'''set a comma separated default list of builtin libraries for this package'''
if 'BUILTIN_LIBRARIES_DEFAULT' in Options.options:
if 'BUILTIN_LIBRARIES_DEFAULT' in Options.options.__dict__:
return
Options.options['BUILTIN_LIBRARIES_DEFAULT'] = builtins
Options.Handler.BUILTIN_DEFAULT = BUILTIN_DEFAULT
Options.options.__dict__['BUILTIN_LIBRARIES_DEFAULT'] = builtins
Options.OptionsContext.BUILTIN_DEFAULT = BUILTIN_DEFAULT
def PRIVATE_EXTENSION_DEFAULT(opt, extension, noextension=''):
'''set a default private library extension'''
if 'PRIVATE_EXTENSION_DEFAULT' in Options.options:
if 'PRIVATE_EXTENSION_DEFAULT' in Options.options.__dict__:
return
Options.options['PRIVATE_EXTENSION_DEFAULT'] = extension
Options.options['PRIVATE_EXTENSION_EXCEPTION'] = noextension
Options.Handler.PRIVATE_EXTENSION_DEFAULT = PRIVATE_EXTENSION_DEFAULT
Options.options.__dict__['PRIVATE_EXTENSION_DEFAULT'] = extension
Options.options.__dict__['PRIVATE_EXTENSION_EXCEPTION'] = noextension
Options.OptionsContext.PRIVATE_EXTENSION_DEFAULT = PRIVATE_EXTENSION_DEFAULT
def minimum_library_version(conf, libname, default):

View File

@ -2,35 +2,35 @@
# to test for commonly needed configuration options
import os, shutil, re
import Build, Configure, Utils, Options, Logs
from Configure import conf
from waflib import Build, Configure, Utils, Options, Logs, Errors
from waflib.Configure import conf
from samba_utils import TO_LIST, ADD_LD_LIBRARY_PATH
def add_option(self, *k, **kw):
'''syntax help: provide the "match" attribute to opt.add_option() so that folders can be added to specific config tests'''
Options.parser = self
Options.OptionsContext.parser = self
match = kw.get('match', [])
if match:
del kw['match']
opt = self.parser.add_option(*k, **kw)
opt.match = match
return opt
Options.Handler.add_option = add_option
Options.OptionsContext.add_option = add_option
@conf
def check(self, *k, **kw):
'''Override the waf defaults to inject --with-directory options'''
if not 'env' in kw:
kw['env'] = self.env.copy()
kw['env'] = self.env.derive()
# match the configuration test with specific options, for example:
# --with-libiconv -> Options.options.iconv_open -> "Checking for library iconv"
additional_dirs = []
if 'msg' in kw:
msg = kw['msg']
for x in Options.Handler.parser.parser.option_list:
for x in Options.OptionsContext.parser.parser.option_list:
if getattr(x, 'match', None) and msg in x.match:
d = getattr(Options.options, x.dest, '')
if d:
@ -47,12 +47,12 @@ def check(self, *k, **kw):
add_options_dir(additional_dirs, kw['env'])
self.validate_c(kw)
self.check_message_1(kw['msg'])
self.start_msg(kw['msg'])
ret = None
try:
ret = self.run_c_code(*k, **kw)
except Configure.ConfigurationError as e:
self.check_message_2(kw['errmsg'], 'YELLOW')
self.end_msg(kw['errmsg'], 'YELLOW')
if 'mandatory' in kw and kw['mandatory']:
if Logs.verbose > 1:
raise
@ -60,7 +60,7 @@ def check(self, *k, **kw):
self.fatal('the configuration failed (see %r)' % self.log.name)
else:
kw['success'] = ret
self.check_message_2(self.ret_msg(kw['okmsg'], kw))
self.end_msg(self.ret_msg(kw['okmsg'], kw))
# success! keep the CPPPATH/LIBPATH
add_options_dir(additional_dirs, self.env)
@ -163,7 +163,7 @@ def find_config_dir(conf):
'''find a directory to run tests in'''
k = 0
while k < 10000:
dir = os.path.join(conf.blddir, '.conf_check_%d' % k)
dir = os.path.join(conf.bldnode.abspath(), '.conf_check_%d' % k)
try:
shutil.rmtree(dir)
except OSError:
@ -338,7 +338,8 @@ def CHECK_LIBRARY_SUPPORT(conf, rpath=False, version_script=False, msg=None):
# we need to run the program, try to get its result
args = conf.SAMBA_CROSS_ARGS(msg=msg)
proc = Utils.pproc.Popen([lastprog] + args, stdout=Utils.pproc.PIPE, stderr=Utils.pproc.PIPE)
proc = Utils.subprocess.Popen([lastprog] + args,
stdout=Utils.subprocess.PIPE, stderr=Utils.subprocess.PIPE)
(out, err) = proc.communicate()
w = conf.log.write
w(str(out))
@ -365,7 +366,7 @@ def CHECK_PERL_MANPAGE(conf, msg=None, section=None):
else:
msg = "perl manpage generation"
conf.check_message_1(msg)
conf.start_msg(msg)
dir = find_config_dir(conf)
@ -382,28 +383,28 @@ WriteMakefile(
""")
back = os.path.abspath('.')
os.chdir(bdir)
proc = Utils.pproc.Popen(['perl', 'Makefile.PL'],
stdout=Utils.pproc.PIPE,
stderr=Utils.pproc.PIPE)
proc = Utils.subprocess.Popen(['perl', 'Makefile.PL'],
stdout=Utils.subprocess.PIPE,
stderr=Utils.subprocess.PIPE)
(out, err) = proc.communicate()
os.chdir(back)
ret = (proc.returncode == 0)
if not ret:
conf.check_message_2('not found', color='YELLOW')
conf.end_msg('not found', color='YELLOW')
return
if section:
man = Utils.readf(os.path.join(bdir,'Makefile'))
m = re.search('MAN%sEXT\s+=\s+(\w+)' % section, man)
if not m:
conf.check_message_2('not found', color='YELLOW')
conf.end_msg('not found', color='YELLOW')
return
ext = m.group(1)
conf.check_message_2(ext)
conf.end_msg(ext)
return ext
conf.check_message_2('ok')
conf.end_msg('ok')
return True
@ -512,7 +513,7 @@ def CHECK_STANDARD_LIBPATH(conf):
# option not supported by compiler - use a standard list of directories
dirlist = [ '/usr/lib', '/usr/lib64' ]
except:
raise Utils.WafError('Unexpected error running "%s"' % (cmd))
raise Errors.WafError('Unexpected error running "%s"' % (cmd))
else:
dirlist = []
for line in out:

View File

@ -1,8 +1,8 @@
# functions for handling cross-compilation
import os, sys, re, shlex
import Utils, Logs, Options
from Configure import conf
from waflib import Utils, Logs, Options, Errors
from waflib.Configure import conf
real_Popen = None
@ -81,12 +81,12 @@ def cross_answer(ca_file, msg):
f.close()
return (int(m.group(1)), m.group(2))
else:
raise Utils.WafError("Bad answer format '%s' in %s" % (line, ca_file))
raise Errors.WafError("Bad answer format '%s' in %s" % (line, ca_file))
f.close()
return ANSWER_UNKNOWN
class cross_Popen(Utils.pproc.Popen):
class cross_Popen(Utils.subprocess.Popen):
'''cross-compilation wrapper for Popen'''
def __init__(*k, **kw):
(obj, args) = k
@ -154,11 +154,11 @@ def SAMBA_CROSS_ARGS(conf, msg=None):
if conf.env.CROSS_ANSWERS:
if msg is None:
raise Utils.WafError("Cannot have NULL msg in cross-answers")
raise Errors.WafError("Cannot have NULL msg in cross-answers")
ret.extend(['--cross-answers', os.path.join(Options.launch_dir, conf.env.CROSS_ANSWERS), msg])
if ret == []:
raise Utils.WafError("Cannot cross-compile without either --cross-execute or --cross-answers")
raise Errors.WafError("Cannot cross-compile without either --cross-execute or --cross-answers")
return ret
@ -167,5 +167,5 @@ def SAMBA_CROSS_CHECK_COMPLETE(conf):
'''check if we have some unanswered questions'''
global cross_answers_incomplete
if conf.env.CROSS_COMPILE and cross_answers_incomplete:
raise Utils.WafError("Cross answers file %s is incomplete" % conf.env.CROSS_ANSWERS)
raise Errors.WafError("Cross answers file %s is incomplete" % conf.env.CROSS_ANSWERS)
return True

View File

@ -2,9 +2,10 @@
import os, sys, re, time
import Build, Environment, Options, Logs, Utils
from Logs import debug
from Configure import conf
from waflib import Build, Options, Logs, Utils, Errors
from waflib.Logs import debug
from waflib.Configure import conf
from waflib import ConfigSet
from samba_bundled import BUILTIN_LIBRARY
from samba_utils import LOCAL_CACHE, TO_LIST, get_tgt_list, unique_list, os_path_relpath
@ -302,7 +303,7 @@ def check_duplicate_sources(bld, tgt_list):
Logs.warn("WARNING: source %s is in more than one target: %s" % (s, subsystems[s].keys()))
for tname in subsystems[s]:
if len(subsystems[s][tname]) > 1:
raise Utils.WafError("ERROR: source %s is in more than one subsystem of target '%s': %s" % (s, tname, subsystems[s][tname]))
raise Errors.WafError("ERROR: source %s is in more than one subsystem of target '%s': %s" % (s, tname, subsystems[s][tname]))
return True
@ -963,7 +964,7 @@ savedeps_files = ['buildtools/wafsamba/samba_deps.py']
def save_samba_deps(bld, tgt_list):
'''save the dependency calculations between builds, to make
further builds faster'''
denv = Environment.Environment()
denv = ConfigSet.ConfigSet()
denv.version = savedeps_version
denv.savedeps_inputs = savedeps_inputs
@ -1017,8 +1018,8 @@ def save_samba_deps(bld, tgt_list):
def load_samba_deps(bld, tgt_list):
'''load a previous set of build dependencies if possible'''
depsfile = os.path.join(bld.bdir, "sambadeps")
denv = Environment.Environment()
depsfile = os.path.join(bld.bldnode.abspath(), "sambadeps")
denv = ConfigSet.ConfigSet()
try:
debug('deps: checking saved dependencies')
denv.load_fast(depsfile)

View File

@ -2,8 +2,8 @@
# uses git ls-files to get file lists
import os, sys, tarfile
import Utils, Scripting, Logs, Options
from Configure import conf
from waflib import Utils, Scripting, Logs, Options
from waflib.Configure import conf
from samba_utils import os_path_relpath
from waflib import Context
@ -164,12 +164,12 @@ def dist(appname='', version=''):
if not isinstance(appname, str) or not appname:
# this copes with a mismatch in the calling arguments for dist()
appname = Utils.g_module.APPNAME
version = Utils.g_module.VERSION
appname = Context.g_module.APPNAME
version = Context.g_module.VERSION
if not version:
version = Utils.g_module.VERSION
version = Context.g_module.VERSION
srcdir = os.path.normpath(os.path.join(os.path.dirname(Utils.g_module.root_path), Utils.g_module.srcdir))
srcdir = os.path.normpath(os.path.join(os.path.dirname(Context.g_module.root_path), Context.g_module.srcdir))
if not dist_dirs:
Logs.error('You must use samba_dist.DIST_DIRS() to set which directories to package')

View File

@ -1,7 +1,7 @@
# specialist handling of header files for Samba
import os, re, sys, fnmatch
import Build, Logs, Utils
from waflib import Build, Logs, Utils, Errors
from samba_utils import TO_LIST, os_path_relpath
@ -99,7 +99,7 @@ def create_public_header(task):
os.unlink(tgt)
sys.stderr.write("%s:%u:Error: unable to resolve public header %s (maybe try one of %s)\n" % (
os.path.relpath(src, os.getcwd()), linenumber, hpath, suggested))
raise Utils.WafError("Unable to resolve header path '%s' in public header '%s' in directory %s" % (
raise Errors.WafError("Unable to resolve header path '%s' in public header '%s' in directory %s" % (
hpath, relsrc, task.env.RELPATH))
infile.close()
outfile.close()
@ -148,11 +148,12 @@ def PUBLIC_HEADERS(bld, public_headers, header_path=None, public_headers_install
else:
h_name = h
inst_name = os.path.basename(h)
relpath1 = os_path_relpath(bld.srcnode.abspath(), bld.curdir)
relpath2 = os_path_relpath(bld.curdir, bld.srcnode.abspath())
curdir = bld.path.abspath()
relpath1 = os_path_relpath(bld.srcnode.abspath(), curdir)
relpath2 = os_path_relpath(curdir, bld.srcnode.abspath())
targetdir = os.path.normpath(os.path.join(relpath1, bld.env.build_public_headers, inst_path))
if not os.path.exists(os.path.join(bld.curdir, targetdir)):
raise Utils.WafError("missing source directory %s for public header %s" % (targetdir, inst_name))
if not os.path.exists(os.path.join(curdir, targetdir)):
raise Errors.WafError("missing source directory %s for public header %s" % (targetdir, inst_name))
target = os.path.join(targetdir, inst_name)
# the source path of the header, relative to the top of the source tree

View File

@ -4,8 +4,8 @@
# library use
import os
import Utils
from TaskGen import feature, before, after
from waflib import Utils, Errors
from waflib.TaskGen import feature, before, after
from samba_utils import LIB_PATH, MODE_755, install_rpath, build_rpath
@feature('install_bin')
@ -228,7 +228,7 @@ def symlink_bin(self):
return
if not self.link_task.outputs or not self.link_task.outputs[0]:
raise Utils.WafError('no outputs found for %s in symlink_bin' % self.name)
raise Errors.WafError('no outputs found for %s in symlink_bin' % self.name)
binpath = self.link_task.outputs[0].abspath(self.env)
bldpath = os.path.join(self.bld.env.BUILD_DIRECTORY, self.link_task.outputs[0].name)

View File

@ -1,6 +1,6 @@
# a waf tool to add extension based build patterns for Samba
import Build
from waflib import Build
from wafsamba import samba_version_file
def write_version_header(task):

View File

@ -1,5 +1,5 @@
import Utils
from Configure import conf
from waflib import Utils
from waflib.Configure import conf
done = {}
@ -9,7 +9,7 @@ def SAMBA_CHECK_PERL(conf, mandatory=True, version=(5,0,0)):
return
done["done"] = True
conf.find_program('perl', var='PERL', mandatory=mandatory)
conf.check_tool('perl')
conf.load('perl')
path_perl = conf.find_program('perl')
conf.env.PERL_SPECIFIED = (conf.env.PERL != path_perl)
conf.check_perl_version(version)

View File

@ -1,8 +1,8 @@
# waf build tool for building IDL files with pidl
import os
import Build, Utils
from TaskGen import feature, before
from waflib import Build, Utils
from waflib.TaskGen import feature, before
from samba_utils import SET_TARGET_TYPE, TO_LIST, LOCAL_CACHE
def SAMBA_PIDL(bld, pname, source,
@ -97,7 +97,7 @@ def SAMBA_PIDL(bld, pname, source,
pidl_headers = LOCAL_CACHE(bld, 'PIDL_HEADERS')
pidl_headers[name] = [bld.path.find_or_declare(out_files[table_header_idx])]
t.more_includes = '#' + bld.path.relpath_gen(bld.srcnode)
t.more_includes = '#' + bld.path.path_from(bld.srcnode)
Build.BuildContext.SAMBA_PIDL = SAMBA_PIDL

View File

@ -1,8 +1,8 @@
# waf build tool for building IDL files with pidl
import os
import Build, Logs, Utils, Configure
from Configure import conf
from waflib import Build, Logs, Utils, Configure, Errors
from waflib.Configure import conf
@conf
def SAMBA_CHECK_PYTHON(conf, mandatory=True, version=(2,4,2)):
@ -14,12 +14,12 @@ def SAMBA_CHECK_PYTHON(conf, mandatory=True, version=(2,4,2)):
interpreters = []
if conf.env['EXTRA_PYTHON']:
conf.all_envs['extrapython'] = conf.env.copy()
conf.all_envs['extrapython'] = conf.env.derive()
conf.setenv('extrapython')
conf.env['PYTHON'] = conf.env['EXTRA_PYTHON']
conf.env['IS_EXTRA_PYTHON'] = 'yes'
conf.find_program('python', var='PYTHON', mandatory=True)
conf.check_tool('python')
conf.load('python')
try:
conf.check_python_version((3, 3, 0))
except Exception:
@ -29,7 +29,7 @@ def SAMBA_CHECK_PYTHON(conf, mandatory=True, version=(2,4,2)):
conf.setenv('default')
conf.find_program('python', var='PYTHON', mandatory=mandatory)
conf.check_tool('python')
conf.load('python')
path_python = conf.find_program('python')
conf.env.PYTHON_SPECIFIED = (conf.env.PYTHON != path_python)
conf.check_python_version(version)
@ -42,7 +42,7 @@ def SAMBA_CHECK_PYTHON(conf, mandatory=True, version=(2,4,2)):
def SAMBA_CHECK_PYTHON_HEADERS(conf, mandatory=True):
if conf.env.disable_python:
if mandatory:
raise Utils.WafError("Cannot check for python headers when "
raise Errors.WafError("Cannot check for python headers when "
"--disable-python specified")
conf.msg("python headers", "Check disabled due to --disable-python")
@ -66,7 +66,7 @@ def SAMBA_CHECK_PYTHON_HEADERS(conf, mandatory=True):
if conf.env['EXTRA_PYTHON']:
extraversion = conf.all_envs['extrapython']['PYTHON_VERSION']
if extraversion == conf.env['PYTHON_VERSION']:
raise Utils.WafError("extrapython %s is same as main python %s" % (
raise Errors.WafError("extrapython %s is same as main python %s" % (
extraversion, conf.env['PYTHON_VERSION']))
else:
conf.msg("python headers", "using cache")
@ -79,9 +79,9 @@ def SAMBA_CHECK_PYTHON_HEADERS(conf, mandatory=True):
def _check_python_headers(conf, mandatory):
try:
Configure.ConfigurationError
conf.errors.ConfigurationError
conf.check_python_headers()
except Configure.ConfigurationError:
except conf.errors.ConfigurationError:
if mandatory:
raise

View File

@ -1,12 +1,12 @@
# functions to support third party libraries
import os
import Utils, Build
from Configure import conf
from waflib import Utils, Build, Context
from waflib.Configure import conf
@conf
def CHECK_FOR_THIRD_PARTY(conf):
return os.path.exists(os.path.join(Utils.g_module.srcdir, 'third_party'))
return os.path.exists(os.path.join(Context.g_module.srcdir, 'third_party'))
Build.BuildContext.CHECK_FOR_THIRD_PARTY = CHECK_FOR_THIRD_PARTY

View File

@ -3,10 +3,11 @@
import os, sys, re, fnmatch, shlex, inspect
from optparse import SUPPRESS_HELP
from waflib import Build, Options, Utils, Task, Logs, Configure, Errors
from TaskGen import feature, before, after
from Configure import ConfigurationContext
from Logs import debug
from waflib import Build, Options, Utils, Task, Logs, Configure, Errors, Context
from waflib.TaskGen import feature, before, after
from waflib.Configure import ConfigurationContext
from waflib.Logs import debug
from waflib import ConfigSet
# TODO: make this a --option
LIB_PATH="shared"
@ -45,10 +46,10 @@ def SET_TARGET_TYPE(ctx, target, value):
'''set the target type of a target'''
cache = LOCAL_CACHE(ctx, 'TARGET_TYPE')
if target in cache and cache[target] != 'EMPTY':
Logs.error("ERROR: Target '%s' in directory %s re-defined as %s - was %s" % (target, ctx.curdir, value, cache[target]))
Logs.error("ERROR: Target '%s' in directory %s re-defined as %s - was %s" % (target, ctx.path.abspath(), value, cache[target]))
sys.exit(1)
LOCAL_CACHE_SET(ctx, 'TARGET_TYPE', target, value)
debug("task_gen: Target '%s' created of type '%s' in %s" % (target, value, ctx.curdir))
debug("task_gen: Target '%s' created of type '%s' in %s" % (target, value, ctx.path.abspath()))
return True
@ -125,7 +126,7 @@ def LOCAL_CACHE_SET(ctx, cachename, key, value):
def ASSERT(ctx, expression, msg):
'''a build assert call'''
if not expression:
raise Utils.WafError("ERROR: %s\n" % msg)
raise Errors.WafError("ERROR: %s\n" % msg)
Build.BuildContext.ASSERT = ASSERT
@ -146,9 +147,9 @@ def dict_concat(d1, d2):
def ADD_COMMAND(opt, name, function):
'''add a new top level command to waf'''
Utils.g_module.__dict__[name] = function
Context.g_module.__dict__[name] = function
opt.name = function
Options.Handler.ADD_COMMAND = ADD_COMMAND
Options.OptionsContext.ADD_COMMAND = ADD_COMMAND
@feature('c', 'cc', 'cshlib', 'cprogram')
@ -223,7 +224,7 @@ def subst_vars_error(string, env):
if re.match('\$\{\w+\}', v):
vname = v[2:-1]
if not vname in env:
raise KeyError("Failed to find variable %s in %s" % (vname, string))
raise KeyError("Failed to find variable %s in %s in env %s <%s>" % (vname, string, env.__class__, str(env)))
v = env[vname]
if isinstance(v, list):
v = ' '.join(v)
@ -338,8 +339,7 @@ def EXPAND_VARIABLES(ctx, varstr, vars=None):
if not isinstance(varstr, str):
return varstr
import Environment
env = Environment.Environment()
env = ConfigSet.ConfigSet()
ret = varstr
# substitute on user supplied dict if avaiilable
if vars is not None:
@ -378,7 +378,7 @@ def RUN_COMMAND(cmd,
def RUN_PYTHON_TESTS(testfiles, pythonpath=None, extra_env=None):
env = LOAD_ENVIRONMENT()
if pythonpath is None:
pythonpath = os.path.join(Utils.g_module.blddir, 'python')
pythonpath = os.path.join(Context.g_module.blddir, 'python')
result = 0
for interp in env.python_interpreters:
if not isinstance(interp, str):
@ -410,8 +410,7 @@ except:
# Try to use MD5 function. In FIPS mode this will cause an exception
foo = md5.md5('abcd')
except:
import Constants
Constants.SIG_NIL = hash('abcd')
Context.SIG_NIL = hash('abcd')
class replace_md5(object):
def __init__(self):
self.val = None
@ -437,10 +436,10 @@ except:
def LOAD_ENVIRONMENT():
'''load the configuration environment, allowing access to env vars
from new commands'''
import Environment
env = Environment.Environment()
env = ConfigSet.ConfigSet()
try:
env.load('bin/c4che/default_cache.py')
p = os.path.join(Context.g_module.out, 'c4che/default_cache.py')
env.load(p)
except (OSError, IOError):
pass
return env
@ -448,8 +447,9 @@ def LOAD_ENVIRONMENT():
def IS_NEWER(bld, file1, file2):
'''return True if file1 is newer than file2'''
t1 = os.stat(os.path.join(bld.curdir, file1)).st_mtime
t2 = os.stat(os.path.join(bld.curdir, file2)).st_mtime
curdir = bld.path.abspath()
t1 = os.stat(os.path.join(curdir, file1)).st_mtime
t2 = os.stat(os.path.join(curdir, file2)).st_mtime
return t1 > t2
Build.BuildContext.IS_NEWER = IS_NEWER
@ -459,31 +459,27 @@ def RECURSE(ctx, directory):
'''recurse into a directory, relative to the curdir or top level'''
try:
visited_dirs = ctx.visited_dirs
except:
except AttributeError:
visited_dirs = ctx.visited_dirs = set()
d = os.path.join(ctx.curdir, directory)
d = os.path.join(ctx.path.abspath(), directory)
if os.path.exists(d):
abspath = os.path.abspath(d)
else:
abspath = os.path.abspath(os.path.join(Utils.g_module.srcdir, directory))
abspath = os.path.abspath(os.path.join(Context.g_module.srcdir, directory))
ctxclass = ctx.__class__.__name__
key = ctxclass + ':' + abspath
if key in visited_dirs:
# already done it
return
visited_dirs.add(key)
relpath = os_path_relpath(abspath, ctx.curdir)
relpath = os_path_relpath(abspath, ctx.path.abspath())
if ctxclass in ['tmp', 'OptionsContext', 'ConfigurationContext', 'BuildContext']:
return ctx.recurse(relpath)
if 'waflib.extras.compat15' in sys.modules:
return ctx.recurse(relpath)
if ctxclass == 'Handler':
return ctx.sub_options(relpath)
if ctxclass == 'ConfigurationContext':
return ctx.sub_config(relpath)
if ctxclass == 'BuildContext':
return ctx.add_subdirs(relpath)
Logs.error('Unknown RECURSE context class', ctxclass)
Logs.error('Unknown RECURSE context class: {}'.format(ctxclass))
raise
Options.Handler.RECURSE = RECURSE
Options.OptionsContext.RECURSE = RECURSE
Build.BuildContext.RECURSE = RECURSE
@ -542,7 +538,7 @@ def option_group(opt, name):
gr = opt.add_option_group(name)
option_groups[name] = gr
return gr
Options.Handler.option_group = option_group
Options.OptionsContext.option_group = option_group
def save_file(filename, contents, create_dir=False):
@ -571,9 +567,9 @@ def load_file(filename):
def reconfigure(ctx):
'''rerun configure if necessary'''
import Configure, samba_wildcard, Scripting
if not os.path.exists(".lock-wscript"):
raise Utils.WafError('configure has not been run')
raise Errors.WafError('configure has not been run')
import samba_wildcard
bld = samba_wildcard.fake_build_environment()
Configure.autoconfig = True
Scripting.check_configured(bld)
@ -646,7 +642,7 @@ def get_tgt_list(bld):
tgt_list.append(t)
return tgt_list
from Constants import WSCRIPT_FILE
from waflib.Context import WSCRIPT_FILE
def PROCESS_SEPARATE_RULE(self, rule):
''' cause waf to process additional script based on `rule'.
You should have file named wscript_<stage>_rule in the current directory
@ -657,15 +653,21 @@ def PROCESS_SEPARATE_RULE(self, rule):
stage = 'configure'
elif isinstance(self, Build.BuildContext):
stage = 'build'
file_path = os.path.join(self.curdir, WSCRIPT_FILE+'_'+stage+'_'+rule)
txt = load_file(file_path)
if txt:
dc = {'ctx': self}
if getattr(self.__class__, 'pre_recurse', None):
dc = self.pre_recurse(txt, file_path, self.curdir)
exec(compile(txt, file_path, 'exec'), dc)
if getattr(self.__class__, 'post_recurse', None):
dc = self.post_recurse(txt, file_path, self.curdir)
file_path = os.path.join(self.path.abspath(), WSCRIPT_FILE+'_'+stage+'_'+rule)
node = self.root.find_node(file_path)
if node:
try:
cache = self.recurse_cache
except AttributeError:
cache = self.recurse_cache = {}
if node not in cache:
cache[node] = True
self.pre_recurse(node)
try:
function_code = node.read('rU', None)
exec(compile(function_code, node.abspath(), 'exec'), self.exec_dict)
finally:
self.post_recurse(node)
Build.BuildContext.PROCESS_SEPARATE_RULE = PROCESS_SEPARATE_RULE
ConfigurationContext.PROCESS_SEPARATE_RULE = PROCESS_SEPARATE_RULE
@ -722,4 +724,4 @@ def samba_add_onoff_option(opt, option, help=(), dest=None, default=True,
default=default)
opt.add_option(without_val, help=SUPPRESS_HELP, action="store_false",
dest=dest)
Options.Handler.samba_add_onoff_option = samba_add_onoff_option
Options.OptionsContext.samba_add_onoff_option = samba_add_onoff_option

View File

@ -1,5 +1,5 @@
import os
import Utils
from waflib import Utils, Context
import samba_utils
from samba_git import find_git
@ -260,5 +260,5 @@ def load_version(env=None, is_install=True):
env = samba_utils.LOAD_ENVIRONMENT()
version = samba_version_file("./VERSION", ".", env, is_install=is_install)
Utils.g_module.VERSION = version.STRING
Context.g_module.VERSION = version.STRING
return version

View File

@ -1,10 +1,10 @@
# compatibility layer for building with more recent waf versions
import os, shlex, sys
import Build, Configure, Node, Utils, Options, Logs
from waflib import Build, Configure, Node, Utils, Options, Logs
from waflib import ConfigSet
from TaskGen import feature, after
from Configure import conf, ConfigurationContext
from waflib.TaskGen import feature, after
from waflib.Configure import conf, ConfigurationContext
from waflib.Tools import bison, flex
sys.modules['bison'] = bison
@ -119,32 +119,6 @@ def find_program_samba(self, *k, **kw):
Configure.ConfigurationContext.find_program_old = Configure.ConfigurationContext.find_program
Configure.ConfigurationContext.find_program = find_program_samba
def PROCESS_SEPARATE_RULE(self, rule):
''' cause waf to process additional script based on `rule'.
You should have file named wscript_<stage>_rule in the current directory
where stage is either 'configure' or 'build'
'''
stage = ''
if isinstance(self, Configure.ConfigurationContext):
stage = 'configure'
elif isinstance(self, Build.BuildContext):
stage = 'build'
script = self.path.find_node('wscript_'+stage+'_'+rule)
if script:
txt = script.read()
bld = self
conf = self
ctx = self
dc = {'ctx': self, 'conf': self, 'bld': self}
if getattr(self.__class__, 'pre_recurse', None):
dc = self.pre_recurse(script)
exec(compile(txt, script.abspath(), 'exec'), dc)
if getattr(self.__class__, 'post_recurse', None):
dc = self.post_recurse(script)
Build.BuildContext.PROCESS_SEPARATE_RULE = PROCESS_SEPARATE_RULE
ConfigurationContext.PROCESS_SEPARATE_RULE = PROCESS_SEPARATE_RULE
Build.BuildContext.ENFORCE_GROUP_ORDERING = Utils.nada
Build.BuildContext.AUTOCLEANUP_STALE_FILES = Utils.nada
@ -159,7 +133,7 @@ def check(self, *k, **kw):
additional_dirs = []
if 'msg' in kw:
msg = kw['msg']
for x in Options.parser.parser.option_list:
for x in Options.OptionsContext.parser.parser.option_list:
if getattr(x, 'match', None) and msg in x.match:
d = getattr(Options.options, x.dest, '')
if d:
@ -265,3 +239,34 @@ def CHECK_CFG(self, *k, **kw):
kw['mandatory'] = False
kw['global_define'] = True
return self.check_cfg(*k, **kw)
def cmd_output(cmd, **kw):
silent = False
if 'silent' in kw:
silent = kw['silent']
del(kw['silent'])
if 'e' in kw:
tmp = kw['e']
del(kw['e'])
kw['env'] = tmp
kw['shell'] = isinstance(cmd, str)
kw['stdout'] = Utils.subprocess.PIPE
if silent:
kw['stderr'] = Utils.subprocess.PIPE
try:
p = Utils.subprocess.Popen(cmd, **kw)
output = p.communicate()[0]
except OSError as e:
raise ValueError(str(e))
if p.returncode:
if not silent:
msg = "command execution failed: %s -> %r" % (cmd, str(output))
raise ValueError(msg)
output = ''
return output
Utils.cmd_output = cmd_output

View File

@ -1,15 +1,15 @@
# based on playground/evil in the waf svn tree
import os, datetime, fnmatch
import Scripting, Utils, Options, Logs, Environment
from Constants import SRCDIR, BLDDIR
from waflib import Scripting, Utils, Options, Logs, Errors
from waflib import ConfigSet
from samba_utils import LOCAL_CACHE, os_path_relpath
def run_task(t, k):
'''run a single build task'''
ret = t.run()
if ret:
raise Utils.WafError("Failed to build %s: %u" % (k, ret))
raise Errors.WafError("Failed to build %s: %u" % (k, ret))
def run_named_build_task(cmd):
@ -45,7 +45,7 @@ def run_named_build_task(cmd):
if not found:
raise Utils.WafError("Unable to find build target matching %s" % cmd)
raise Errors.WafError("Unable to find build target matching %s" % cmd)
def rewrite_compile_targets():
@ -125,7 +125,7 @@ def wildcard_main(missing_cmd_fn):
def fake_build_environment(info=True, flush=False):
"""create all the tasks for the project, but do not run the build
return the build context in use"""
bld = getattr(Utils.g_module, 'build_context', Utils.Context)()
bld = getattr(Context.g_module, 'build_context', Utils.Context)()
bld = Scripting.check_configured(bld)
Options.commands['install'] = False
@ -134,16 +134,15 @@ def fake_build_environment(info=True, flush=False):
bld.is_install = 0 # False
try:
proj = Environment.Environment(Options.lockfile)
proj = ConfigSet.ConfigSet(Options.lockfile)
except IOError:
raise Utils.WafError("Project not configured (run 'waf configure' first)")
raise Errors.WafError("Project not configured (run 'waf configure' first)")
bld.load_dirs(proj[SRCDIR], proj[BLDDIR])
bld.load_envs()
if info:
Logs.info("Waf: Entering directory `%s'" % bld.bldnode.abspath())
bld.add_subdirs([os.path.split(Utils.g_module.root_path)[0]])
bld.add_subdirs([os.path.split(Context.g_module.root_path)[0]])
bld.pre_build()
if flush:

View File

@ -14,7 +14,7 @@ nodes/tasks, in which case the method will have to be modified
to exclude some folders for example.
"""
import Logs, Build, os, samba_utils, Options, Utils
import Logs, Build, os, samba_utils, Options, Utils, Errors
from Runner import Parallel
old_refill_task_list = Parallel.refill_task_list
@ -46,7 +46,7 @@ def replace_refill_task_list(self):
# paranoia
if bin_base[-4:] != '/bin':
raise Utils.WafError("Invalid bin base: %s" % bin_base)
raise Errors.WafError("Invalid bin base: %s" % bin_base)
# obtain the expected list of files
expected = []

View File

@ -2,8 +2,8 @@
# using nm, producing a set of exposed defined/undefined symbols
import os, re, subprocess
import Utils, Build, Options, Logs
from Logs import debug
from waflib import Utils, Build, Options, Logs, Errors
from waflib.Logs import debug
from samba_utils import TO_LIST, LOCAL_CACHE, get_tgt_list, os_path_relpath
# these are the data structures used in symbols.py:
@ -410,7 +410,7 @@ def check_library_deps(bld, t):
if dep2 == name and t.in_library != t2.in_library:
Logs.warn("WARNING: mutual dependency %s <=> %s" % (name, real_name(t2.sname)))
Logs.warn("Libraries should match. %s != %s" % (t.in_library, t2.in_library))
# raise Utils.WafError("illegal mutual dependency")
# raise Errors.WafError("illegal mutual dependency")
def check_syslib_collisions(bld, tgt_list):
@ -430,7 +430,7 @@ def check_syslib_collisions(bld, tgt_list):
Logs.error("ERROR: Target '%s' has symbols '%s' which is also in syslib '%s'" % (t.sname, common, lib))
has_error = True
if has_error:
raise Utils.WafError("symbols in common with system libraries")
raise Errors.WafError("symbols in common with system libraries")
def check_dependencies(bld, t):
@ -546,7 +546,7 @@ def symbols_whyneeded(task):
why = Options.options.WHYNEEDED.split(":")
if len(why) != 2:
raise Utils.WafError("usage: WHYNEEDED=TARGET:DEPENDENCY")
raise Errors.WafError("usage: WHYNEEDED=TARGET:DEPENDENCY")
target = why[0]
subsystem = why[1]
@ -579,7 +579,7 @@ def report_duplicate(bld, binname, sym, libs, fail_on_error):
else:
libnames.append(lib)
if fail_on_error:
raise Utils.WafError("%s: Symbol %s linked in multiple libraries %s" % (binname, sym, libnames))
raise Errors.WafError("%s: Symbol %s linked in multiple libraries %s" % (binname, sym, libnames))
else:
print("%s: Symbol %s linked in multiple libraries %s" % (binname, sym, libnames))

View File

@ -3,11 +3,11 @@
# based on suncc.py from waf
import os, optparse
import Utils, Options, Configure
import ccroot, ar
from Configure import conftest
from waflib import Utils, Options, Configure
from waflib.Tools import ccroot, ar
from waflib.Configure import conftest
from compiler_cc import c_compiler
from waflib.Tools.compiler_c import c_compiler
c_compiler['osf1V'] = ['gcc', 'tru64cc']

View File

@ -1,9 +1,10 @@
# a waf tool to add autoconf-like macros to the configure section
# and for SAMBA_ macros for building libraries, binaries etc
import Build, os, sys, Options, Task, Utils, cc, TaskGen, fnmatch, re, shutil, Logs, Constants
from Configure import conf
from Logs import debug
import os, sys, re, shutil, fnmatch
from waflib import Build, Options, Task, Utils, TaskGen, Logs, Context, Errors
from waflib.Configure import conf
from waflib.Logs import debug
from samba_utils import SUBST_VARS_RECURSIVE
TaskGen.task_gen.apply_verif = Utils.nada
@ -43,7 +44,7 @@ LIB_PATH="shared"
os.environ['PYTHONUNBUFFERED'] = '1'
if Constants.HEXVERSION not in (0x105019, 0x1090a00):
if Context.HEXVERSION not in (0x2000400,):
Logs.error('''
Please use the version of waf that comes with Samba, not
a system installed version. See http://wiki.samba.org/index.php/Waf
@ -53,26 +54,25 @@ Alternatively, please run ./configure and make as usual. That will
call the right version of waf.''')
sys.exit(1)
@conf
def SAMBA_BUILD_ENV(conf):
'''create the samba build environment'''
conf.env.BUILD_DIRECTORY = conf.blddir
mkdir_p(os.path.join(conf.blddir, LIB_PATH))
mkdir_p(os.path.join(conf.blddir, LIB_PATH, "private"))
mkdir_p(os.path.join(conf.blddir, "modules"))
mkdir_p(os.path.join(conf.blddir, 'python/samba/dcerpc'))
conf.env.BUILD_DIRECTORY = getattr(Context.g_module, Context.OUT)
mkdir_p(os.path.join(conf.env.BUILD_DIRECTORY, LIB_PATH))
mkdir_p(os.path.join(conf.env.BUILD_DIRECTORY, LIB_PATH, "private"))
mkdir_p(os.path.join(conf.env.BUILD_DIRECTORY, "modules"))
mkdir_p(os.path.join(conf.env.BUILD_DIRECTORY, 'python/samba/dcerpc'))
# this allows all of the bin/shared and bin/python targets
# to be expressed in terms of build directory paths
mkdir_p(os.path.join(conf.blddir, 'default'))
mkdir_p(os.path.join(conf.env.BUILD_DIRECTORY, 'default'))
for (source, target) in [('shared', 'shared'), ('modules', 'modules'), ('python', 'python_modules')]:
link_target = os.path.join(conf.blddir, 'default/' + target)
link_target = os.path.join(conf.env.BUILD_DIRECTORY, 'default/' + target)
if not os.path.lexists(link_target):
os.symlink('../' + source, link_target)
# get perl to put the blib files in the build directory
blib_bld = os.path.join(conf.blddir, 'default/pidl/blib')
blib_src = os.path.join(conf.srcdir, 'pidl/blib')
blib_bld = os.path.join(conf.env.BUILD_DIRECTORY, 'default/pidl/blib')
blib_src = os.path.join(conf.srcnode.abspath(), 'pidl/blib')
mkdir_p(blib_bld + '/man1')
mkdir_p(blib_bld + '/man3')
if os.path.islink(blib_src):
@ -146,7 +146,7 @@ def SAMBA_LIBRARY(bld, libname, source,
public_headers = None
if private_library and public_headers:
raise Utils.WafError("private library '%s' must not have public header files" %
raise Errors.WafError("private library '%s' must not have public header files" %
libname)
if LIB_MUST_BE_PRIVATE(bld, libname):
@ -223,13 +223,13 @@ def SAMBA_LIBRARY(bld, libname, source,
# we don't want any public libraries without version numbers
if (not private_library and target_type != 'PYTHON' and not realname):
if vnum is None and soname is None:
raise Utils.WafError("public library '%s' must have a vnum" %
raise Errors.WafError("public library '%s' must have a vnum" %
libname)
if pc_files is None:
raise Utils.WafError("public library '%s' must have pkg-config file" %
raise Errors.WafError("public library '%s' must have pkg-config file" %
libname)
if public_headers is None and not bld.env['IS_EXTRA_PYTHON']:
raise Utils.WafError("public library '%s' must have header files" %
raise Errors.WafError("public library '%s' must have header files" %
libname)
if bundled_name is not None:
@ -271,7 +271,7 @@ def SAMBA_LIBRARY(bld, libname, source,
vscript = None
if bld.env.HAVE_LD_VERSION_SCRIPT:
if private_library:
version = "%s_%s" % (Utils.g_module.APPNAME, Utils.g_module.VERSION)
version = "%s_%s" % (Context.g_module.APPNAME, Context.g_module.VERSION)
elif vnum:
version = "%s_%s" % (libname, vnum)
else:
@ -284,9 +284,9 @@ def SAMBA_LIBRARY(bld, libname, source,
fullpath = bld.path.find_or_declare(fullname)
vscriptpath = bld.path.find_or_declare(vscript)
if not fullpath:
raise Utils.WafError("unable to find fullpath for %s" % fullname)
raise Errors.WafError("unable to find fullpath for %s" % fullname)
if not vscriptpath:
raise Utils.WafError("unable to find vscript path for %s" % vscript)
raise Errors.WafError("unable to find vscript path for %s" % vscript)
bld.add_manual_dependency(fullpath, vscriptpath)
if bld.is_install:
# also make the .inst file depend on the vscript
@ -758,7 +758,7 @@ def SAMBA_SCRIPT(bld, name, pattern, installdir, installname=None):
target = os.path.join(installdir, iname)
tgtdir = os.path.dirname(os.path.join(bld.srcnode.abspath(bld.env), '..', target))
mkdir_p(tgtdir)
link_src = os.path.normpath(os.path.join(bld.curdir, s))
link_src = os.path.normpath(os.path.join(bld.path.abspath(), s))
link_dst = os.path.join(tgtdir, os.path.basename(iname))
if os.path.islink(link_dst) and os.readlink(link_dst) == link_src:
continue
@ -900,7 +900,7 @@ def INSTALL_DIR(bld, path, chmod=0o755, env=None):
if not path:
return []
destpath = bld.get_install_path(path, env)
destpath = bld.EXPAND_VARIABLES(path)
if bld.is_install > 0:
if not os.path.isdir(destpath):
@ -909,7 +909,7 @@ def INSTALL_DIR(bld, path, chmod=0o755, env=None):
os.chmod(destpath, chmod)
except OSError as e:
if not os.path.isdir(destpath):
raise Utils.WafError("Cannot create the folder '%s' (error: %s)" % (path, e))
raise Errors.WafError("Cannot create the folder '%s' (error: %s)" % (path, e))
Build.BuildContext.INSTALL_DIR = INSTALL_DIR
def INSTALL_DIRS(bld, destdir, dirs, chmod=0o755, env=None):

View File

@ -3,7 +3,8 @@
# this is a base set of waf rules that everything else pulls in first
import os, sys
import wafsamba, Configure, Logs, Options, Utils
from waflib import Configure, Logs, Options, Utils, Context, Errors
import wafsamba
from samba_utils import os_path_relpath
from optparse import SUPPRESS_HELP
@ -16,10 +17,15 @@ from optparse import SUPPRESS_HELP
if '--enable-auto-reconfigure' in sys.argv:
Configure.autoconfig = 'clobber'
def set_options(opt):
opt.tool_options('compiler_cc')
def default_value(option, default=''):
if option in Options.options.__dict__:
return Options.options.__dict__[option]
return default
opt.tool_options('gnu_dirs')
def options(opt):
opt.load('compiler_cc')
opt.load('gnu_dirs')
gr = opt.option_group('library handling options')
@ -31,17 +37,17 @@ def set_options(opt):
help=("comma separated list of normally public libraries to build instead as private libraries. May include !LIBNAME to disable making a library private. Can be 'NONE' or 'ALL' [auto]"),
action="store", dest='PRIVATE_LIBS', default='')
extension_default = Options.options['PRIVATE_EXTENSION_DEFAULT']
extension_default = default_value('PRIVATE_EXTENSION_DEFAULT')
gr.add_option('--private-library-extension',
help=("name extension for private libraries [%s]" % extension_default),
action="store", dest='PRIVATE_EXTENSION', default=extension_default)
extension_exception = Options.options['PRIVATE_EXTENSION_EXCEPTION']
extension_exception = default_value('PRIVATE_EXTENSION_EXCEPTION')
gr.add_option('--private-extension-exception',
help=("comma separated list of libraries to not apply extension to [%s]" % extension_exception),
action="store", dest='PRIVATE_EXTENSION_EXCEPTION', default=extension_exception)
builtin_default = Options.options['BUILTIN_LIBRARIES_DEFAULT']
builtin_default = default_value('BUILTIN_LIBRARIES_DEFAULT')
gr.add_option('--builtin-libraries',
help=("command separated list of libraries to build directly into binaries [%s]" % builtin_default),
action="store", dest='BUILTIN_LIBRARIES', default=builtin_default)
@ -71,7 +77,7 @@ def set_options(opt):
action="store", dest='MODULESDIR', default='${PREFIX}/modules')
opt.add_option('--with-privatelibdir',
help=("private library directory [PREFIX/lib/%s]" % Utils.g_module.APPNAME),
help=("private library directory [PREFIX/lib/%s]" % Context.g_module.APPNAME),
action="store", dest='PRIVATELIBDIR', default=None)
opt.add_option('--with-libiconv',
@ -210,7 +216,7 @@ def set_options(opt):
@Utils.run_once
def configure(conf):
conf.env.hlist = []
conf.env.srcdir = conf.srcdir
conf.env.srcdir = conf.srcnode.abspath()
conf.define('SRCDIR', conf.env['srcdir'])
@ -220,13 +226,12 @@ def configure(conf):
conf.SETUP_CONFIGURE_CACHE(Options.options.enable_configure_cache)
# load our local waf extensions
conf.check_tool('gnu_dirs')
conf.check_tool('wafsamba')
conf.check_tool('print_commands')
conf.load('gnu_dirs')
conf.load('wafsamba')
conf.CHECK_CC_ENV()
conf.check_tool('compiler_cc')
conf.load('compiler_cc')
conf.CHECK_STANDARD_LIBPATH()
@ -236,7 +241,7 @@ def configure(conf):
# older gcc versions (< 4.4) does not work with gccdeps, so we have to see if the .d file is generated
if Options.options.enable_gccdeps:
# stale file removal - the configuration may pick up the old .pyc file
p = os.path.join(conf.srcdir, 'buildtools/wafsamba/gccdeps.pyc')
p = os.path.join(conf.env.srcdir, 'buildtools/wafsamba/gccdeps.pyc')
if os.path.exists(p):
os.remove(p)
conf.load('gccdeps')
@ -480,7 +485,7 @@ struct foo bar = { .y = 'X', .x = 1 };
# see if we need special largefile flags
if not conf.CHECK_LARGEFILE():
raise Utils.WafError('Samba requires large file support support, but not available on this platform: sizeof(off_t) < 8')
raise Errors.WafError('Samba requires large file support support, but not available on this platform: sizeof(off_t) < 8')
if conf.env.HAVE_STDDEF_H and conf.env.HAVE_STDLIB_H:
conf.DEFINE('STDC_HEADERS', 1)
@ -586,10 +591,12 @@ struct foo bar = { .y = 'X', .x = 1 };
def build(bld):
# give a more useful message if the source directory has moved
relpath = os_path_relpath(bld.curdir, bld.srcnode.abspath())
curdir = bld.path.abspath()
srcdir = bld.srcnode.abspath()
relpath = os_path_relpath(curdir, srcdir)
if relpath.find('../') != -1:
Logs.error('bld.curdir %s is not a child of %s' % (bld.curdir, bld.srcnode.abspath()))
raise Utils.WafError('''The top source directory has moved. Please run distclean and reconfigure''')
Logs.error('bld.path %s is not a child of %s' % (curdir, srcdir))
raise Errors.WafError('''The top source directory has moved. Please run distclean and reconfigure''')
bld.CHECK_MAKEFLAGS()
bld.SETUP_BUILD_GROUPS()

View File

@ -4,7 +4,7 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2005-2016 (ita)
# Thomas Nagy, 2005-2018 (ita)
"""
Classes related to the build phase (build, clean, install, step, etc)
@ -147,17 +147,11 @@ class BuildContext(Context.Context):
if not hasattr(self, v):
setattr(self, v, {})
def set_cur(self, cur):
self.current_group = cur
def get_cur(self):
return self.current_group
cur = property(get_cur, set_cur)
def get_variant_dir(self):
"""Getter for the variant_dir attribute"""
if not self.variant:
return self.out_dir
return os.path.join(self.out_dir, self.variant)
return os.path.join(self.out_dir, os.path.normpath(self.variant))
variant_dir = property(get_variant_dir, None)
def __call__(self, *k, **kw):
@ -185,30 +179,6 @@ class BuildContext(Context.Context):
self.add_to_group(ret, group=kw.get('group'))
return ret
def rule(self, *k, **kw):
"""
Wrapper for creating a task generator using the decorator notation. The following code::
@bld.rule(target="foo")
def _(tsk):
print("bar")
is equivalent to::
def bar(tsk):
print("bar")
bld(
target = "foo",
rule = bar,
)
"""
def f(rule):
ret = self(*k, **kw)
ret.rule = rule
return ret
return f
def __copy__(self):
"""
Build contexts cannot be copied
@ -323,7 +293,7 @@ class BuildContext(Context.Context):
Node.Nod3 = self.node_class
try:
data = cPickle.loads(data)
except Exception ,e:
except Exception as e:
Logs.debug('build: Could not pickle the build cache %s: %r', dbfn, e)
else:
for x in SAVED_ATTRS:
@ -378,15 +348,19 @@ class BuildContext(Context.Context):
try:
self.producer.start()
except KeyboardInterrupt:
self.store()
if self.is_dirty():
self.store()
raise
else:
if self.producer.dirty:
if self.is_dirty():
self.store()
if self.producer.error:
raise Errors.BuildError(self.producer.error)
def is_dirty(self):
return self.producer.dirty
def setup(self, tool, tooldir=None, funs=None):
"""
Import waf tools defined during the configuration::
@ -404,11 +378,13 @@ class BuildContext(Context.Context):
:param funs: unused variable
"""
if isinstance(tool, list):
for i in tool: self.setup(i, tooldir)
for i in tool:
self.setup(i, tooldir)
return
module = Context.load_tool(tool, tooldir)
if hasattr(module, "setup"): module.setup(self)
if hasattr(module, "setup"):
module.setup(self)
def get_env(self):
"""Getter for the env property"""
@ -548,7 +524,8 @@ class BuildContext(Context.Context):
right = '][%s%s%s]' % (col1, self.timer, col2)
cols = Logs.get_term_cols() - len(left) - len(right) + 2*len(col1) + 2*len(col2)
if cols < 7: cols = 7
if cols < 7:
cols = 7
ratio = ((cols * idx)//total) - 1
@ -621,7 +598,7 @@ class BuildContext(Context.Context):
def add_to_group(self, tgen, group=None):
"""Adds a task or a task generator to the build; there is no attempt to remove it if it was already added."""
assert(isinstance(tgen, TaskGen.task_gen) or isinstance(tgen, Task.TaskBase))
assert(isinstance(tgen, TaskGen.task_gen) or isinstance(tgen, Task.Task))
tgen.bld = self
self.get_group(group).append(tgen)
@ -722,10 +699,16 @@ class BuildContext(Context.Context):
def get_targets(self):
"""
Returns the task generator corresponding to the 'targets' list; used internally
by :py:meth:`waflib.Build.BuildContext.get_build_iterator` to perform partial builds::
This method returns a pair containing the index of the last build group to post,
and the list of task generator objects corresponding to the target names.
This is used internally by :py:meth:`waflib.Build.BuildContext.get_build_iterator`
to perform partial builds::
$ waf --targets=myprogram,myshlib
:return: the minimum build group index, and list of task generators
:rtype: tuple
"""
to_post = []
min_grp = 0
@ -753,23 +736,21 @@ class BuildContext(Context.Context):
Post task generators from the group indexed by self.current_group; used internally
by :py:meth:`waflib.Build.BuildContext.get_build_iterator`
"""
def tgpost(tg):
try:
f = tg.post
except AttributeError:
pass
else:
f()
if self.targets == '*':
for tg in self.groups[self.current_group]:
try:
f = tg.post
except AttributeError:
pass
else:
f()
tgpost(tg)
elif self.targets:
if self.current_group < self._min_grp:
for tg in self.groups[self.current_group]:
try:
f = tg.post
except AttributeError:
pass
else:
f()
tgpost(tg)
else:
for tg in self._exact_tg:
tg.post()
@ -783,19 +764,19 @@ class BuildContext(Context.Context):
ln = self.srcnode
for tg in self.groups[self.current_group]:
try:
f = tg.post
p = tg.path
except AttributeError:
pass
else:
if tg.path.is_child_of(ln):
f()
if p.is_child_of(ln):
tgpost(tg)
def get_tasks_group(self, idx):
"""
Returns all task instances for the build group at position idx,
used internally by :py:meth:`waflib.Build.BuildContext.get_build_iterator`
:rtype: list of :py:class:`waflib.Task.TaskBase`
:rtype: list of :py:class:`waflib.Task.Task`
"""
tasks = []
for tg in self.groups[idx]:
@ -810,27 +791,23 @@ class BuildContext(Context.Context):
Creates a Python generator object that returns lists of tasks that may be processed in parallel.
:return: tasks which can be executed immediately
:rtype: generator returning lists of :py:class:`waflib.Task.TaskBase`
:rtype: generator returning lists of :py:class:`waflib.Task.Task`
"""
self.current_group = 0
if self.targets and self.targets != '*':
(self._min_grp, self._exact_tg) = self.get_targets()
global lazy_post
if self.post_mode != POST_LAZY:
while self.current_group < len(self.groups):
for self.current_group, _ in enumerate(self.groups):
self.post_group()
self.current_group += 1
self.current_group = 0
while self.current_group < len(self.groups):
for self.current_group, _ in enumerate(self.groups):
# first post the task generators for the group
if self.post_mode != POST_AT_ONCE:
self.post_group()
# then extract the tasks
tasks = self.get_tasks_group(self.current_group)
# if the constraints are set properly (ext_in/ext_out, before/after)
# the call to set_file_constraints may be removed (can be a 15% penalty on no-op rebuilds)
# (but leave set_file_constraints for the installation step)
@ -841,12 +818,11 @@ class BuildContext(Context.Context):
Task.set_precedence_constraints(tasks)
self.cur_tasks = tasks
self.current_group += 1
if not tasks: # return something else the build will stop
continue
yield tasks
if tasks:
yield tasks
while 1:
# the build stops once there are no tasks to process
yield []
def install_files(self, dest, files, **kw):
@ -1188,7 +1164,7 @@ class inst(Task.Task):
try:
self.copy_fun(src, tgt)
except EnvironmentError ,e:
except EnvironmentError as e:
if not os.path.exists(src):
Logs.error('File %r does not exist', src)
elif not os.path.isfile(src):
@ -1249,7 +1225,7 @@ class inst(Task.Task):
#self.uninstall.append(tgt)
try:
os.remove(tgt)
except OSError ,e:
except OSError as e:
if e.errno != errno.ENOENT:
if not getattr(self, 'uninstall_error', None):
self.uninstall_error = True
@ -1286,22 +1262,6 @@ class UninstallContext(InstallContext):
super(UninstallContext, self).__init__(**kw)
self.is_install = UNINSTALL
def execute(self):
"""
See :py:func:`waflib.Build.BuildContext.execute`.
"""
# TODO just mark the tasks are already run with hasrun=Task.SKIPPED?
try:
# do not execute any tasks
def runnable_status(self):
return Task.SKIP_ME
setattr(Task.Task, 'runnable_status_back', Task.Task.runnable_status)
setattr(Task.Task, 'runnable_status', runnable_status)
super(UninstallContext, self).execute()
finally:
setattr(Task.Task, 'runnable_status', Task.Task.runnable_status_back)
class CleanContext(BuildContext):
'''cleans the project'''
cmd = 'clean'
@ -1320,10 +1280,23 @@ class CleanContext(BuildContext):
self.store()
def clean(self):
"""Remove files from the build directory if possible, and reset the caches"""
"""
Remove most files from the build directory, and reset all caches.
Custom lists of files to clean can be declared as `bld.clean_files`.
For example, exclude `build/program/myprogram` from getting removed::
def build(bld):
bld.clean_files = bld.bldnode.ant_glob('**',
excl='.lock* config.log c4che/* config.h program/myprogram',
quiet=True, generator=True)
"""
Logs.debug('build: clean called')
if self.bldnode != self.srcnode:
if hasattr(self, 'clean_files'):
for n in self.clean_files:
n.delete()
elif self.bldnode != self.srcnode:
# would lead to a disaster if top == out
lst = []
for env in self.all_envs.values():
@ -1434,17 +1407,17 @@ class StepContext(BuildContext):
for pat in self.files.split(','):
matcher = self.get_matcher(pat)
for tg in g:
if isinstance(tg, Task.TaskBase):
if isinstance(tg, Task.Task):
lst = [tg]
else:
lst = tg.tasks
for tsk in lst:
do_exec = False
for node in getattr(tsk, 'inputs', []):
for node in tsk.inputs:
if matcher(node, output=False):
do_exec = True
break
for node in getattr(tsk, 'outputs', []):
for node in tsk.outputs:
if matcher(node, output=True):
do_exec = True
break
@ -1480,9 +1453,9 @@ class StepContext(BuildContext):
pattern = re.compile(pat)
def match(node, output):
if output == True and not out:
if output and not out:
return False
if output == False and not inn:
if not output and not inn:
return False
if anode:
@ -1502,3 +1475,4 @@ class EnvContext(BuildContext):
if not self.all_envs:
self.load_envs()
self.recurse([self.run_dir])

View File

@ -4,7 +4,7 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2005-2016 (ita)
# Thomas Nagy, 2005-2018 (ita)
"""
@ -48,9 +48,12 @@ class ConfigSet(object):
if 'foo' in env:
print(env['foo'])
"""
if key in self.table: return True
try: return self.parent.__contains__(key)
except AttributeError: return False # parent may not exist
if key in self.table:
return True
try:
return self.parent.__contains__(key)
except AttributeError:
return False # parent may not exist
def keys(self):
"""Dict interface"""
@ -89,13 +92,13 @@ class ConfigSet(object):
def __setitem__(self, key, value):
"""
Dictionary interface: set value for key
Dictionary interface: set value from key
"""
self.table[key] = value
def __delitem__(self, key):
"""
Dictionary interface: mark the key as missing
Dictionary interface: mark the value as missing
"""
self[key] = []
@ -108,7 +111,7 @@ class ConfigSet(object):
conf.env['value']
"""
if name in self.__slots__:
return object.__getattr__(self, name)
return object.__getattribute__(self, name)
else:
return self[name]
@ -184,7 +187,8 @@ class ConfigSet(object):
:type key: string
"""
s = self[key]
if isinstance(s, str): return s
if isinstance(s, str):
return s
return ' '.join(s)
def _get_list_value_for_modification(self, key):
@ -268,8 +272,10 @@ class ConfigSet(object):
env = self
while 1:
table_list.insert(0, env.table)
try: env = env.parent
except AttributeError: break
try:
env = env.parent
except AttributeError:
break
merged_table = {}
for table in table_list:
merged_table.update(table)
@ -356,3 +362,4 @@ class ConfigSet(object):
Reverts the object to a previous state. See :py:meth:`ConfigSet.stash`
"""
self.table = self.undo_stack.pop(-1)

View File

@ -4,7 +4,7 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2005-2016 (ita)
# Thomas Nagy, 2005-2018 (ita)
"""
Configuration system
@ -16,7 +16,7 @@ A :py:class:`waflib.Configure.ConfigurationContext` instance is created when ``w
* hold configuration routines such as ``find_program``, etc
"""
import os, shlex, sys, time, re, shutil
import os, re, shlex, shutil, sys, time, traceback
from waflib import ConfigSet, Utils, Options, Logs, Context, Build, Errors
WAF_CONFIG_LOG = 'config.log'
@ -201,17 +201,17 @@ class ConfigurationContext(Context.Context):
"""
if not env.PREFIX:
if Options.options.prefix or Utils.is_win32:
env.PREFIX = Utils.sane_path(Options.options.prefix)
env.PREFIX = Options.options.prefix
else:
env.PREFIX = ''
env.PREFIX = '/'
if not env.BINDIR:
if Options.options.bindir:
env.BINDIR = Utils.sane_path(Options.options.bindir)
env.BINDIR = Options.options.bindir
else:
env.BINDIR = Utils.subst_vars('${PREFIX}/bin', env)
if not env.LIBDIR:
if Options.options.libdir:
env.LIBDIR = Utils.sane_path(Options.options.libdir)
env.LIBDIR = Options.options.libdir
else:
env.LIBDIR = Utils.subst_vars('${PREFIX}/lib%s' % Utils.lib64(), env)
@ -227,12 +227,12 @@ class ConfigurationContext(Context.Context):
tmpenv = self.all_envs[key]
tmpenv.store(os.path.join(self.cachedir.abspath(), key + Build.CACHE_SUFFIX))
def load(self, input, tooldir=None, funs=None, with_sys_path=True, cache=False):
def load(self, tool_list, tooldir=None, funs=None, with_sys_path=True, cache=False):
"""
Load Waf tools, which will be imported whenever a build is started.
:param input: waf tools to import
:type input: list of string
:param tool_list: waf tools to import
:type tool_list: list of string
:param tooldir: paths for the imports
:type tooldir: list of string
:param funs: functions to execute from the waf tools
@ -241,8 +241,9 @@ class ConfigurationContext(Context.Context):
:type cache: bool
"""
tools = Utils.to_list(input)
if tooldir: tooldir = Utils.to_list(tooldir)
tools = Utils.to_list(tool_list)
if tooldir:
tooldir = Utils.to_list(tooldir)
for tool in tools:
# avoid loading the same tool more than once with the same functions
# used by composite projects
@ -257,11 +258,11 @@ class ConfigurationContext(Context.Context):
module = None
try:
module = Context.load_tool(tool, tooldir, ctx=self, with_sys_path=with_sys_path)
except ImportError ,e:
self.fatal('Could not load the Waf tool %r from %r\n%s' % (tool, sys.path, e))
except Exception ,e:
except ImportError as e:
self.fatal('Could not load the Waf tool %r from %r\n%s' % (tool, getattr(e, 'waf_sys_path', sys.path), e))
except Exception as e:
self.to_log('imp %r (%r & %r)' % (tool, tooldir, funs))
self.to_log(Utils.ex_stack())
self.to_log(traceback.format_exc())
raise
if funs is not None:
@ -269,8 +270,10 @@ class ConfigurationContext(Context.Context):
else:
func = getattr(module, 'configure', None)
if func:
if type(func) is type(Utils.readf): func(self)
else: self.eval_rules(func)
if type(func) is type(Utils.readf):
func(self)
else:
self.eval_rules(func)
self.tools.append({'tool':tool, 'tooldir':tooldir, 'funs':funs})
@ -373,11 +376,11 @@ def cmd_to_list(self, cmd):
return cmd
@conf
def check_waf_version(self, mini='1.8.99', maxi='2.0.0', **kw):
def check_waf_version(self, mini='1.9.99', maxi='2.1.0', **kw):
"""
Raise a Configuration error if the Waf version does not strictly match the given bounds::
conf.check_waf_version(mini='1.8.99', maxi='2.0.0')
conf.check_waf_version(mini='1.9.99', maxi='2.1.0')
:type mini: number, tuple or string
:param mini: Minimum required version
@ -399,7 +402,7 @@ def find_file(self, filename, path_list=[]):
:param filename: name of the file to search for
:param path_list: list of directories to search
:return: the first occurrence filename or '' if filename could not be found
:return: the first matching filename; else a configuration exception is raised
"""
for n in Utils.to_list(filename):
for d in Utils.to_list(path_list):
@ -429,6 +432,7 @@ def find_program(self, filename, **kw):
:type msg: string
:param interpreter: interpreter for the program
:type interpreter: ConfigSet variable key
:raises: :py:class:`waflib.Errors.ConfigurationError`
"""
exts = kw.get('exts', Utils.is_win32 and '.exe,.com,.bat,.cmd' or ',.sh,.pl,.py')
@ -587,7 +591,7 @@ def run_build(self, *k, **kw):
try:
bld.compile()
except Errors.WafError:
ret = 'Test does not build: %s' % Utils.ex_stack()
ret = 'Test does not build: %s' % traceback.format_exc()
self.fatal(ret)
else:
ret = getattr(bld, 'retval', 0)
@ -639,3 +643,4 @@ def test(self, *k, **kw):
else:
self.end_msg(self.ret_msg(kw['okmsg'], kw), **kw)
return ret

View File

@ -4,7 +4,7 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2010-2016 (ita)
# Thomas Nagy, 2010-2018 (ita)
"""
Classes and functions enabling the command system
@ -15,16 +15,16 @@ from waflib import Utils, Errors, Logs
import waflib.Node
# the following 3 constants are updated on each new release (do not touch)
HEXVERSION=0x1090a00
HEXVERSION=0x2000400
"""Constant updated on new releases"""
WAFVERSION="1.9.10"
WAFVERSION="2.0.4"
"""Constant updated on new releases"""
WAFREVISION="ae3f254315e0dcea4059703987148882ba414894"
WAFREVISION="5996879673deb7166b61a299be317a738de6891e"
"""Git revision when the waf version is updated"""
ABI = 99
ABI = 20
"""Version of the build data cache file format (used in :py:const:`waflib.Context.DBFILE`)"""
DBFILE = '.wafpickle-%s-%d-%d' % (sys.platform, sys.hexversion, ABI)
@ -85,7 +85,6 @@ def create_context(cmd_name, *k, **kw):
:return: Context object
:rtype: :py:class:`waflib.Context.Context`
"""
global classes
for x in classes:
if x.cmd == cmd_name:
return x(*k, **kw)
@ -99,8 +98,8 @@ class store_context(type):
Context classes must provide an attribute 'cmd' representing the command name, and a function
attribute 'fun' representing the function name that the command uses.
"""
def __init__(cls, name, bases, dict):
super(store_context, cls).__init__(name, bases, dict)
def __init__(cls, name, bases, dct):
super(store_context, cls).__init__(name, bases, dct)
name = cls.__name__
if name in ('ctx', 'Context'):
@ -114,7 +113,6 @@ class store_context(type):
if not getattr(cls, 'fun', None):
cls.fun = cls.cmd
global classes
classes.insert(0, cls)
ctx = store_context('ctx', (object,), {})
@ -154,7 +152,6 @@ class Context(ctx):
try:
rd = kw['run_dir']
except KeyError:
global run_dir
rd = run_dir
# binds the context to the nodes in use to avoid a context singleton
@ -205,7 +202,6 @@ class Context(ctx):
Here, it calls the function name in the top-level wscript file. Most subclasses
redefine this method to provide additional functionality.
"""
global g_module
self.recurse([os.path.dirname(g_module.root_path)])
def pre_recurse(self, node):
@ -300,6 +296,15 @@ class Context(ctx):
raise Errors.WafError('Cannot read the folder %r' % d)
raise Errors.WafError('No wscript file in directory %s' % d)
def log_command(self, cmd, kw):
if Logs.verbose:
fmt = os.environ.get('WAF_CMD_FORMAT')
if fmt == 'string':
if not isinstance(cmd, str):
cmd = Utils.shell_escape(cmd)
Logs.debug('runner: %r', cmd)
Logs.debug('runner_env: kw=%s', kw)
def exec_command(self, cmd, **kw):
"""
Runs an external process and returns the exit status::
@ -318,11 +323,12 @@ class Context(ctx):
:type kw: dict
:returns: process exit status
:rtype: integer
:raises: :py:class:`waflib.Errors.WafError` if an invalid executable is specified for a non-shell process
:raises: :py:class:`waflib.Errors.WafError` in case of execution failure
"""
subprocess = Utils.subprocess
kw['shell'] = isinstance(cmd, str)
Logs.debug('runner: %r', cmd)
Logs.debug('runner_env: kw=%s', kw)
self.log_command(cmd, kw)
if self.logger:
self.logger.info(cmd)
@ -354,19 +360,19 @@ class Context(ctx):
try:
ret, out, err = Utils.run_process(cmd, kw, cargs)
except Exception ,e:
except Exception as e:
raise Errors.WafError('Execution failure: %s' % str(e), ex=e)
if out:
if not isinstance(out, str):
out = out.decode(sys.stdout.encoding or 'iso8859-1', errors='replace')
out = out.decode(sys.stdout.encoding or 'latin-1', errors='replace')
if self.logger:
self.logger.debug('out: %s', out)
else:
Logs.info(out, extra={'stream':sys.stdout, 'c1': ''})
if err:
if not isinstance(err, str):
err = err.decode(sys.stdout.encoding or 'iso8859-1', errors='replace')
err = err.decode(sys.stdout.encoding or 'latin-1', errors='replace')
if self.logger:
self.logger.error('err: %s' % err)
else:
@ -378,29 +384,29 @@ class Context(ctx):
"""
Executes a process and returns stdout/stderr if the execution is successful.
An exception is thrown when the exit status is non-0. In that case, both stderr and stdout
will be bound to the WafError object::
will be bound to the WafError object (configuration tests)::
def configure(conf):
out = conf.cmd_and_log(['echo', 'hello'], output=waflib.Context.STDOUT, quiet=waflib.Context.BOTH)
(out, err) = conf.cmd_and_log(['echo', 'hello'], output=waflib.Context.BOTH)
(out, err) = conf.cmd_and_log(cmd, input='\\n', output=waflib.Context.STDOUT)
(out, err) = conf.cmd_and_log(cmd, input='\\n'.encode(), output=waflib.Context.STDOUT)
try:
conf.cmd_and_log(['which', 'someapp'], output=waflib.Context.BOTH)
except Exception ,e:
except Errors.WafError as e:
print(e.stdout, e.stderr)
:param cmd: args for subprocess.Popen
:type cmd: list or string
:param kw: keyword arguments for subprocess.Popen. The parameters input/timeout will be passed to wait/communicate.
:type kw: dict
:returns: process exit status
:rtype: integer
:returns: a tuple containing the contents of stdout and stderr
:rtype: string
:raises: :py:class:`waflib.Errors.WafError` if an invalid executable is specified for a non-shell process
:raises: :py:class:`waflib.Errors.WafError` in case of execution failure; stdout/stderr/returncode are bound to the exception object
"""
subprocess = Utils.subprocess
kw['shell'] = isinstance(cmd, str)
Logs.debug('runner: %r', cmd)
self.log_command(cmd, kw)
if 'quiet' in kw:
quiet = kw['quiet']
@ -440,13 +446,13 @@ class Context(ctx):
try:
ret, out, err = Utils.run_process(cmd, kw, cargs)
except Exception ,e:
except Exception as e:
raise Errors.WafError('Execution failure: %s' % str(e), ex=e)
if not isinstance(out, str):
out = out.decode(sys.stdout.encoding or 'iso8859-1', errors='replace')
out = out.decode(sys.stdout.encoding or 'latin-1', errors='replace')
if not isinstance(err, str):
err = err.decode(sys.stdout.encoding or 'iso8859-1', errors='replace')
err = err.decode(sys.stdout.encoding or 'latin-1', errors='replace')
if out and quiet != STDOUT and quiet != BOTH:
self.to_log('out: %s' % out)
@ -483,9 +489,15 @@ class Context(ctx):
if self.logger:
self.logger.info('from %s: %s' % (self.path.abspath(), msg))
try:
msg = '%s\n(complete log in %s)' % (msg, self.logger.handlers[0].baseFilename)
logfile = self.logger.handlers[0].baseFilename
except AttributeError:
pass
else:
if os.environ.get('WAF_PRINT_FAILURE_LOG'):
# see #1930
msg = 'Log from (%s):\n%s\n' % (logfile, Utils.readf(logfile))
else:
msg = '%s\n(complete log in %s)' % (msg, logfile)
raise self.errors.ConfigurationError(msg, ex=ex)
def to_log(self, msg):
@ -581,9 +593,9 @@ class Context(ctx):
result = kw.get('result') or k[0]
defcolor = 'GREEN'
if result == True:
if result is True:
msg = 'ok'
elif result == False:
elif not result:
msg = 'not found'
defcolor = 'YELLOW'
else:
@ -612,7 +624,6 @@ class Context(ctx):
:param ban: list of exact file names to exclude
:type ban: list of string
"""
global waf_dir
if os.path.isdir(waf_dir):
lst = self.root.find_node(waf_dir).find_node('waflib/extras').ant_glob(var)
for x in lst:
@ -696,6 +707,9 @@ def load_tool(tool, tooldir=None, ctx=None, with_sys_path=True):
sys.path = tooldir + sys.path
try:
__import__(tool)
except ImportError as e:
e.waf_sys_path = list(sys.path)
raise
finally:
for d in tooldir:
sys.path.remove(d)
@ -703,7 +717,8 @@ def load_tool(tool, tooldir=None, ctx=None, with_sys_path=True):
Context.tools[tool] = ret
return ret
else:
if not with_sys_path: sys.path.insert(0, waf_dir)
if not with_sys_path:
sys.path.insert(0, waf_dir)
try:
for x in ('waflib.Tools.%s', 'waflib.extras.%s', 'waflib.%s', '%s'):
try:
@ -713,11 +728,16 @@ def load_tool(tool, tooldir=None, ctx=None, with_sys_path=True):
x = None
else: # raise an exception
__import__(tool)
except ImportError as e:
e.waf_sys_path = list(sys.path)
raise
finally:
if not with_sys_path: sys.path.remove(waf_dir)
if not with_sys_path:
sys.path.remove(waf_dir)
ret = sys.modules[x % tool]
Context.tools[tool] = ret
return ret
finally:
if not with_sys_path:
sys.path += back_path

View File

@ -4,7 +4,7 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2010-2016 (ita)
# Thomas Nagy, 2010-2018 (ita)
"""
Exceptions used in the Waf code
@ -21,6 +21,7 @@ class WafError(Exception):
:param ex: exception causing this error (optional)
:type ex: exception
"""
Exception.__init__(self)
self.msg = msg
assert not isinstance(msg, Exception)
@ -53,7 +54,8 @@ class BuildError(WafError):
lst = ['Build failed']
for tsk in self.tasks:
txt = tsk.format_error()
if txt: lst.append(txt)
if txt:
lst.append(txt)
return '\n'.join(lst)
class ConfigurationError(WafError):
@ -67,3 +69,4 @@ class TaskRescan(WafError):
class TaskNotReady(WafError):
"""Task-specific exception type signalling that task signatures cannot be computed"""
pass

View File

@ -4,7 +4,7 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2005-2016 (ita)
# Thomas Nagy, 2005-2018 (ita)
"""
logging, colors, terminal width and pretty-print
@ -143,7 +143,6 @@ class log_filter(logging.Filter):
:param rec: log entry
"""
global verbose
rec.zone = rec.module
if rec.levelno >= logging.INFO:
return True
@ -257,18 +256,15 @@ def debug(*k, **kw):
"""
Wraps logging.debug and discards messages if the verbosity level :py:attr:`waflib.Logs.verbose` 0
"""
global verbose
if verbose:
k = list(k)
k[0] = k[0].replace('\n', ' ')
global log
log.debug(*k, **kw)
def error(*k, **kw):
"""
Wrap logging.errors, adds the stack trace when the verbosity level :py:attr:`waflib.Logs.verbose` 2
"""
global log, verbose
log.error(*k, **kw)
if verbose > 2:
st = traceback.extract_stack()
@ -279,20 +275,19 @@ def error(*k, **kw):
buf.append(' File %r, line %d, in %s' % (filename, lineno, name))
if line:
buf.append(' %s' % line.strip())
if buf: log.error('\n'.join(buf))
if buf:
log.error('\n'.join(buf))
def warn(*k, **kw):
"""
Wraps logging.warn
"""
global log
log.warn(*k, **kw)
def info(*k, **kw):
"""
Wraps logging.info
"""
global log
log.info(*k, **kw)
def init_log():
@ -331,7 +326,11 @@ def make_logger(path, name):
:type name: string
"""
logger = logging.getLogger(name)
hdlr = logging.FileHandler(path, 'w')
if sys.hexversion > 0x3000000:
encoding = sys.stdout.encoding
else:
encoding = None
hdlr = logging.FileHandler(path, 'w', encoding=encoding)
formatter = logging.Formatter('%(message)s')
hdlr.setFormatter(formatter)
logger.addHandler(hdlr)
@ -380,5 +379,5 @@ def pprint(col, msg, label='', sep='\n'):
:param sep: a string to append at the end (line separator)
:type sep: string
"""
global info
info('%s%s%s %s', colors(col), msg, colors.NORMAL, label, extra={'terminator':sep})

View File

@ -4,7 +4,7 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2005-2016 (ita)
# Thomas Nagy, 2005-2018 (ita)
"""
Node: filesystem structure
@ -34,6 +34,7 @@ exclude_regs = '''
**/.#*
**/%*%
**/._*
**/*.swp
**/CVS
**/CVS/**
**/.cvsignore
@ -64,6 +65,52 @@ Ant patterns for files and folders to exclude while doing the
recursive traversal in :py:meth:`waflib.Node.Node.ant_glob`
"""
def ant_matcher(s, ignorecase):
reflags = re.I if ignorecase else 0
ret = []
for x in Utils.to_list(s):
x = x.replace('\\', '/').replace('//', '/')
if x.endswith('/'):
x += '**'
accu = []
for k in x.split('/'):
if k == '**':
accu.append(k)
else:
k = k.replace('.', '[.]').replace('*','.*').replace('?', '.').replace('+', '\\+')
k = '^%s$' % k
try:
exp = re.compile(k, flags=reflags)
except Exception as e:
raise Errors.WafError('Invalid pattern: %s' % k, e)
else:
accu.append(exp)
ret.append(accu)
return ret
def ant_sub_filter(name, nn):
ret = []
for lst in nn:
if not lst:
pass
elif lst[0] == '**':
ret.append(lst)
if len(lst) > 1:
if lst[1].match(name):
ret.append(lst[2:])
else:
ret.append([])
elif lst[0].match(name):
ret.append(lst[1:])
return ret
def ant_sub_matcher(name, pats):
nacc = ant_sub_filter(name, pats[0])
nrej = ant_sub_filter(name, pats[1])
if [] in nrej:
nacc = []
return [nacc, nrej]
class Node(object):
"""
This class is organized in two parts:
@ -125,7 +172,7 @@ class Node(object):
"""
raise Errors.WafError('nodes are not supposed to be copied')
def read(self, flags='r', encoding='ISO8859-1'):
def read(self, flags='r', encoding='latin-1'):
"""
Reads and returns the contents of the file represented by this node, see :py:func:`waflib.Utils.readf`::
@ -141,7 +188,7 @@ class Node(object):
"""
return Utils.readf(self.abspath(), flags, encoding)
def write(self, data, flags='w', encoding='ISO8859-1'):
def write(self, data, flags='w', encoding='latin-1'):
"""
Writes data to the file represented by this node, see :py:func:`waflib.Utils.writef`::
@ -344,6 +391,11 @@ class Node(object):
if isinstance(lst, str):
lst = [x for x in Utils.split_path(lst) if x and x != '.']
if lst and lst[0].startswith('\\\\') and not self.parent:
node = self.ctx.root.make_node(lst[0])
node.cache_isdir = True
return node.find_node(lst[1:])
cur = self
for x in lst:
if x == '..':
@ -525,7 +577,7 @@ class Node(object):
p = p.parent
return p is node
def ant_iter(self, accept=None, maxdepth=25, pats=[], dir=False, src=True, remove=True):
def ant_iter(self, accept=None, maxdepth=25, pats=[], dir=False, src=True, remove=True, quiet=False):
"""
Recursive method used by :py:meth:`waflib.Node.ant_glob`.
@ -541,6 +593,8 @@ class Node(object):
:type src: bool
:param remove: remove files/folders that do not exist (True by default)
:type remove: bool
:param quiet: disable build directory traversal warnings (verbose mode)
:type quiet: bool
:returns: A generator object to iterate from
:rtype: iterator
"""
@ -568,14 +622,13 @@ class Node(object):
if isdir:
if dir:
yield node
else:
if src:
yield node
elif src:
yield node
if isdir:
node.cache_isdir = True
if maxdepth:
for k in node.ant_iter(accept=accept, maxdepth=maxdepth - 1, pats=npats, dir=dir, src=src, remove=remove):
for k in node.ant_iter(accept=accept, maxdepth=maxdepth - 1, pats=npats, dir=dir, src=src, remove=remove, quiet=quiet):
yield k
raise StopIteration
@ -607,79 +660,41 @@ class Node(object):
:type dir: bool
:param src: return files (True by default)
:type src: bool
:param remove: remove files/folders that do not exist (True by default)
:type remove: bool
:param maxdepth: maximum depth of recursion
:type maxdepth: int
:param ignorecase: ignore case while matching (False by default)
:type ignorecase: bool
:returns: The corresponding Nodes
:rtype: list of :py:class:`waflib.Node.Node` instances
:type generator: bool
:param remove: remove files/folders that do not exist (True by default)
:type remove: bool
:param quiet: disable build directory traversal warnings (verbose mode)
:type quiet: bool
:returns: Whether to evaluate the Nodes lazily, alters the type of the returned value
:rtype: by default, list of :py:class:`waflib.Node.Node` instances
"""
src = kw.get('src', True)
dir = kw.get('dir', False)
dir = kw.get('dir')
excl = kw.get('excl', exclude_regs)
incl = k and k[0] or kw.get('incl', '**')
reflags = kw.get('ignorecase', 0) and re.I
remove = kw.get('remove', True)
maxdepth = kw.get('maxdepth', 25)
ignorecase = kw.get('ignorecase', False)
quiet = kw.get('quiet', False)
pats = (ant_matcher(incl, ignorecase), ant_matcher(excl, ignorecase))
def to_pat(s):
lst = Utils.to_list(s)
ret = []
for x in lst:
x = x.replace('\\', '/').replace('//', '/')
if x.endswith('/'):
x += '**'
lst2 = x.split('/')
accu = []
for k in lst2:
if k == '**':
accu.append(k)
else:
k = k.replace('.', '[.]').replace('*','.*').replace('?', '.').replace('+', '\\+')
k = '^%s$' % k
try:
#print "pattern", k
accu.append(re.compile(k, flags=reflags))
except Exception ,e:
raise Errors.WafError('Invalid pattern: %s' % k, e)
ret.append(accu)
return ret
if kw.get('generator'):
return Utils.lazy_generator(self.ant_iter, (ant_sub_matcher, maxdepth, pats, dir, src, remove, quiet))
def filtre(name, nn):
ret = []
for lst in nn:
if not lst:
pass
elif lst[0] == '**':
ret.append(lst)
if len(lst) > 1:
if lst[1].match(name):
ret.append(lst[2:])
else:
ret.append([])
elif lst[0].match(name):
ret.append(lst[1:])
return ret
it = self.ant_iter(ant_sub_matcher, maxdepth, pats, dir, src, remove, quiet)
if kw.get('flat'):
# returns relative paths as a space-delimited string
# prefer Node objects whenever possible
return ' '.join(x.path_from(self) for x in it)
return list(it)
def accept(name, pats):
nacc = filtre(name, pats[0])
nrej = filtre(name, pats[1])
if [] in nrej:
nacc = []
return [nacc, nrej]
ret = [x for x in self.ant_iter(accept=accept, pats=[to_pat(incl), to_pat(excl)], maxdepth=kw.get('maxdepth', 25), dir=dir, src=src, remove=kw.get('remove', True))]
if kw.get('flat', False):
return ' '.join([x.path_from(self) for x in ret])
return ret
# --------------------------------------------------------------------------------
# the following methods require the source/build folders (bld.srcnode/bld.bldnode)
# using a subclass is a possibility, but is that really necessary?
# --------------------------------------------------------------------------------
# ----------------------------------------------------------------------------
# the methods below require the source/build folders (bld.srcnode/bld.bldnode)
def is_src(self):
"""
@ -784,29 +799,19 @@ class Node(object):
def find_or_declare(self, lst):
"""
Use this method in the build phase to declare output files.
Use this method in the build phase to declare output files which
are meant to be written in the build directory.
If 'self' is in build directory, it first tries to return an existing node object.
If no Node is found, it tries to find one in the source directory.
If no Node is found, a new Node object is created in the build directory, and the
intermediate folders are added.
This method creates the Node object and its parent folder
as needed.
:param lst: relative path
:type lst: string or list of string
"""
if isinstance(lst, str):
lst = [x for x in Utils.split_path(lst) if x and x != '.']
node = self.get_bld().search_node(lst)
if node:
if not os.path.isfile(node.abspath()):
node.parent.mkdir()
return node
self = self.get_src()
node = self.find_node(lst)
if node:
return node
node = self.get_bld().make_node(lst)
if isinstance(lst, str) and os.path.isabs(lst):
node = self.ctx.root.make_node(lst)
else:
node = self.get_bld().make_node(lst)
node.parent.mkdir()
return node
@ -923,22 +928,11 @@ class Node(object):
raise
return ret
# --------------------------------------------
# TODO waf 2.0, remove the sig and cache_sig attributes
def get_sig(self):
return self.h_file()
def set_sig(self, val):
# clear the cache, so that past implementation should still work
try:
del self.get_bld_sig.__cache__[(self,)]
except (AttributeError, KeyError):
pass
sig = property(get_sig, set_sig)
cache_sig = property(get_sig, set_sig)
pickle_lock = Utils.threading.Lock()
"""Lock mandatory for thread-safe node serialization"""
class Nod3(Node):
"""Mandatory subclass for thread-safe node serialization"""
pass # do not remove

View File

@ -5,7 +5,7 @@
#!/usr/bin/env python
# encoding: utf-8
# Scott Newton, 2005 (scottn)
# Thomas Nagy, 2006-2016 (ita)
# Thomas Nagy, 2006-2018 (ita)
"""
Support for waf command-line options
@ -17,7 +17,7 @@ that reads the ``options`` wscript function.
import os, tempfile, optparse, sys, re
from waflib import Logs, Utils, Context, Errors
options = {}
options = optparse.Values()
"""
A global dictionary representing user-provided command-line options::
@ -46,11 +46,25 @@ class opt_parser(optparse.OptionParser):
"""
Command-line options parser.
"""
def __init__(self, ctx):
optparse.OptionParser.__init__(self, conflict_handler="resolve",
def __init__(self, ctx, allow_unknown=False):
optparse.OptionParser.__init__(self, conflict_handler='resolve', add_help_option=False,
version='waf %s (%s)' % (Context.WAFVERSION, Context.WAFREVISION))
self.formatter.width = Logs.get_term_cols()
self.ctx = ctx
self.allow_unknown = allow_unknown
def _process_args(self, largs, rargs, values):
"""
Custom _process_args to allow unknown options according to the allow_unknown status
"""
while rargs:
try:
optparse.OptionParser._process_args(self,largs,rargs,values)
except (optparse.BadOptionError, optparse.AmbiguousOptionError) as e:
if self.allow_unknown:
largs.append(e.opt_str)
else:
self.error(str(e))
def print_usage(self, file=None):
return self.print_help(file)
@ -121,7 +135,9 @@ class OptionsContext(Context.Context):
p('-k', '--keep', dest='keep', default=0, action='count', help='continue despite errors (-kk to try harder)')
p('-v', '--verbose', dest='verbose', default=0, action='count', help='verbosity level -v -vv or -vvv [default: 0]')
p('--zones', dest='zones', default='', action='store', help='debugging zones (task_gen, deps, tasks, etc)')
p('--profile', dest='profile', default='', action='store_true', help=optparse.SUPPRESS_HELP)
p('--profile', dest='profile', default=0, action='store_true', help=optparse.SUPPRESS_HELP)
p('--pdb', dest='pdb', default=0, action='store_true', help=optparse.SUPPRESS_HELP)
p('-h', '--help', dest='whelp', default=0, action='store_true', help="show this help message and exit")
gr = self.add_option_group('Configuration options')
self.option_groups['configure options'] = gr
@ -247,31 +263,79 @@ class OptionsContext(Context.Context):
return group
return None
def parse_args(self, _args=None):
"""
Parses arguments from a list which is not necessarily the command-line.
def sanitize_path(self, path, cwd=None):
if not cwd:
cwd = Context.launch_dir
p = os.path.expanduser(path)
p = os.path.join(cwd, p)
p = os.path.normpath(p)
p = os.path.abspath(p)
return p
:param _args: arguments
:type _args: list of strings
def parse_cmd_args(self, _args=None, cwd=None, allow_unknown=False):
"""
global options, commands, envvars
Just parse the arguments
"""
self.parser.allow_unknown = allow_unknown
(options, leftover_args) = self.parser.parse_args(args=_args)
envvars = []
commands = []
for arg in leftover_args:
if '=' in arg:
envvars.append(arg)
else:
elif arg != 'options':
commands.append(arg)
if options.destdir:
options.destdir = Utils.sane_path(options.destdir)
for name in 'top out destdir prefix bindir libdir'.split():
# those paths are usually expanded from Context.launch_dir
if getattr(options, name, None):
path = self.sanitize_path(getattr(options, name), cwd)
setattr(options, name, path)
return options, commands, envvars
def init_module_vars(self, arg_options, arg_commands, arg_envvars):
options.__dict__.clear()
del commands[:]
del envvars[:]
options.__dict__.update(arg_options.__dict__)
commands.extend(arg_commands)
envvars.extend(arg_envvars)
for var in envvars:
(name, value) = var.split('=', 1)
os.environ[name.strip()] = value
def init_logs(self, options, commands, envvars):
Logs.verbose = options.verbose
if options.verbose >= 1:
self.load('errcheck')
colors = {'yes' : 2, 'auto' : 1, 'no' : 0}[options.colors]
Logs.enable_colors(colors)
if options.zones:
Logs.zones = options.zones.split(',')
if not Logs.verbose:
Logs.verbose = 1
elif Logs.verbose > 0:
Logs.zones = ['runner']
if Logs.verbose > 2:
Logs.zones = ['*']
def parse_args(self, _args=None):
"""
Parses arguments from a list which is not necessarily the command-line.
Initializes the module variables options, commands and envvars
If help is requested, prints it and exit the application
:param _args: arguments
:type _args: list of strings
"""
options, commands, envvars = self.parse_cmd_args()
self.init_logs(options, commands, envvars)
self.init_module_vars(options, commands, envvars)
def execute(self):
"""
See :py:func:`waflib.Context.Context.execute`
@ -279,3 +343,4 @@ class OptionsContext(Context.Context):
super(OptionsContext, self).execute()
self.parse_args()
Utils.alloc_process_pool(options.jobs)

View File

@ -4,24 +4,50 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2005-2016 (ita)
# Thomas Nagy, 2005-2018 (ita)
"""
Runner.py: Task scheduling and execution
"""
import random
import heapq, traceback
try:
from queue import Queue
except ImportError:
from Queue import Queue
from waflib import Utils, Task, Errors, Logs
GAP = 20
GAP = 5
"""
Wait for at least ``GAP * njobs`` before trying to enqueue more tasks to run
"""
class PriorityTasks(object):
def __init__(self):
self.lst = []
def __len__(self):
return len(self.lst)
def __iter__(self):
return iter(self.lst)
def clear(self):
self.lst = []
def append(self, task):
heapq.heappush(self.lst, task)
def appendleft(self, task):
heapq.heappush(self.lst, task)
def pop(self):
return heapq.heappop(self.lst)
def extend(self, lst):
if self.lst:
for x in lst:
self.append(x)
else:
if isinstance(lst, list):
self.lst = lst
heapq.heapify(lst)
else:
self.lst = lst.lst
class Consumer(Utils.threading.Thread):
"""
Daemon thread object that executes a task. It shares a semaphore with
@ -42,7 +68,7 @@ class Consumer(Utils.threading.Thread):
"""
try:
if not self.spawner.master.stop:
self.task.process()
self.spawner.master.process_task(self.task)
finally:
self.spawner.sem.release()
self.spawner.master.out.put(self.task)
@ -53,7 +79,7 @@ class Spawner(Utils.threading.Thread):
"""
Daemon thread that consumes tasks from :py:class:`waflib.Runner.Parallel` producer and
spawns a consuming thread :py:class:`waflib.Runner.Consumer` for each
:py:class:`waflib.Task.TaskBase` instance.
:py:class:`waflib.Task.Task` instance.
"""
def __init__(self, master):
Utils.threading.Thread.__init__(self)
@ -106,22 +132,25 @@ class Parallel(object):
Instance of :py:class:`waflib.Build.BuildContext`
"""
self.outstanding = Utils.deque()
"""List of :py:class:`waflib.Task.TaskBase` that may be ready to be executed"""
self.outstanding = PriorityTasks()
"""Heap of :py:class:`waflib.Task.Task` that may be ready to be executed"""
self.frozen = Utils.deque()
"""List of :py:class:`waflib.Task.TaskBase` that are not ready yet"""
self.postponed = PriorityTasks()
"""Heap of :py:class:`waflib.Task.Task` which are not ready to run for non-DAG reasons"""
self.incomplete = set()
"""List of :py:class:`waflib.Task.Task` waiting for dependent tasks to complete (DAG)"""
self.ready = Queue(0)
"""List of :py:class:`waflib.Task.TaskBase` ready to be executed by consumers"""
"""List of :py:class:`waflib.Task.Task` ready to be executed by consumers"""
self.out = Queue(0)
"""List of :py:class:`waflib.Task.TaskBase` returned by the task consumers"""
"""List of :py:class:`waflib.Task.Task` returned by the task consumers"""
self.count = 0
"""Amount of tasks that may be processed by :py:class:`waflib.Runner.TaskConsumer`"""
self.processed = 1
self.processed = 0
"""Amount of tasks processed"""
self.stop = False
@ -138,6 +167,11 @@ class Parallel(object):
Flag that indicates that the build cache must be saved when a task was executed
(calls :py:meth:`waflib.Build.BuildContext.store`)"""
self.revdeps = Utils.defaultdict(set)
"""
The reverse dependency graph of dependencies obtained from Task.run_after
"""
self.spawner = Spawner(self)
"""
Coordinating daemon thread that spawns thread consumers
@ -147,28 +181,26 @@ class Parallel(object):
"""
Obtains the next Task instance to run
:rtype: :py:class:`waflib.Task.TaskBase`
:rtype: :py:class:`waflib.Task.Task`
"""
if not self.outstanding:
return None
return self.outstanding.popleft()
return self.outstanding.pop()
def postpone(self, tsk):
"""
Adds the task to the list :py:attr:`waflib.Runner.Parallel.frozen`.
Adds the task to the list :py:attr:`waflib.Runner.Parallel.postponed`.
The order is scrambled so as to consume as many tasks in parallel as possible.
:param tsk: task instance
:type tsk: :py:class:`waflib.Task.TaskBase`
:type tsk: :py:class:`waflib.Task.Task`
"""
if random.randint(0, 1):
self.frozen.appendleft(tsk)
else:
self.frozen.append(tsk)
self.postponed.append(tsk)
def refill_task_list(self):
"""
Adds the next group of tasks to execute in :py:attr:`waflib.Runner.Parallel.outstanding`.
Pulls a next group of tasks to execute in :py:attr:`waflib.Runner.Parallel.outstanding`.
Ensures that all tasks in the current build group are complete before processing the next one.
"""
while self.count > self.numjobs * GAP:
self.get_out()
@ -176,54 +208,105 @@ class Parallel(object):
while not self.outstanding:
if self.count:
self.get_out()
elif self.frozen:
if self.outstanding:
break
elif self.postponed:
try:
cond = self.deadlock == self.processed
except AttributeError:
pass
else:
if cond:
msg = 'check the build order for the tasks'
for tsk in self.frozen:
if not tsk.run_after:
msg = 'check the methods runnable_status'
break
lst = []
for tsk in self.frozen:
lst.append('%s\t-> %r' % (repr(tsk), [id(x) for x in tsk.run_after]))
raise Errors.WafError('Deadlock detected: %s%s' % (msg, ''.join(lst)))
for tsk in self.postponed:
deps = [id(x) for x in tsk.run_after if not x.hasrun]
lst.append('%s\t-> %r' % (repr(tsk), deps))
if not deps:
lst.append('\n task %r dependencies are done, check its *runnable_status*?' % id(tsk))
raise Errors.WafError('Deadlock detected: check the task build order%s' % ''.join(lst))
self.deadlock = self.processed
if self.frozen:
self.outstanding.extend(self.frozen)
self.frozen.clear()
if self.postponed:
self.outstanding.extend(self.postponed)
self.postponed.clear()
elif not self.count:
self.outstanding.extend(self.biter.next())
self.total = self.bld.total()
break
if self.incomplete:
for x in self.incomplete:
for k in x.run_after:
if not k.hasrun:
break
else:
# dependency added after the build started without updating revdeps
self.incomplete.remove(x)
self.outstanding.append(x)
break
else:
raise Errors.WafError('Broken revdeps detected on %r' % self.incomplete)
else:
tasks = next(self.biter)
ready, waiting = self.prio_and_split(tasks)
self.outstanding.extend(ready)
self.incomplete.update(waiting)
self.total = self.bld.total()
break
def add_more_tasks(self, tsk):
"""
If a task provides :py:attr:`waflib.Task.TaskBase.more_tasks`, then the tasks contained
If a task provides :py:attr:`waflib.Task.Task.more_tasks`, then the tasks contained
in that list are added to the current build and will be processed before the next build group.
The priorities for dependent tasks are not re-calculated globally
:param tsk: task instance
:type tsk: :py:attr:`waflib.Task.TaskBase`
:type tsk: :py:attr:`waflib.Task.Task`
"""
if getattr(tsk, 'more_tasks', None):
self.outstanding.extend(tsk.more_tasks)
# TODO recompute priorities globally?
ready, waiting = self.prio_and_split(tsk.more_tasks)
self.outstanding.extend(ready)
self.incomplete.update(waiting)
self.total += len(tsk.more_tasks)
def mark_finished(self, tsk):
def try_unfreeze(x):
# DAG ancestors are likely to be in the incomplete set
if x in self.incomplete:
# TODO remove dependencies to free some memory?
# x.run_after.remove(tsk)
for k in x.run_after:
if not k.hasrun:
break
else:
self.incomplete.remove(x)
self.outstanding.append(x)
if tsk in self.revdeps:
for x in self.revdeps[tsk]:
if isinstance(x, Task.TaskGroup):
x.prev.remove(tsk)
if not x.prev:
for k in x.next:
# TODO necessary optimization?
k.run_after.remove(x)
try_unfreeze(k)
# TODO necessary optimization?
x.next = []
else:
try_unfreeze(x)
del self.revdeps[tsk]
def get_out(self):
"""
Waits for a Task that task consumers add to :py:attr:`waflib.Runner.Parallel.out` after execution.
Adds more Tasks if necessary through :py:attr:`waflib.Runner.Parallel.add_more_tasks`.
:rtype: :py:attr:`waflib.Task.TaskBase`
:rtype: :py:attr:`waflib.Task.Task`
"""
tsk = self.out.get()
if not self.stop:
self.add_more_tasks(tsk)
self.mark_finished(tsk)
self.count -= 1
self.dirty = True
return tsk
@ -233,32 +316,42 @@ class Parallel(object):
Enqueue a Task to :py:attr:`waflib.Runner.Parallel.ready` so that consumers can run them.
:param tsk: task instance
:type tsk: :py:attr:`waflib.Task.TaskBase`
:type tsk: :py:attr:`waflib.Task.Task`
"""
self.ready.put(tsk)
def process_task(self, tsk):
"""
Processes a task and attempts to stop the build in case of errors
"""
tsk.process()
if tsk.hasrun != Task.SUCCESS:
self.error_handler(tsk)
def skip(self, tsk):
"""
Mark a task as skipped/up-to-date
"""
tsk.hasrun = Task.SKIPPED
self.mark_finished(tsk)
def cancel(self, tsk):
"""
Mark a task as failed because of unsatisfiable dependencies
"""
tsk.hasrun = Task.CANCELED
self.mark_finished(tsk)
def error_handler(self, tsk):
"""
Called when a task cannot be executed. The flag :py:attr:`waflib.Runner.Parallel.stop` is set, unless
the build is executed with::
Called when a task cannot be executed. The flag :py:attr:`waflib.Runner.Parallel.stop` is set,
unless the build is executed with::
$ waf build -k
:param tsk: task instance
:type tsk: :py:attr:`waflib.Task.TaskBase`
:type tsk: :py:attr:`waflib.Task.Task`
"""
if hasattr(tsk, 'scan') and hasattr(tsk, 'uid'):
# TODO waf 2.0 - this breaks encapsulation
try:
del self.bld.imp_sigs[tsk.uid()]
except KeyError:
pass
if not self.bld.keep:
self.stop = True
self.error.append(tsk)
@ -274,11 +367,11 @@ class Parallel(object):
return tsk.runnable_status()
except Exception:
self.processed += 1
tsk.err_msg = Utils.ex_stack()
tsk.err_msg = traceback.format_exc()
if not self.stop and self.bld.keep:
self.skip(tsk)
if self.bld.keep == 1:
# if -k stop at the first exception, if -kk try to go as far as possible
# if -k stop on the first exception, if -kk try to go as far as possible
if Logs.verbose > 1 or not self.error:
self.error.append(tsk)
self.stop = True
@ -286,9 +379,10 @@ class Parallel(object):
if Logs.verbose > 1:
self.error.append(tsk)
return Task.EXCEPTION
tsk.hasrun = Task.EXCEPTION
tsk.hasrun = Task.EXCEPTION
self.error_handler(tsk)
return Task.EXCEPTION
def start(self):
@ -320,10 +414,9 @@ class Parallel(object):
self.processed += 1
continue
if self.stop: # stop immediately after a failure was detected
if self.stop: # stop immediately after a failure is detected
break
st = self.task_status(tsk)
if st == Task.RUN_ME:
self.count += 1
@ -332,17 +425,24 @@ class Parallel(object):
if self.numjobs == 1:
tsk.log_display(tsk.generator.bld)
try:
tsk.process()
self.process_task(tsk)
finally:
self.out.put(tsk)
else:
self.add_task(tsk)
if st == Task.ASK_LATER:
elif st == Task.ASK_LATER:
self.postpone(tsk)
elif st == Task.SKIP_ME:
self.processed += 1
self.skip(tsk)
self.add_more_tasks(tsk)
elif st == Task.CANCEL_ME:
# A dependency problem has occurred, and the
# build is most likely run with `waf -k`
if Logs.verbose > 1:
self.error.append(tsk)
self.processed += 1
self.cancel(tsk)
# self.count represents the tasks that have been made available to the consumer threads
# collect all the tasks after an error else the message may be incomplete
@ -350,4 +450,110 @@ class Parallel(object):
self.get_out()
self.ready.put(None)
assert (self.count == 0 or self.stop)
if not self.stop:
assert not self.count
assert not self.postponed
assert not self.incomplete
def prio_and_split(self, tasks):
"""
Label input tasks with priority values, and return a pair containing
the tasks that are ready to run and the tasks that are necessarily
waiting for other tasks to complete.
The priority system is really meant as an optional layer for optimization:
dependency cycles are found quickly, and builds should be more efficient.
A high priority number means that a task is processed first.
This method can be overridden to disable the priority system::
def prio_and_split(self, tasks):
return tasks, []
:return: A pair of task lists
:rtype: tuple
"""
# to disable:
#return tasks, []
for x in tasks:
x.visited = 0
reverse = self.revdeps
for x in tasks:
for k in x.run_after:
if isinstance(k, Task.TaskGroup):
if k.done:
pass
else:
k.done = True
for j in k.prev:
reverse[j].add(k)
else:
reverse[k].add(x)
# the priority number is not the tree depth
def visit(n):
if isinstance(n, Task.TaskGroup):
return sum(visit(k) for k in n.next)
if n.visited == 0:
n.visited = 1
if n in reverse:
rev = reverse[n]
n.prio_order = n.tree_weight + len(rev) + sum(visit(k) for k in rev)
else:
n.prio_order = n.tree_weight
n.visited = 2
elif n.visited == 1:
raise Errors.WafError('Dependency cycle found!')
return n.prio_order
for x in tasks:
if x.visited != 0:
# must visit all to detect cycles
continue
try:
visit(x)
except Errors.WafError:
self.debug_cycles(tasks, reverse)
ready = []
waiting = []
for x in tasks:
for k in x.run_after:
if not k.hasrun:
waiting.append(x)
break
else:
ready.append(x)
return (ready, waiting)
def debug_cycles(self, tasks, reverse):
tmp = {}
for x in tasks:
tmp[x] = 0
def visit(n, acc):
if isinstance(n, Task.TaskGroup):
for k in n.next:
visit(k, acc)
return
if tmp[n] == 0:
tmp[n] = 1
for k in reverse.get(n, []):
visit(k, [n] + acc)
tmp[n] = 2
elif tmp[n] == 1:
lst = []
for tsk in acc:
lst.append(repr(tsk))
if tsk is n:
# exclude prior nodes, we want the minimum cycle
break
raise Errors.WafError('Task dependency cycle in "run_after" constraints: %s' % ''.join(lst))
for x in tasks:
visit(x, [])

View File

@ -4,10 +4,12 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2005-2016 (ita)
# Thomas Nagy, 2005-2018 (ita)
"Module called for configuring, compiling and installing targets"
from __future__ import with_statement
import os, shlex, shutil, traceback, errno, sys, stat
from waflib import Utils, Configure, Logs, Options, ConfigSet, Context, Errors, Build, Node
@ -28,58 +30,49 @@ def waf_entry_point(current_directory, version, wafdir):
:param wafdir: absolute path representing the directory of the waf library
:type wafdir: string
"""
Logs.init_log()
if Context.WAFVERSION != version:
Logs.error('Waf script %r and library %r do not match (directory %r)', version, Context.WAFVERSION, wafdir)
sys.exit(1)
if '--version' in sys.argv:
Context.run_dir = current_directory
ctx = Context.create_context('options')
ctx.curdir = current_directory
ctx.parse_args()
sys.exit(0)
# Store current directory before any chdir
Context.waf_dir = wafdir
Context.run_dir = Context.launch_dir = current_directory
start_dir = current_directory
no_climb = os.environ.get('NOCLIMB')
if len(sys.argv) > 1:
# os.path.join handles absolute paths in sys.argv[1] accordingly (it discards the previous ones)
# os.path.join handles absolute paths
# if sys.argv[1] is not an absolute path, then it is relative to the current working directory
potential_wscript = os.path.join(current_directory, sys.argv[1])
# maybe check if the file is executable
# perhaps extract 'wscript' as a constant
if os.path.basename(potential_wscript) == 'wscript' and os.path.isfile(potential_wscript):
if os.path.basename(potential_wscript) == Context.WSCRIPT_FILE and os.path.isfile(potential_wscript):
# need to explicitly normalize the path, as it may contain extra '/.'
# TODO abspath?
current_directory = os.path.normpath(os.path.dirname(potential_wscript))
path = os.path.normpath(os.path.dirname(potential_wscript))
start_dir = os.path.abspath(path)
no_climb = True
sys.argv.pop(1)
Context.waf_dir = wafdir
Context.launch_dir = current_directory
ctx = Context.create_context('options')
(options, commands, env) = ctx.parse_cmd_args(allow_unknown=True)
if options.top:
start_dir = Context.run_dir = Context.top_dir = options.top
no_climb = True
if options.out:
Context.out_dir = options.out
# if 'configure' is in the commands, do not search any further
no_climb = os.environ.get('NOCLIMB')
if not no_climb:
for k in no_climb_commands:
for y in sys.argv:
for y in commands:
if y.startswith(k):
no_climb = True
break
# if --top is provided assume the build started in the top directory
for i, x in enumerate(sys.argv):
# WARNING: this modifies sys.argv
if x.startswith('--top='):
Context.run_dir = Context.top_dir = Utils.sane_path(x[6:])
sys.argv[i] = '--top=' + Context.run_dir
if x.startswith('--out='):
Context.out_dir = Utils.sane_path(x[6:])
sys.argv[i] = '--out=' + Context.out_dir
# try to find a lock file (if the project was configured)
# at the same time, store the first wscript file seen
cur = current_directory
while cur and not Context.top_dir:
cur = start_dir
while cur:
try:
lst = os.listdir(cur)
except OSError:
@ -134,14 +127,11 @@ def waf_entry_point(current_directory, version, wafdir):
break
if not Context.run_dir:
if '-h' in sys.argv or '--help' in sys.argv:
Logs.warn('No wscript file found: the help message may be incomplete')
Context.run_dir = current_directory
ctx = Context.create_context('options')
ctx.curdir = current_directory
ctx.parse_args()
if options.whelp:
Logs.warn('These are the generic options (no wscript/project found)')
ctx.parser.print_help()
sys.exit(0)
Logs.error('Waf: Run from a directory containing a file named %r', Context.WSCRIPT_FILE)
Logs.error('Waf: Run from a folder containing a %r file (or try -h for the generic options)', Context.WSCRIPT_FILE)
sys.exit(1)
try:
@ -152,31 +142,40 @@ def waf_entry_point(current_directory, version, wafdir):
try:
set_main_module(os.path.normpath(os.path.join(Context.run_dir, Context.WSCRIPT_FILE)))
except Errors.WafError ,e:
except Errors.WafError as e:
Logs.pprint('RED', e.verbose_msg)
Logs.error(str(e))
sys.exit(1)
except Exception ,e:
except Exception as e:
Logs.error('Waf: The wscript in %r is unreadable', Context.run_dir)
traceback.print_exc(file=sys.stdout)
sys.exit(2)
if '--profile' in sys.argv:
if options.profile:
import cProfile, pstats
cProfile.runctx('from waflib import Scripting; Scripting.run_commands()', {}, {}, 'profi.txt')
p = pstats.Stats('profi.txt')
p.sort_stats('time').print_stats(75) # or 'cumulative'
else:
try:
run_commands()
except Errors.WafError ,e:
try:
run_commands()
except:
if options.pdb:
import pdb
type, value, tb = sys.exc_info()
traceback.print_exc()
pdb.post_mortem(tb)
else:
raise
except Errors.WafError as e:
if Logs.verbose > 1:
Logs.pprint('RED', e.verbose_msg)
Logs.error(e.msg)
sys.exit(1)
except SystemExit:
raise
except Exception ,e:
except Exception as e:
traceback.print_exc(file=sys.stdout)
sys.exit(2)
except KeyboardInterrupt:
@ -217,29 +216,13 @@ def parse_options():
Parses the command-line options and initialize the logging system.
Called by :py:func:`waflib.Scripting.waf_entry_point` during the initialization.
"""
Context.create_context('options').execute()
for var in Options.envvars:
(name, value) = var.split('=', 1)
os.environ[name.strip()] = value
ctx = Context.create_context('options')
ctx.execute()
if not Options.commands:
Options.commands = [default_cmd]
Options.commands = [x for x in Options.commands if x != 'options'] # issue 1076
# process some internal Waf options
Logs.verbose = Options.options.verbose
#Logs.init_log()
if Options.options.zones:
Logs.zones = Options.options.zones.split(',')
if not Logs.verbose:
Logs.verbose = 1
elif Logs.verbose > 0:
Logs.zones = ['runner']
if Logs.verbose > 2:
Logs.zones = ['*']
Options.commands.append(default_cmd)
if Options.options.whelp:
ctx.parser.print_help()
sys.exit(0)
def run_command(cmd_name):
"""
@ -305,38 +288,53 @@ def distclean_dir(dirname):
pass
def distclean(ctx):
'''removes the build directory'''
lst = os.listdir('.')
for f in lst:
if f == Options.lockfile:
try:
proj = ConfigSet.ConfigSet(f)
except IOError:
Logs.warn('Could not read %r', f)
continue
'''removes build folders and data'''
if proj['out_dir'] != proj['top_dir']:
try:
shutil.rmtree(proj['out_dir'])
except EnvironmentError ,e:
if e.errno != errno.ENOENT:
Logs.warn('Could not remove %r', proj['out_dir'])
else:
distclean_dir(proj['out_dir'])
def remove_and_log(k, fun):
try:
fun(k)
except EnvironmentError as e:
if e.errno != errno.ENOENT:
Logs.warn('Could not remove %r', k)
for k in (proj['out_dir'], proj['top_dir'], proj['run_dir']):
p = os.path.join(k, Options.lockfile)
try:
os.remove(p)
except OSError ,e:
if e.errno != errno.ENOENT:
Logs.warn('Could not remove %r', p)
# remove waf cache folders on the top-level
if not Options.commands:
for k in os.listdir('.'):
for x in '.waf-2 waf-2 .waf3-2 waf3-2'.split():
if k.startswith(x):
remove_and_log(k, shutil.rmtree)
# remove local waf cache folders
if not Options.commands:
for x in '.waf-1. waf-1. .waf3-1. waf3-1.'.split():
if f.startswith(x):
shutil.rmtree(f, ignore_errors=True)
# remove a build folder, if any
cur = '.'
if ctx.options.no_lock_in_top:
cur = ctx.options.out
try:
lst = os.listdir(cur)
except OSError:
Logs.warn('Could not read %r', cur)
return
if Options.lockfile in lst:
f = os.path.join(cur, Options.lockfile)
try:
env = ConfigSet.ConfigSet(f)
except EnvironmentError:
Logs.warn('Could not read %r', f)
return
if not env.out_dir or not env.top_dir:
Logs.warn('Invalid lock file %r', f)
return
if env.out_dir == env.top_dir:
distclean_dir(env.out_dir)
else:
remove_and_log(env.out_dir, shutil.rmtree)
for k in (env.out_dir, env.top_dir, env.run_dir):
p = os.path.join(k, Options.lockfile)
remove_and_log(p, os.remove)
class Dist(Context.Context):
'''creates an archive containing the project source code'''
@ -391,11 +389,11 @@ class Dist(Context.Context):
self.fatal('Valid algo types are tar.bz2, tar.gz, tar.xz or zip')
try:
from hashlib import sha1
from hashlib import sha256
except ImportError:
digest = ''
else:
digest = ' (sha=%r)' % sha1(node.read(flags='rb')).hexdigest()
digest = ' (sha256=%r)' % sha256(node.read(flags='rb')).hexdigest()
Logs.info('New archive created: %s%s', self.arch_name, digest)
@ -424,11 +422,8 @@ class Dist(Context.Context):
tinfo.gname = 'root'
if os.path.isfile(p):
fu = open(p, 'rb')
try:
tar.addfile(tinfo, fileobj=fu)
finally:
fu.close()
with open(p, 'rb') as f:
tar.addfile(tinfo, fileobj=f)
else:
tar.addfile(tinfo)
@ -490,7 +485,7 @@ class Dist(Context.Context):
try:
return self.excl
except AttributeError:
self.excl = Node.exclude_regs + ' **/waf-1.8.* **/.waf-1.8* **/waf3-1.8.* **/.waf3-1.8* **/*~ **/*.rej **/*.orig **/*.pyc **/*.pyo **/*.bak **/*.swp **/.lock-w*'
self.excl = Node.exclude_regs + ' **/waf-2.* **/.waf-2.* **/waf3-2.* **/.waf3-2.* **/*~ **/*.rej **/*.orig **/*.pyc **/*.pyo **/*.bak **/*.swp **/.lock-w*'
if Context.out_dir:
nd = self.root.find_node(Context.out_dir)
if nd:
@ -523,11 +518,7 @@ def dist(ctx):
pass
class DistCheck(Dist):
"""
Creates an archive of the project, then attempts to build the project in a temporary directory::
$ waf distcheck
"""
"""creates an archive with dist, then tries to build it"""
fun = 'distcheck'
cmd = 'distcheck'
@ -554,12 +545,9 @@ class DistCheck(Dist):
"""
import tempfile, tarfile
try:
t = tarfile.open(self.get_arch_name())
with tarfile.open(self.get_arch_name()) as t:
for x in t:
t.extract(x)
finally:
t.close()
instdir = tempfile.mkdtemp('.inst', self.get_base_name())
cmd = self.make_distcheck_cmd(instdir)
@ -613,7 +601,8 @@ def autoconfigure(execute_method):
cmd = env.config_cmd or 'configure'
if Configure.autoconfig == 'clobber':
tmp = Options.options.__dict__
Options.options.__dict__ = env.options
if env.options:
Options.options.__dict__ = env.options
try:
run_command(cmd)
finally:
@ -625,3 +614,4 @@ def autoconfigure(execute_method):
return execute_method(self)
return execute
Build.BuildContext.execute = autoconfigure(Build.BuildContext.execute)

View File

@ -4,13 +4,13 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2005-2016 (ita)
# Thomas Nagy, 2005-2018 (ita)
"""
Tasks represent atomic operations such as processes.
"""
import os, re, sys, tempfile
import os, re, sys, tempfile, traceback
from waflib import Utils, Logs, Errors
# task states
@ -26,6 +26,9 @@ CRASHED = 2
EXCEPTION = 3
"""An exception occurred in the task execution"""
CANCELED = 4
"""A dependency for the task is missing so it was cancelled"""
SKIPPED = 8
"""The task did not have to be executed"""
@ -41,6 +44,9 @@ SKIP_ME = -2
RUN_ME = -3
"""The task must be executed"""
CANCEL_ME = -4
"""The task cannot be executed because of a dependency problem"""
COMPILE_TEMPLATE_SHELL = '''
def f(tsk):
env = tsk.env
@ -90,8 +96,7 @@ class store_task_type(type):
super(store_task_type, cls).__init__(name, bases, dict)
name = cls.__name__
if name != 'evil' and name != 'TaskBase':
global classes
if name != 'evil' and name != 'Task':
if getattr(cls, 'run_str', None):
# if a string is provided, convert it to a method
(f, dvars) = compile_fun(cls.run_str, cls.shell)
@ -112,20 +117,21 @@ class store_task_type(type):
evil = store_task_type('evil', (object,), {})
"Base class provided to avoid writing a metaclass, so the code can run in python 2.6 and 3.x unmodified"
class TaskBase(evil):
class Task(evil):
"""
Base class for all Waf tasks, which should be seen as an interface.
For illustration purposes, instances of this class will execute the attribute
'fun' in :py:meth:`waflib.Task.TaskBase.run`. When in doubt, create
subclasses of :py:class:`waflib.Task.Task` instead.
Subclasses must override these methods:
#. __str__: string to display to the user
#. runnable_status: ask the task if it should be run, skipped, or if we have to ask later
#. run: what to do to execute the task
#. post_run: what to do after the task has been executed
This class deals with the filesystem (:py:class:`waflib.Node.Node`). The method :py:class:`waflib.Task.Task.runnable_status`
uses a hash value (from :py:class:`waflib.Task.Task.signature`) which is persistent from build to build. When the value changes,
the task has to be executed. The method :py:class:`waflib.Task.Task.post_run` will assign the task signature to the output
nodes (if present).
"""
vars = []
"""ConfigSet variables that should trigger a rebuild (class attribute used for :py:meth:`waflib.Task.Task.sig_vars`)"""
always_run = False
"""Specify whether task instances must always be executed or not (class attribute)"""
shell = False
"""Execute the command with the shell (class attribute)"""
color = 'GREEN'
"""Color for the console display, see :py:const:`waflib.Logs.colors_lst`"""
@ -142,7 +148,7 @@ class TaskBase(evil):
after = []
"""List of task class names to execute after instances of this class"""
hcode = ''
hcode = Utils.SIG_NIL
"""String representing an additional hash for the class representation"""
keep_last_cmd = False
@ -150,32 +156,51 @@ class TaskBase(evil):
This may be useful for certain extensions but it can a lot of memory.
"""
__slots__ = ('hasrun', 'generator')
weight = 0
"""Optional weight to tune the priority for task instances.
The higher, the earlier. The weight only applies to single task objects."""
tree_weight = 0
"""Optional weight to tune the priority of task instances and whole subtrees.
The higher, the earlier."""
prio_order = 0
"""Priority order set by the scheduler on instances during the build phase.
You most likely do not need to set it.
"""
__slots__ = ('hasrun', 'generator', 'env', 'inputs', 'outputs', 'dep_nodes', 'run_after')
def __init__(self, *k, **kw):
"""
The base task class requires a task generator (set to *self* if missing)
"""
self.hasrun = NOT_RUN
try:
self.generator = kw['generator']
except KeyError:
self.generator = self
def __repr__(self):
return '\n\t{task %r: %s %s}' % (self.__class__.__name__, id(self), str(getattr(self, 'fun', '')))
self.env = kw['env']
""":py:class:`waflib.ConfigSet.ConfigSet` object (make sure to provide one)"""
def __str__(self):
"String to display to the user"
if hasattr(self, 'fun'):
return self.fun.__name__
return self.__class__.__name__
self.inputs = []
"""List of input nodes, which represent the files used by the task instance"""
def keyword(self):
"Display keyword used to prettify the console outputs"
if hasattr(self, 'fun'):
return 'Function'
return 'Processing'
self.outputs = []
"""List of output nodes, which represent the files created by the task instance"""
self.dep_nodes = []
"""List of additional nodes to depend on"""
self.run_after = set()
"""Set of tasks that must be executed before this one"""
def __lt__(self, other):
return self.priority() > other.priority()
def __le__(self, other):
return self.priority() >= other.priority()
def __gt__(self, other):
return self.priority() < other.priority()
def __ge__(self, other):
return self.priority() <= other.priority()
def get_cwd(self):
"""
@ -209,6 +234,15 @@ class TaskBase(evil):
x = '"%s"' % x
return x
def priority(self):
"""
Priority of execution; the higher, the earlier
:return: the priority value
:rtype: a tuple of numeric values
"""
return (self.weight + self.prio_order, - getattr(self.generator, 'tg_idx_count', 0))
def split_argfile(self, cmd):
"""
Splits a list of process commands into the executable part and its list of arguments
@ -229,6 +263,13 @@ class TaskBase(evil):
:type cmd: list of string (best) or string (process will use a shell)
:return: the return code
:rtype: int
Optional parameters:
#. cwd: current working directory (Node or string)
#. stdout: set to None to prevent waf from capturing the process standard output
#. stderr: set to None to prevent waf from capturing the process standard error
#. timeout: timeout value (Python 3)
"""
if not 'cwd' in kw:
kw['cwd'] = self.get_cwd()
@ -240,13 +281,18 @@ class TaskBase(evil):
env = kw['env'] = dict(kw.get('env') or self.env.env or os.environ)
env['PATH'] = self.env.PATH if isinstance(self.env.PATH, str) else os.pathsep.join(self.env.PATH)
if hasattr(self, 'stdout'):
kw['stdout'] = self.stdout
if hasattr(self, 'stderr'):
kw['stderr'] = self.stderr
# workaround for command line length limit:
# http://support.microsoft.com/kb/830473
if not isinstance(cmd, str) and (len(repr(cmd)) >= 8192 if Utils.is_win32 else len(cmd) > 200000):
cmd, args = self.split_argfile(cmd)
try:
(fd, tmp) = tempfile.mkstemp()
os.write(fd, '\r\n'.join(args))
os.write(fd, '\r\n'.join(args).encode())
os.close(fd)
if Logs.verbose:
Logs.debug('argfile: @%r -> %r', tmp, args)
@ -260,36 +306,16 @@ class TaskBase(evil):
else:
return self.generator.bld.exec_command(cmd, **kw)
def runnable_status(self):
"""
Returns the Task status
:return: a task state in :py:const:`waflib.Task.RUN_ME`, :py:const:`waflib.Task.SKIP_ME` or :py:const:`waflib.Task.ASK_LATER`.
:rtype: int
"""
return RUN_ME
def uid(self):
"""
Computes a unique identifier for the task
:rtype: string or bytes
"""
return Utils.SIG_NIL
def process(self):
"""
Assume that the task has had a ``master`` which is an instance of :py:class:`waflib.Runner.Parallel`.
Execute the task and then put it back in the queue :py:attr:`waflib.Runner.Parallel.out` (may be replaced by subclassing).
Runs the task and handles errors
:return: 0 or None if everything is fine
:rtype: integer
"""
# remove the task signature immediately before it is executed
# in case of failure the task will be executed again
m = self.generator.bld.producer
# so that the task will be executed again in case of failure
try:
# TODO another place for this?
del self.generator.bld.task_sigs[self.uid()]
except KeyError:
pass
@ -297,44 +323,29 @@ class TaskBase(evil):
try:
ret = self.run()
except Exception:
self.err_msg = Utils.ex_stack()
self.err_msg = traceback.format_exc()
self.hasrun = EXCEPTION
# TODO cleanup
m.error_handler(self)
return
if ret:
self.err_code = ret
self.hasrun = CRASHED
else:
try:
self.post_run()
except Errors.WafError:
pass
except Exception:
self.err_msg = Utils.ex_stack()
self.hasrun = EXCEPTION
if ret:
self.err_code = ret
self.hasrun = CRASHED
else:
self.hasrun = SUCCESS
if self.hasrun != SUCCESS:
m.error_handler(self)
try:
self.post_run()
except Errors.WafError:
pass
except Exception:
self.err_msg = traceback.format_exc()
self.hasrun = EXCEPTION
else:
self.hasrun = SUCCESS
def run(self):
"""
Called by threads to execute the tasks. The default is empty and meant to be overridden in subclasses.
.. warning:: It is a bad idea to create nodes in this method, so avoid :py:meth:`waflib.Node.Node.ant_glob`
:rtype: int
"""
if hasattr(self, 'fun'):
return self.fun(self)
return 0
def post_run(self):
"Update build data after successful Task execution. Override in subclasses."
pass
if self.hasrun != SUCCESS and self.scan:
# rescan dependencies on next run
try:
del self.generator.bld.imp_sigs[self.uid()]
except KeyError:
pass
def log_display(self, bld):
"Writes the execution status on the context logger"
@ -367,10 +378,7 @@ class TaskBase(evil):
def cur():
# the current task position, computed as late as possible
tmp = -1
if hasattr(master, 'ready'):
tmp -= master.ready.qsize()
return master.processed + tmp
return master.processed - master.ready.qsize()
if self.generator.bld.progress_bar == 1:
return self.generator.bld.progress_line(cur(), master.total, col1, col2)
@ -406,9 +414,7 @@ class TaskBase(evil):
:return: a hash value
:rtype: string
"""
cls = self.__class__
tup = (str(cls.before), str(cls.after), str(cls.ext_in), str(cls.ext_out), cls.__name__, cls.hcode)
return hash(tup)
return (tuple(self.before), tuple(self.after), tuple(self.ext_in), tuple(self.ext_out), self.__class__.__name__, self.hcode)
def format_error(self):
"""
@ -432,6 +438,8 @@ class TaskBase(evil):
return ' -> task in %r failed%s' % (name, msg)
elif self.hasrun == MISSING:
return ' -> missing files in %r%s' % (name, msg)
elif self.hasrun == CANCELED:
return ' -> %r canceled because of missing dependencies' % name
else:
return 'invalid status for task in %r: %r' % (name, self.hasrun)
@ -442,12 +450,12 @@ class TaskBase(evil):
The results will be slightly different if FOO_ST is a list, for example::
env.FOO_ST = ['-a', '-b']
env.FOO = ['p1', 'p2']
env.FOO_ST = '-I%s'
# ${FOO_ST:FOO} returns
['-Ip1', '-Ip2']
env.FOO = ['p1', 'p2']
env.FOO_ST = ['-a', '-b']
# ${FOO_ST:FOO} returns
['-a', '-b', 'p1', '-a', '-b', 'p2']
"""
@ -468,40 +476,6 @@ class TaskBase(evil):
lst.append(y)
return lst
class Task(TaskBase):
"""
This class deals with the filesystem (:py:class:`waflib.Node.Node`). The method :py:class:`waflib.Task.Task.runnable_status`
uses a hash value (from :py:class:`waflib.Task.Task.signature`) which is persistent from build to build. When the value changes,
the task has to be executed. The method :py:class:`waflib.Task.Task.post_run` will assign the task signature to the output
nodes (if present).
"""
vars = []
"""ConfigSet variables that should trigger a rebuild (class attribute used for :py:meth:`waflib.Task.Task.sig_vars`)"""
always_run = False
"""Specify whether task instances must always be executed or not (class attribute)"""
shell = False
"""Execute the command with the shell (class attribute)"""
def __init__(self, *k, **kw):
TaskBase.__init__(self, *k, **kw)
self.env = kw['env']
""":py:class:`waflib.ConfigSet.ConfigSet` object (make sure to provide one)"""
self.inputs = []
"""List of input nodes, which represent the files used by the task instance"""
self.outputs = []
"""List of output nodes, which represent the files created by the task instance"""
self.dep_nodes = []
"""List of additional nodes to depend on"""
self.run_after = set()
"""Set of tasks that must be executed before this one"""
def __str__(self):
"string to display to the user"
name = self.__class__.__name__
@ -517,14 +491,14 @@ class Task(TaskBase):
src_str = ' '.join([a.path_from(a.ctx.launch_node()) for a in self.inputs])
tgt_str = ' '.join([a.path_from(a.ctx.launch_node()) for a in self.outputs])
if self.outputs: sep = ' -> '
else: sep = ''
if self.outputs:
sep = ' -> '
else:
sep = ''
return '%s: %s%s%s' % (self.__class__.__name__, src_str, sep, tgt_str)
def keyword(self):
"""
See :py:meth:`waflib.Task.TaskBase`
"""
"Display keyword used to prettify the console outputs"
name = self.__class__.__name__
if name.endswith(('lib', 'program')):
return 'Linking'
@ -581,8 +555,10 @@ class Task(TaskBase):
:param inp: input nodes
:type inp: node or list of nodes
"""
if isinstance(inp, list): self.inputs += inp
else: self.inputs.append(inp)
if isinstance(inp, list):
self.inputs += inp
else:
self.inputs.append(inp)
def set_outputs(self, out):
"""
@ -591,8 +567,10 @@ class Task(TaskBase):
:param out: output nodes
:type out: node or list of nodes
"""
if isinstance(out, list): self.outputs += out
else: self.outputs.append(out)
if isinstance(out, list):
self.outputs += out
else:
self.outputs.append(out)
def set_run_after(self, task):
"""
@ -601,7 +579,7 @@ class Task(TaskBase):
:param task: task
:type task: :py:class:`waflib.Task.Task`
"""
assert isinstance(task, TaskBase)
assert isinstance(task, Task)
self.run_after.add(task)
def signature(self):
@ -650,13 +628,22 @@ class Task(TaskBase):
def runnable_status(self):
"""
See :py:meth:`waflib.Task.TaskBase.runnable_status`
Returns the Task status
:return: a task state in :py:const:`waflib.Task.RUN_ME`,
:py:const:`waflib.Task.SKIP_ME`, :py:const:`waflib.Task.CANCEL_ME` or :py:const:`waflib.Task.ASK_LATER`.
:rtype: int
"""
#return 0 # benchmarking
bld = self.generator.bld
if bld.is_install < 0:
return SKIP_ME
for t in self.run_after:
if not t.hasrun:
return ASK_LATER
elif t.hasrun < SKIPPED:
# a dependency has an error
return CANCEL_ME
# first compute the signature
try:
@ -665,7 +652,6 @@ class Task(TaskBase):
return ASK_LATER
# compare the signature to a signature computed previously
bld = self.generator.bld
key = self.uid()
try:
prev_sig = bld.task_sigs[key]
@ -733,10 +719,11 @@ class Task(TaskBase):
continue
for v in d:
if isinstance(v, bld.root.__class__):
try:
v = v.get_bld_sig()
elif hasattr(v, '__call__'):
v = v() # dependency is a function, call it
except AttributeError:
if hasattr(v, '__call__'):
v = v() # dependency is a function, call it
upd(v)
def sig_vars(self):
@ -869,10 +856,10 @@ if sys.hexversion > 0x3000000:
try:
return self.uid_
except AttributeError:
m = Utils.md5(self.__class__.__name__.encode('iso8859-1', 'xmlcharrefreplace'))
m = Utils.md5(self.__class__.__name__.encode('latin-1', 'xmlcharrefreplace'))
up = m.update
for x in self.inputs + self.outputs:
up(x.abspath().encode('iso8859-1', 'xmlcharrefreplace'))
up(x.abspath().encode('latin-1', 'xmlcharrefreplace'))
self.uid_ = m.digest()
return self.uid_
uid.__doc__ = Task.uid.__doc__
@ -889,9 +876,9 @@ def is_before(t1, t2):
waflib.Task.is_before(t1, t2) # True
:param t1: Task object
:type t1: :py:class:`waflib.Task.TaskBase`
:type t1: :py:class:`waflib.Task.Task`
:param t2: Task object
:type t2: :py:class:`waflib.Task.TaskBase`
:type t2: :py:class:`waflib.Task.Task`
"""
to_list = Utils.to_list
for k in to_list(t2.ext_in):
@ -911,27 +898,50 @@ def set_file_constraints(tasks):
Updates the ``run_after`` attribute of all tasks based on the task inputs and outputs
:param tasks: tasks
:type tasks: list of :py:class:`waflib.Task.TaskBase`
:type tasks: list of :py:class:`waflib.Task.Task`
"""
ins = Utils.defaultdict(set)
outs = Utils.defaultdict(set)
for x in tasks:
for a in getattr(x, 'inputs', []) + getattr(x, 'dep_nodes', []):
ins[id(a)].add(x)
for a in getattr(x, 'outputs', []):
outs[id(a)].add(x)
for a in x.inputs:
ins[a].add(x)
for a in x.dep_nodes:
ins[a].add(x)
for a in x.outputs:
outs[a].add(x)
links = set(ins.keys()).intersection(outs.keys())
for k in links:
for a in ins[k]:
a.run_after.update(outs[k])
class TaskGroup(object):
"""
Wrap nxm task order constraints into a single object
to prevent the creation of large list/set objects
This is an optimization
"""
def __init__(self, prev, next):
self.prev = prev
self.next = next
self.done = False
def get_hasrun(self):
for k in self.prev:
if not k.hasrun:
return NOT_RUN
return SUCCESS
hasrun = property(get_hasrun, None)
def set_precedence_constraints(tasks):
"""
Updates the ``run_after`` attribute of all tasks based on the after/before/ext_out/ext_in attributes
:param tasks: tasks
:type tasks: list of :py:class:`waflib.Task.TaskBase`
:type tasks: list of :py:class:`waflib.Task.Task`
"""
cstr_groups = Utils.defaultdict(list)
for x in tasks:
@ -957,9 +967,16 @@ def set_precedence_constraints(tasks):
else:
continue
aval = set(cstr_groups[keys[a]])
for x in cstr_groups[keys[b]]:
x.run_after.update(aval)
a = cstr_groups[keys[a]]
b = cstr_groups[keys[b]]
if len(a) < 2 or len(b) < 2:
for x in b:
x.run_after.update(a)
else:
group = TaskGroup(set(a), set(b))
for x in b:
x.run_after.add(group)
def funex(c):
"""
@ -1011,11 +1028,15 @@ def compile_fun_shell(line):
app = parm.append
for (var, meth) in extr:
if var == 'SRC':
if meth: app('tsk.inputs%s' % meth)
else: app('" ".join([a.path_from(cwdx) for a in tsk.inputs])')
if meth:
app('tsk.inputs%s' % meth)
else:
app('" ".join([a.path_from(cwdx) for a in tsk.inputs])')
elif var == 'TGT':
if meth: app('tsk.outputs%s' % meth)
else: app('" ".join([a.path_from(cwdx) for a in tsk.outputs])')
if meth:
app('tsk.outputs%s' % meth)
else:
app('" ".join([a.path_from(cwdx) for a in tsk.outputs])')
elif meth:
if meth.startswith(':'):
if var not in dvars:
@ -1043,8 +1064,10 @@ def compile_fun_shell(line):
if var not in dvars:
dvars.append(var)
app("p('%s')" % var)
if parm: parm = "%% (%s) " % (',\n\t\t'.join(parm))
else: parm = ''
if parm:
parm = "%% (%s) " % (',\n\t\t'.join(parm))
else:
parm = ''
c = COMPILE_TEMPLATE_SHELL % (line, parm)
Logs.debug('action: %s', c.strip().splitlines())
@ -1136,7 +1159,7 @@ def compile_fun(line, shell=False):
"""
Parses a string expression such as '${CC} ${SRC} -o ${TGT}' and returns a pair containing:
* The function created (compiled) for use as :py:meth:`waflib.Task.TaskBase.run`
* The function created (compiled) for use as :py:meth:`waflib.Task.Task.run`
* The list of variables that must cause rebuilds when *env* data is modified
for example::
@ -1208,7 +1231,6 @@ def task_factory(name, func=None, vars=None, color='GREEN', ext_in=[], ext_out=[
params['run'] = func
cls = type(Task)(name, (Task,), params)
global classes
classes[name] = cls
if ext_in:
@ -1222,21 +1244,6 @@ def task_factory(name, func=None, vars=None, color='GREEN', ext_in=[], ext_out=[
return cls
TaskBase = Task
"Provided for compatibility reasons, TaskBase should not be used"
def always_run(cls):
"""
Deprecated Task class decorator (to be removed in waf 2.0)
Set all task instances of this class to be executed whenever a build is started
The task signature is calculated, but the result of the comparison between
task signatures is bypassed
"""
Logs.warn('This decorator is deprecated, set always_run on the task class instead!')
cls.always_run = True
return cls
def update_outputs(cls):
"""
Obsolete, to be removed in waf 2.0
"""
return cls

View File

@ -4,7 +4,7 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2005-2016 (ita)
# Thomas Nagy, 2005-2018 (ita)
"""
Task generators
@ -24,7 +24,7 @@ HEADER_EXTS = ['.h', '.hpp', '.hxx', '.hh']
class task_gen(object):
"""
Instances of this class create :py:class:`waflib.Task.TaskBase` when
Instances of this class create :py:class:`waflib.Task.Task` when
calling the method :py:meth:`waflib.TaskGen.task_gen.post` from the main thread.
A few notes:
@ -38,7 +38,7 @@ class task_gen(object):
mappings = Utils.ordered_iter_dict()
"""Mappings are global file extension mappings that are retrieved in the order of definition"""
prec = Utils.defaultdict(list)
prec = Utils.defaultdict(set)
"""Dict that holds the precedence execution rules for task generator methods"""
def __init__(self, *k, **kw):
@ -52,7 +52,7 @@ class task_gen(object):
The extra key/value elements passed in ``kw`` are set as attributes
"""
self.source = ''
self.source = []
self.target = ''
self.meths = []
@ -80,12 +80,20 @@ class task_gen(object):
self.env = self.bld.env.derive()
self.path = self.bld.path # emulate chdir when reading scripts
# provide a unique id
# Provide a unique index per folder
# This is part of a measure to prevent output file name collisions
path = self.path.abspath()
try:
self.idx = self.bld.idx[self.path] = self.bld.idx.get(self.path, 0) + 1
self.idx = self.bld.idx[path] = self.bld.idx.get(path, 0) + 1
except AttributeError:
self.bld.idx = {}
self.idx = self.bld.idx[self.path] = 1
self.idx = self.bld.idx[path] = 1
# Record the global task generator count
try:
self.tg_idx_count = self.bld.tg_idx_count = self.bld.tg_idx_count + 1
except AttributeError:
self.tg_idx_count = self.bld.tg_idx_count = 1
for key, val in kw.items():
setattr(self, key, val)
@ -190,11 +198,12 @@ class task_gen(object):
tmp = []
for a in keys:
for x in prec.values():
if a in x: break
if a in x:
break
else:
tmp.append(a)
tmp.sort()
tmp.sort(reverse=True)
# topological sort
out = []
@ -214,13 +223,13 @@ class task_gen(object):
break
else:
tmp.append(x)
tmp.sort(reverse=True)
if prec:
buf = ['Cycle detected in the method execution:']
for k, v in prec.items():
buf.append('- %s after %s' % (k, [x for x in v if x in prec]))
raise Errors.WafError('\n'.join(buf))
out.reverse()
self.meths = out
# then we run the methods in order
@ -268,7 +277,7 @@ class task_gen(object):
:param tgt: output nodes
:type tgt: list of :py:class:`waflib.Tools.Node.Node`
:return: A task object
:rtype: :py:class:`waflib.Task.TaskBase`
:rtype: :py:class:`waflib.Task.Task`
"""
task = Task.classes[name](env=self.env.derive(), generator=self)
if src:
@ -434,9 +443,7 @@ def before_method(*k):
def deco(func):
setattr(task_gen, func.__name__, func)
for fun_name in k:
if not func.__name__ in task_gen.prec[fun_name]:
task_gen.prec[fun_name].append(func.__name__)
#task_gen.prec[fun_name].sort()
task_gen.prec[func.__name__].add(fun_name)
return func
return deco
before = before_method
@ -463,9 +470,7 @@ def after_method(*k):
def deco(func):
setattr(task_gen, func.__name__, func)
for fun_name in k:
if not fun_name in task_gen.prec[func.__name__]:
task_gen.prec[func.__name__].append(fun_name)
#task_gen.prec[func.__name__].sort()
task_gen.prec[fun_name].add(func.__name__)
return func
return deco
after = after_method
@ -491,14 +496,11 @@ def extension(*k):
return func
return deco
# ---------------------------------------------------------------
# The following methods are task generator methods commonly used
# they are almost examples, the rest of waf core does not depend on them
@taskgen_method
def to_nodes(self, lst, path=None):
"""
Converts the input list into a list of nodes.
Flatten the input list of string/nodes/lists into a list of nodes.
It is used by :py:func:`waflib.TaskGen.process_source` and :py:func:`waflib.TaskGen.process_rule`.
It is designed for source files, for folders, see :py:func:`waflib.Tools.ccroot.to_incnodes`:
@ -515,14 +517,16 @@ def to_nodes(self, lst, path=None):
if isinstance(lst, Node.Node):
lst = [lst]
# either a list or a string, convert to a list of nodes
for x in Utils.to_list(lst):
if isinstance(x, str):
node = find(x)
else:
elif hasattr(x, 'name'):
node = x
else:
tmp.extend(self.to_nodes(x))
continue
if not node:
raise Errors.WafError("source not found: %r in %r" % (x, self))
raise Errors.WafError('source not found: %r in %r' % (x, self))
tmp.append(node)
return tmp
@ -549,6 +553,24 @@ def process_rule(self):
def build(bld):
bld(rule='cp ${SRC} ${TGT}', source='wscript', target='bar.txt')
Main attributes processed:
* rule: command to execute, it can be a tuple of strings for multiple commands
* chmod: permissions for the resulting files (integer value such as Utils.O755)
* shell: set to False to execute the command directly (default is True to use a shell)
* scan: scanner function
* vars: list of variables to trigger rebuilts, such as CFLAGS
* cls_str: string to display when executing the task
* cls_keyword: label to display when executing the task
* cache_rule: by default, try to re-use similar classes, set to False to disable
* source: list of Node or string objects representing the source files required by this task
* target: list of Node or string objects representing the files that this task creates
* cwd: current working directory (Node or string)
* stdout: standard output, set to None to prevent waf from capturing the text
* stderr: standard error, set to None to prevent waf from capturing the text
* timeout: timeout for command execution (Python 3)
* always: whether to always run the command (False by default)
"""
if not getattr(self, 'rule', None):
return
@ -617,17 +639,21 @@ def process_rule(self):
return [nodes, []]
cls.scan = scan
# TODO use these values in the cache key if provided
# (may cause excessive caching)
for x in ('after', 'before', 'ext_in', 'ext_out'):
setattr(cls, x, getattr(self, x, []))
if use_cache:
cache[key] = cls
# now create one instance
tsk = self.create_task(name)
for x in ('after', 'before', 'ext_in', 'ext_out'):
setattr(tsk, x, getattr(self, x, []))
if hasattr(self, 'stdout'):
tsk.stdout = self.stdout
if hasattr(self, 'stderr'):
tsk.stderr = self.stderr
if getattr(self, 'timeout', None):
tsk.timeout = self.timeout
@ -663,7 +689,6 @@ def process_rule(self):
# methods during instance attribute look-up."
tsk.run = functools.partial(tsk.run, tsk)
@feature('seq')
def sequence_order(self):
"""
@ -721,6 +746,8 @@ class subst_pc(Task.Task):
if getattr(self.generator, 'is_copy', None):
for i, x in enumerate(self.outputs):
x.write(self.inputs[i].read('rb'), 'wb')
stat = os.stat(self.inputs[i].abspath()) # Preserve mtime of the copy
os.utime(self.outputs[i].abspath(), (stat.st_atime, stat.st_mtime))
self.force_permissions()
return None
@ -730,11 +757,11 @@ class subst_pc(Task.Task):
self.force_permissions()
return ret
code = self.inputs[0].read(encoding=getattr(self.generator, 'encoding', 'ISO8859-1'))
code = self.inputs[0].read(encoding=getattr(self.generator, 'encoding', 'latin-1'))
if getattr(self.generator, 'subst_fun', None):
code = self.generator.subst_fun(self, code)
if code is not None:
self.outputs[0].write(code, encoding=getattr(self.generator, 'encoding', 'ISO8859-1'))
self.outputs[0].write(code, encoding=getattr(self.generator, 'encoding', 'latin-1'))
self.force_permissions()
return None
@ -749,7 +776,6 @@ class subst_pc(Task.Task):
lst.append(g(1))
return "%%(%s)s" % g(1)
return ''
global re_m4
code = getattr(self.generator, 're_m4', re_m4).sub(repl, code)
try:
@ -765,12 +791,14 @@ class subst_pc(Task.Task):
d[x] = tmp
code = code % d
self.outputs[0].write(code, encoding=getattr(self.generator, 'encoding', 'ISO8859-1'))
self.outputs[0].write(code, encoding=getattr(self.generator, 'encoding', 'latin-1'))
self.generator.bld.raw_deps[self.uid()] = lst
# make sure the signature is updated
try: delattr(self, 'cache_sig')
except AttributeError: pass
try:
delattr(self, 'cache_sig')
except AttributeError:
pass
self.force_permissions()
@ -783,9 +811,9 @@ class subst_pc(Task.Task):
upd = self.m.update
if getattr(self.generator, 'fun', None):
upd(Utils.h_fun(self.generator.fun))
upd(Utils.h_fun(self.generator.fun).encode())
if getattr(self.generator, 'subst_fun', None):
upd(Utils.h_fun(self.generator.subst_fun))
upd(Utils.h_fun(self.generator.subst_fun).encode())
# raw_deps: persistent custom values returned by the scanner
vars = self.generator.bld.raw_deps.get(self.uid(), [])
@ -867,21 +895,17 @@ def process_subst(self):
if not a:
raise Errors.WafError('could not find %r for %r' % (x, self))
has_constraints = False
tsk = self.create_task('subst', a, b)
for k in ('after', 'before', 'ext_in', 'ext_out'):
val = getattr(self, k, None)
if val:
has_constraints = True
setattr(tsk, k, val)
# paranoid safety measure for the general case foo.in->foo.h with ambiguous dependencies
if not has_constraints:
global HEADER_EXTS
for xt in HEADER_EXTS:
if b.name.endswith(xt):
tsk.before = [k for k in ('c', 'cxx') if k in Task.classes]
break
for xt in HEADER_EXTS:
if b.name.endswith(xt):
tsk.ext_in = tsk.ext_in + ['.h']
break
inst_to = getattr(self, 'install_path', None)
if inst_to:
@ -889,3 +913,4 @@ def process_subst(self):
install_from=b, chmod=getattr(self, 'chmod', Utils.O644))
self.source = []

View File

@ -4,4 +4,4 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2005-2016 (ita)
# Thomas Nagy, 2005-2018 (ita)

View File

@ -4,7 +4,7 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2006-2016 (ita)
# Thomas Nagy, 2006-2018 (ita)
# Ralf Habacker, 2006 (rh)
"""
@ -25,3 +25,4 @@ def configure(conf):
conf.add_os_flags('ARFLAGS')
if not conf.env.ARFLAGS:
conf.env.ARFLAGS = ['rcs']

View File

@ -4,7 +4,7 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2008-2016 (ita)
# Thomas Nagy, 2008-2018 (ita)
"""
Assembly support, used by tools such as gas and nasm

View File

@ -5,7 +5,7 @@
#!/usr/bin/env python
# encoding: utf-8
# John O'Meara, 2006
# Thomas Nagy 2009-2016 (ita)
# Thomas Nagy 2009-2018 (ita)
"""
The **bison** program is a code generator which creates C or C++ files.
@ -50,3 +50,4 @@ def configure(conf):
"""
conf.find_program('bison', var='BISON')
conf.env.BISONFLAGS = ['-d']

View File

@ -4,7 +4,7 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2006-2016 (ita)
# Thomas Nagy, 2006-2018 (ita)
"Base for c programs/libraries"
@ -40,3 +40,4 @@ class cshlib(cprogram):
class cstlib(stlink_task):
"Links object files into a c static libraries"
pass # do not remove

View File

@ -145,3 +145,4 @@ def objects(bld, *k, **kw):
"""
set_features(kw, 'objects')
return bld(*k, **kw)

View File

@ -4,7 +4,7 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2005-2016 (ita)
# Thomas Nagy, 2005-2018 (ita)
"""
C/C++/D configuration helpers
@ -23,32 +23,6 @@ WAF_CONFIG_H = 'config.h'
DEFKEYS = 'define_key'
INCKEYS = 'include_key'
cfg_ver = {
'atleast-version': '>=',
'exact-version': '==',
'max-version': '<=',
}
SNIP_FUNCTION = '''
int main(int argc, char **argv) {
void (*p)();
(void)argc; (void)argv;
p=(void(*)())(%s);
return !p;
}
'''
"""Code template for checking for functions"""
SNIP_TYPE = '''
int main(int argc, char **argv) {
(void)argc; (void)argv;
if ((%(type_name)s *) 0) return 0;
if (sizeof (%(type_name)s)) return 0;
return 1;
}
'''
"""Code template for checking for types"""
SNIP_EMPTY_PROGRAM = '''
int main(int argc, char **argv) {
(void)argc; (void)argv;
@ -56,15 +30,6 @@ int main(int argc, char **argv) {
}
'''
SNIP_FIELD = '''
int main(int argc, char **argv) {
char *off;
(void)argc; (void)argv;
off = (char*) &((%(type_name)s*)0)->%(field_name)s;
return (size_t) off < sizeof(%(type_name)s);
}
'''
MACRO_TO_DESTOS = {
'__linux__' : 'linux',
'__GNU__' : 'gnu', # hurd
@ -205,7 +170,8 @@ def parse_flags(self, line, uselib_store, env=None, force_static=False, posix=No
static = False
elif x.startswith('-Wl') or x in ('-rdynamic', '-pie'):
app('LINKFLAGS', x)
elif x.startswith(('-m', '-f', '-dynamic', '-O')):
elif x.startswith(('-m', '-f', '-dynamic', '-O', '-g')):
# Adding the -W option breaks python builds on Openindiana
app('CFLAGS', x)
app('CXXFLAGS', x)
elif x.startswith('-bundle'):
@ -243,55 +209,42 @@ def validate_cfg(self, kw):
self.find_program('pkg-config', var='PKGCONFIG')
kw['path'] = self.env.PKGCONFIG
# pkg-config version
if 'atleast_pkgconfig_version' in kw:
if not 'msg' in kw:
# verify that exactly one action is requested
s = ('atleast_pkgconfig_version' in kw) + ('modversion' in kw) + ('package' in kw)
if s != 1:
raise ValueError('exactly one of atleast_pkgconfig_version, modversion and package must be set')
if not 'msg' in kw:
if 'atleast_pkgconfig_version' in kw:
kw['msg'] = 'Checking for pkg-config version >= %r' % kw['atleast_pkgconfig_version']
return
elif 'modversion' in kw:
kw['msg'] = 'Checking for %r version' % kw['modversion']
else:
kw['msg'] = 'Checking for %r' %(kw['package'])
if not 'okmsg' in kw:
# let the modversion check set the okmsg to the detected version
if not 'okmsg' in kw and not 'modversion' in kw:
kw['okmsg'] = 'yes'
if not 'errmsg' in kw:
kw['errmsg'] = 'not found'
if 'modversion' in kw:
if not 'msg' in kw:
kw['msg'] = 'Checking for %r version' % kw['modversion']
# pkg-config version
if 'atleast_pkgconfig_version' in kw:
pass
elif 'modversion' in kw:
if not 'uselib_store' in kw:
kw['uselib_store'] = kw['modversion']
if not 'define_name' in kw:
kw['define_name'] = '%s_VERSION' % Utils.quote_define_name(kw['uselib_store'])
return
if not 'package' in kw:
raise ValueError('a package name is required')
if not 'uselib_store' in kw:
kw['uselib_store'] = kw['package'].upper()
if not 'define_name' in kw:
kw['define_name'] = self.have_define(kw['uselib_store'])
if not 'msg' in kw:
kw['msg'] = 'Checking for %r' % (kw['package'] or kw['path'])
for x in cfg_ver:
# Gotcha: only one predicate is allowed at a time
# TODO remove in waf 2.0
y = x.replace('-', '_')
if y in kw:
package = kw['package']
if Logs.verbose:
Logs.warn('Passing %r to conf.check_cfg() is obsolete, pass parameters directly, eg:', y)
Logs.warn(" conf.check_cfg(package='%s', args=['--libs', '--cflags', '%s >= 1.6'])", package, package)
if not 'msg' in kw:
kw['msg'] = 'Checking for %r %s %s' % (package, cfg_ver[x], kw[y])
break
else:
if not 'uselib_store' in kw:
kw['uselib_store'] = Utils.to_list(kw['package'])[0].upper()
if not 'define_name' in kw:
kw['define_name'] = self.have_define(kw['uselib_store'])
@conf
def exec_cfg(self, kw):
"""
Executes ``pkg-config`` or other ``-config`` applications to colect configuration flags:
Executes ``pkg-config`` or other ``-config`` applications to collect configuration flags:
* if atleast_pkgconfig_version is given, check that pkg-config has the version n and return
* if modversion is given, then return the module version
@ -335,23 +288,13 @@ def exec_cfg(self, kw):
if 'atleast_pkgconfig_version' in kw:
cmd = path + ['--atleast-pkgconfig-version=%s' % kw['atleast_pkgconfig_version']]
self.cmd_and_log(cmd, env=env)
if not 'okmsg' in kw:
kw['okmsg'] = 'yes'
return
for x in cfg_ver:
# TODO remove in waf 2.0
y = x.replace('-', '_')
if y in kw:
self.cmd_and_log(path + ['--%s=%s' % (x, kw[y]), kw['package']], env=env)
if not 'okmsg' in kw:
kw['okmsg'] = 'yes'
define_it()
break
# single version for a module
if 'modversion' in kw:
version = self.cmd_and_log(path + ['--modversion', kw['modversion']], env=env).strip()
if not 'okmsg' in kw:
kw['okmsg'] = version
self.define(kw['define_name'], version)
return version
@ -381,14 +324,10 @@ def exec_cfg(self, kw):
val = self.cmd_and_log(lst + ['--variable=' + v], env=env).strip()
var = '%s_%s' % (kw['uselib_store'], v)
v_env[var] = val
if not 'okmsg' in kw:
kw['okmsg'] = 'yes'
return
# so we assume the command-line will output flags to be parsed afterwards
ret = self.cmd_and_log(lst, env=env)
if not 'okmsg' in kw:
kw['okmsg'] = 'yes'
define_it()
self.parse_flags(ret, kw['uselib_store'], kw.get('env', self.env), force_static=static, posix=kw.get('posix'))
@ -405,8 +344,6 @@ def check_cfg(self, *k, **kw):
def configure(conf):
conf.load('compiler_c')
conf.check_cfg(package='glib-2.0', args='--libs --cflags')
conf.check_cfg(package='glib-2.0', uselib_store='GLIB', atleast_version='2.10.0',
args='--cflags --libs')
conf.check_cfg(package='pango')
conf.check_cfg(package='pango', uselib_store='MYPANGO', args=['--cflags', '--libs'])
conf.check_cfg(package='pango',
@ -419,11 +356,6 @@ def check_cfg(self, *k, **kw):
conf.check_cfg(package='gtk+-2.0', variables=['includedir', 'prefix'], uselib_store='FOO')
print(conf.env.FOO_includedir)
"""
if k:
lst = k[0].split()
kw['package'] = lst[0]
kw['args'] = ' '.join(lst[1:])
self.validate_cfg(kw)
if 'msg' in kw:
self.start_msg(kw['msg'], **kw)
@ -490,6 +422,9 @@ def validate_c(self, kw):
:param auto_add_header_name: if header_name was set, add the headers in env.INCKEYS so the next tests will include these headers
:type auto_add_header_name: bool
"""
for x in ('type_name', 'field_name', 'function_name'):
if x in kw:
Logs.warn('Invalid argument %r in test' % x)
if not 'build_fun' in kw:
kw['build_fun'] = build_fun
@ -510,7 +445,7 @@ def validate_c(self, kw):
if not 'compile_mode' in kw:
kw['compile_mode'] = 'c'
if 'cxx' in Utils.to_list(kw.get('features',[])) or kw.get('compiler', '') == 'cxx':
if 'cxx' in Utils.to_list(kw.get('features', [])) or kw.get('compiler') == 'cxx':
kw['compile_mode'] = 'cxx'
if not 'type' in kw:
@ -533,50 +468,19 @@ def validate_c(self, kw):
return ''.join(['#include <%s>\n' % x for x in dct])
return ''
#OSX
if 'framework_name' in kw:
# OSX, not sure this is used anywhere
fwkname = kw['framework_name']
if not 'uselib_store' in kw:
kw['uselib_store'] = fwkname.upper()
if not kw.get('no_header', False):
if not 'header_name' in kw:
kw['header_name'] = []
if not kw.get('no_header'):
fwk = '%s/%s.h' % (fwkname, fwkname)
if kw.get('remove_dot_h'):
fwk = fwk[:-2]
kw['header_name'] = Utils.to_list(kw['header_name']) + [fwk]
val = kw.get('header_name', [])
kw['header_name'] = Utils.to_list(val) + [fwk]
kw['msg'] = 'Checking for framework %s' % fwkname
kw['framework'] = fwkname
#kw['frameworkpath'] = set it yourself
if 'function_name' in kw:
fu = kw['function_name']
if not 'msg' in kw:
kw['msg'] = 'Checking for function %s' % fu
kw['code'] = to_header(kw) + SNIP_FUNCTION % fu
if not 'uselib_store' in kw:
kw['uselib_store'] = fu.upper()
if not 'define_name' in kw:
kw['define_name'] = self.have_define(fu)
elif 'type_name' in kw:
tu = kw['type_name']
if not 'header_name' in kw:
kw['header_name'] = 'stdint.h'
if 'field_name' in kw:
field = kw['field_name']
kw['code'] = to_header(kw) + SNIP_FIELD % {'type_name' : tu, 'field_name' : field}
if not 'msg' in kw:
kw['msg'] = 'Checking for field %s in %s' % (field, tu)
if not 'define_name' in kw:
kw['define_name'] = self.have_define((tu + '_' + field).upper())
else:
kw['code'] = to_header(kw) + SNIP_TYPE % {'type_name' : tu}
if not 'msg' in kw:
kw['msg'] = 'Checking for type %s' % tu
if not 'define_name' in kw:
kw['define_name'] = self.have_define(tu.upper())
elif 'header_name' in kw:
if not 'msg' in kw:
@ -639,11 +543,12 @@ def validate_c(self, kw):
kw['code'] = '\n'.join(['#include <%s>' % x for x in self.env[INCKEYS]]) + '\n' + kw['code']
# in case defines lead to very long command-lines
if kw.get('merge_config_header', False) or env.merge_config_header:
if kw.get('merge_config_header') or env.merge_config_header:
kw['code'] = '%s\n\n%s' % (self.get_config_header(), kw['code'])
env.DEFINES = [] # modify the copy
if not kw.get('success'): kw['success'] = None
if not kw.get('success'):
kw['success'] = None
if 'define_name' in kw:
self.undefine(kw['define_name'])
@ -659,7 +564,7 @@ def post_check(self, *k, **kw):
is_success = 0
if kw['execute']:
if kw['success'] is not None:
if kw.get('define_ret', False):
if kw.get('define_ret'):
is_success = kw['success']
else:
is_success = (kw['success'] == 0)
@ -667,7 +572,6 @@ def post_check(self, *k, **kw):
is_success = (kw['success'] == 0)
if kw.get('define_name'):
# TODO this is still way too complicated
comment = kw.get('comment', '')
define_name = kw['define_name']
if kw['execute'] and kw.get('define_ret') and isinstance(is_success, str):
@ -698,7 +602,7 @@ def post_check(self, *k, **kw):
self.env[define_name] = int(is_success)
if 'header_name' in kw:
if kw.get('auto_add_header_name', False):
if kw.get('auto_add_header_name'):
self.env.append_value(INCKEYS, Utils.to_list(kw['header_name']))
if is_success and 'uselib_store' in kw:
@ -986,7 +890,8 @@ def write_config_header(self, configfile='', guard='', top=False, defines=True,
:type define_prefix: string
:param define_prefix: prefix all the defines in the file with a particular prefix
"""
if not configfile: configfile = WAF_CONFIG_H
if not configfile:
configfile = WAF_CONFIG_H
waf_guard = guard or 'W_%s_WAF' % Utils.quote_define_name(configfile)
node = top and self.bldnode or self.path.get_bld()
@ -1110,8 +1015,8 @@ def get_cc_version(conf, cc, gcc=False, icc=False, clang=False):
cmd = cc + ['-dM', '-E', '-']
env = conf.env.env or None
try:
out, err = conf.cmd_and_log(cmd, output=0, input='\n', env=env)
except Exception:
out, err = conf.cmd_and_log(cmd, output=0, input='\n'.encode(), env=env)
except Errors.WafError:
conf.fatal('Could not determine the compiler version %r' % cmd)
if gcc:
@ -1159,6 +1064,8 @@ def get_cc_version(conf, cc, gcc=False, icc=False, clang=False):
conf.env.DEST_BINFMT = 'elf'
elif isD('__WINNT__') or isD('__CYGWIN__') or isD('_WIN32'):
conf.env.DEST_BINFMT = 'pe'
if not conf.env.IMPLIBDIR:
conf.env.IMPLIBDIR = conf.env.LIBDIR # for .lib or .dll.a files
conf.env.LIBDIR = conf.env.BINDIR
elif isD('__APPLE__'):
conf.env.DEST_BINFMT = 'mac-o'
@ -1218,7 +1125,7 @@ def get_suncc_version(conf, cc):
cmd = cc + ['-V']
try:
out, err = conf.cmd_and_log(cmd, output=0)
except Errors.WafError ,e:
except Errors.WafError as e:
# Older versions of the compiler exit with non-zero status when reporting their version
if not (hasattr(e, 'returncode') and hasattr(e, 'stdout') and hasattr(e, 'stderr')):
conf.fatal('Could not find suncc %r' % cmd)
@ -1252,14 +1159,14 @@ def add_as_needed(self):
# ============ parallel configuration
class cfgtask(Task.TaskBase):
class cfgtask(Task.Task):
"""
A task that executes build configuration tests (calls conf.check)
Make sure to use locks if concurrent access to the same conf.env data is necessary.
"""
def __init__(self, *k, **kw):
Task.TaskBase.__init__(self, *k, **kw)
Task.Task.__init__(self, *k, **kw)
self.run_after = set()
def display(self):
@ -1274,6 +1181,9 @@ class cfgtask(Task.TaskBase):
def uid(self):
return Utils.SIG_NIL
def signature(self):
return Utils.SIG_NIL
def run(self):
conf = self.conf
bld = Build.BuildContext(top_dir=conf.srcnode.abspath(), out_dir=conf.bldnode.abspath())
@ -1301,7 +1211,7 @@ class cfgtask(Task.TaskBase):
return 1
def process(self):
Task.TaskBase.process(self)
Task.Task.process(self)
if 'msg' in self.args:
with self.generator.bld.multicheck_lock:
self.conf.start_msg(self.args['msg'])
@ -1357,11 +1267,12 @@ def multicheck(self, *k, **kw):
bld = par()
bld.keep = kw.get('run_all_tests', True)
bld.imp_sigs = {}
tasks = []
id_to_task = {}
for dct in k:
x = Task.classes['cfgtask'](bld=bld)
x = Task.classes['cfgtask'](bld=bld, env=None)
tasks.append(x)
x.args = dct
x.bld = bld
@ -1424,3 +1335,22 @@ def multicheck(self, *k, **kw):
if x.hasrun != Task.SUCCESS:
if x.args.get('mandatory', True):
self.fatal(kw.get('fatalmsg') or 'One of the tests has failed, read config.log for more information')
@conf
def check_gcc_o_space(self, mode='c'):
	"""
	Detects whether old gcc releases (4.x and earlier) require the ``-o``
	linker switch to be passed separately from its argument, and commits
	the adjusted link flags to the configuration environment when so.

	:param mode: language to probe, ``'c'`` or ``'cxx'``
	"""
	if int(self.env.CC_VERSION[0]) > 4:
		# recent compilers accept the fused form, nothing to probe
		return
	self.env.stash()
	# select the link-target flag variable matching the requested language
	flag_var = {'c': 'CCLNK_TGT_F', 'cxx': 'CXXLNK_TGT_F'}.get(mode)
	if flag_var:
		self.env[flag_var] = ['-o', '']
	features = '%s %sshlib' % (mode, mode)
	split_works = True
	try:
		self.check(msg='Checking if the -o link must be split from arguments', fragment=SNIP_EMPTY_PROGRAM, features=features)
	except self.errors.ConfigurationError:
		split_works = False
	if split_works:
		self.env.commit()
	else:
		# the split form failed to link: restore the previous flags
		self.env.revert()

View File

@ -4,7 +4,7 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy 2008-2016 (ita)
# Thomas Nagy 2008-2018 (ita)
"""
MacOSX related tools
@ -194,3 +194,4 @@ class macplist(Task.Task):
context = getattr(self, 'context', {})
txt = txt.format(**context)
self.outputs[0].write(txt)

View File

@ -4,7 +4,7 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2006-2016 (ita)
# Thomas Nagy, 2006-2018 (ita)
"""
C/C++ preprocessor for finding dependencies
@ -48,15 +48,15 @@ recursion_limit = 150
go_absolute = False
"Set to True to track headers on files in /usr/include, else absolute paths are ignored (but it becomes very slow)"
standard_includes = ['/usr/include']
standard_includes = ['/usr/local/include', '/usr/include']
if Utils.is_win32:
standard_includes = []
use_trigraphs = 0
"""Apply trigraph rules (False by default)"""
# obsolete, do not use
strict_quotes = 0
"""Reserve the "#include <>" quotes for system includes (do not search for those includes). False by default."""
g_optrans = {
'not':'!',
@ -159,22 +159,6 @@ for x, syms in enumerate(ops):
for u in syms.split():
prec[u] = x
def trimquotes(s):
"""
Remove the single quotes around an expression::
trimquotes("'test'") == "test"
:param s: expression to transform
:type s: string
:rtype: string
"""
# TODO remove in waf 2.0
if not s: return ''
s = s.rstrip()
if s[0] == "'" and s[-1] == "'": return s[1:-1]
return s
def reduce_nums(val_1, val_2, val_op):
"""
Apply arithmetic rules to compute a result
@ -190,32 +174,56 @@ def reduce_nums(val_1, val_2, val_op):
#print val_1, val_2, val_op
# now perform the operation, make certain a and b are numeric
try: a = 0 + val_1
except TypeError: a = int(val_1)
try: b = 0 + val_2
except TypeError: b = int(val_2)
try:
a = 0 + val_1
except TypeError:
a = int(val_1)
try:
b = 0 + val_2
except TypeError:
b = int(val_2)
d = val_op
if d == '%': c = a%b
elif d=='+': c = a+b
elif d=='-': c = a-b
elif d=='*': c = a*b
elif d=='/': c = a/b
elif d=='^': c = a^b
elif d=='==': c = int(a == b)
elif d=='|' or d == 'bitor': c = a|b
elif d=='||' or d == 'or' : c = int(a or b)
elif d=='&' or d == 'bitand': c = a&b
elif d=='&&' or d == 'and': c = int(a and b)
elif d=='!=' or d == 'not_eq': c = int(a != b)
elif d=='^' or d == 'xor': c = int(a^b)
elif d=='<=': c = int(a <= b)
elif d=='<': c = int(a < b)
elif d=='>': c = int(a > b)
elif d=='>=': c = int(a >= b)
elif d=='<<': c = a<<b
elif d=='>>': c = a>>b
else: c = 0
if d == '%':
c = a % b
elif d=='+':
c = a + b
elif d=='-':
c = a - b
elif d=='*':
c = a * b
elif d=='/':
c = a / b
elif d=='^':
c = a ^ b
elif d=='==':
c = int(a == b)
elif d=='|' or d == 'bitor':
c = a | b
elif d=='||' or d == 'or' :
c = int(a or b)
elif d=='&' or d == 'bitand':
c = a & b
elif d=='&&' or d == 'and':
c = int(a and b)
elif d=='!=' or d == 'not_eq':
c = int(a != b)
elif d=='^' or d == 'xor':
c = int(a^b)
elif d=='<=':
c = int(a <= b)
elif d=='<':
c = int(a < b)
elif d=='>':
c = int(a > b)
elif d=='>=':
c = int(a >= b)
elif d=='<<':
c = a << b
elif d=='>>':
c = a >> b
else:
c = 0
return c
def get_num(lst):
@ -227,7 +235,8 @@ def get_num(lst):
:return: a pair containing the number and the rest of the list
:rtype: tuple(value, list)
"""
if not lst: raise PreprocError('empty list for get_num')
if not lst:
raise PreprocError('empty list for get_num')
(p, v) = lst[0]
if p == OP:
if v == '(':
@ -283,7 +292,8 @@ def get_term(lst):
:rtype: value, list
"""
if not lst: raise PreprocError('empty list for get_term')
if not lst:
raise PreprocError('empty list for get_term')
num, lst = get_num(lst)
if not lst:
return (num, [])
@ -466,18 +476,22 @@ def reduce_tokens(lst, defs, ban=[]):
one_param.append((p2, v2))
count_paren += 1
elif v2 == ')':
if one_param: args.append(one_param)
if one_param:
args.append(one_param)
break
elif v2 == ',':
if not one_param: raise PreprocError('empty param in funcall %r' % v)
if not one_param:
raise PreprocError('empty param in funcall %r' % v)
args.append(one_param)
one_param = []
else:
one_param.append((p2, v2))
else:
one_param.append((p2, v2))
if v2 == '(': count_paren += 1
elif v2 == ')': count_paren -= 1
if v2 == '(':
count_paren += 1
elif v2 == ')':
count_paren -= 1
else:
raise PreprocError('malformed macro')
@ -514,7 +528,6 @@ def reduce_tokens(lst, defs, ban=[]):
accu.append((p2, v2))
accu.extend(toks)
elif to_add[j+1][0] == IDENT and to_add[j+1][1] == '__VA_ARGS__':
# TODO not sure
# first collect the tokens
va_toks = []
st = len(macro_def[0])
@ -522,7 +535,8 @@ def reduce_tokens(lst, defs, ban=[]):
for x in args[pt-st+1:]:
va_toks.extend(x)
va_toks.append((OP, ','))
if va_toks: va_toks.pop() # extra comma
if va_toks:
va_toks.pop() # extra comma
if len(accu)>1:
(p3, v3) = accu[-1]
(p4, v4) = accu[-2]
@ -570,7 +584,8 @@ def eval_macro(lst, defs):
:rtype: int
"""
reduce_tokens(lst, defs, [])
if not lst: raise PreprocError('missing tokens to evaluate')
if not lst:
raise PreprocError('missing tokens to evaluate')
if lst:
p, v = lst[0]
@ -597,7 +612,8 @@ def extract_macro(txt):
p, name = t[0]
p, v = t[1]
if p != OP: raise PreprocError('expected (')
if p != OP:
raise PreprocError('expected (')
i = 1
pindex = 0
@ -700,16 +716,20 @@ def parse_char(txt):
return ord(txt)
c = txt[1]
if c == 'x':
if len(txt) == 4 and txt[3] in string.hexdigits: return int(txt[2:], 16)
if len(txt) == 4 and txt[3] in string.hexdigits:
return int(txt[2:], 16)
return int(txt[2:], 16)
elif c.isdigit():
if c == '0' and len(txt)==2: return 0
if c == '0' and len(txt)==2:
return 0
for i in 3, 2, 1:
if len(txt) > i and txt[1:1+i].isdigit():
return (1+i, int(txt[1:1+i], 8))
else:
try: return chr_esc[c]
except KeyError: raise PreprocError('could not parse char literal %r' % txt)
try:
return chr_esc[c]
except KeyError:
raise PreprocError('could not parse char literal %r' % txt)
def tokenize(s):
"""
@ -730,28 +750,32 @@ def tokenize_private(s):
v = m(name)
if v:
if name == IDENT:
try:
g_optrans[v]
if v in g_optrans:
name = OP
except KeyError:
# c++ specific
if v.lower() == "true":
v = 1
name = NUM
elif v.lower() == "false":
v = 0
name = NUM
elif v.lower() == "true":
v = 1
name = NUM
elif v.lower() == "false":
v = 0
name = NUM
elif name == NUM:
if m('oct'): v = int(v, 8)
elif m('hex'): v = int(m('hex'), 16)
elif m('n0'): v = m('n0')
if m('oct'):
v = int(v, 8)
elif m('hex'):
v = int(m('hex'), 16)
elif m('n0'):
v = m('n0')
else:
v = m('char')
if v: v = parse_char(v)
else: v = m('n2') or m('n4')
if v:
v = parse_char(v)
else:
v = m('n2') or m('n4')
elif name == OP:
if v == '%:': v = '#'
elif v == '%:%:': v = '##'
if v == '%:':
v = '#'
elif v == '%:%:':
v = '##'
elif name == STR:
# remove the quotes around the string
v = v[1:-1]
@ -807,6 +831,9 @@ class c_parser(object):
self.ban_includes = set()
"""Includes that must not be read (#pragma once)"""
self.listed = set()
"""Include nodes/names already listed to avoid duplicates in self.nodes/self.names"""
def cached_find_resource(self, node, filename):
"""
Find a file from the input directory
@ -821,7 +848,6 @@ class c_parser(object):
try:
cache = node.ctx.preproc_cache_node
except AttributeError:
global FILE_CACHE_SIZE
cache = node.ctx.preproc_cache_node = Utils.lru_cache(FILE_CACHE_SIZE)
key = (node, filename)
@ -839,7 +865,7 @@ class c_parser(object):
cache[key] = ret
return ret
def tryfind(self, filename):
def tryfind(self, filename, kind='"', env=None):
"""
Try to obtain a node from the filename based from the include paths. Will add
the node found to :py:attr:`waflib.Tools.c_preproc.c_parser.nodes` or the file name to
@ -853,26 +879,37 @@ class c_parser(object):
"""
if filename.endswith('.moc'):
# we could let the qt4 module use a subclass, but then the function "scan" below must be duplicated
# in the qt4 and in the qt5 classes. So we have two lines here and it is sufficient. TODO waf 1.9
# in the qt4 and in the qt5 classes. So we have two lines here and it is sufficient.
self.names.append(filename)
return None
self.curfile = filename
# for msvc it should be a for loop over the whole stack
found = self.cached_find_resource(self.currentnode_stack[-1], filename)
found = None
if kind == '"':
if env.MSVC_VERSION:
for n in reversed(self.currentnode_stack):
found = self.cached_find_resource(n, filename)
if found:
break
else:
found = self.cached_find_resource(self.currentnode_stack[-1], filename)
for n in self.nodepaths:
if found:
break
found = self.cached_find_resource(n, filename)
if not found:
for n in self.nodepaths:
found = self.cached_find_resource(n, filename)
if found:
break
listed = self.listed
if found and not found in self.ban_includes:
# TODO duplicates do not increase the no-op build times too much, but they may be worth removing
self.nodes.append(found)
if found not in listed:
listed.add(found)
self.nodes.append(found)
self.addlines(found)
else:
if not filename in self.names:
if filename not in listed:
listed.add(filename)
self.names.append(filename)
return found
@ -887,7 +924,8 @@ class c_parser(object):
# return a list of tuples : keyword, line
code = node.read()
if use_trigraphs:
for (a, b) in trig_def: code = code.split(a).join(b)
for (a, b) in trig_def:
code = code.split(a).join(b)
code = re_nl.sub('', code)
code = re_cpp.sub(repl, code)
return re_lines.findall(code)
@ -896,7 +934,6 @@ class c_parser(object):
try:
cache = node.ctx.preproc_cache_lines
except AttributeError:
global LINE_CACHE_SIZE
cache = node.ctx.preproc_cache_lines = Utils.lru_cache(LINE_CACHE_SIZE)
try:
return cache[node]
@ -929,8 +966,7 @@ class c_parser(object):
raise PreprocError('could not read the file %r' % node)
except Exception:
if Logs.verbose > 0:
Logs.error('parsing %r failed', node)
traceback.print_exc()
Logs.error('parsing %r failed %s', node, traceback.format_exc())
else:
self.lines.extend(lines)
@ -963,8 +999,6 @@ class c_parser(object):
continue
try:
ve = Logs.verbose
if ve: Logs.debug('preproc: line is %s - %s state is %s', token, line, self.state)
state = self.state
# make certain we define the state if we are about to enter in an if block
@ -980,23 +1014,27 @@ class c_parser(object):
if token == 'if':
ret = eval_macro(tokenize(line), self.defs)
if ret: state[-1] = accepted
else: state[-1] = ignored
if ret:
state[-1] = accepted
else:
state[-1] = ignored
elif token == 'ifdef':
m = re_mac.match(line)
if m and m.group() in self.defs: state[-1] = accepted
else: state[-1] = ignored
if m and m.group() in self.defs:
state[-1] = accepted
else:
state[-1] = ignored
elif token == 'ifndef':
m = re_mac.match(line)
if m and m.group() in self.defs: state[-1] = ignored
else: state[-1] = accepted
if m and m.group() in self.defs:
state[-1] = ignored
else:
state[-1] = accepted
elif token == 'include' or token == 'import':
(kind, inc) = extract_include(line, self.defs)
if ve: Logs.debug('preproc: include found %s (%s) ', inc, kind)
if kind == '"' or not strict_quotes:
self.current_file = self.tryfind(inc)
if token == 'import':
self.ban_includes.add(self.current_file)
self.current_file = self.tryfind(inc, kind, env)
if token == 'import':
self.ban_includes.add(self.current_file)
elif token == 'elif':
if state[-1] == accepted:
state[-1] = skipped
@ -1004,8 +1042,10 @@ class c_parser(object):
if eval_macro(tokenize(line), self.defs):
state[-1] = accepted
elif token == 'else':
if state[-1] == accepted: state[-1] = skipped
elif state[-1] == ignored: state[-1] = accepted
if state[-1] == accepted:
state[-1] = skipped
elif state[-1] == ignored:
state[-1] = accepted
elif token == 'define':
try:
self.defs[self.define_name(line)] = line
@ -1019,9 +1059,9 @@ class c_parser(object):
elif token == 'pragma':
if re_pragma_once.match(line.lower()):
self.ban_includes.add(self.current_file)
except Exception ,e:
except Exception as e:
if Logs.verbose:
Logs.debug('preproc: line parsing failed (%s): %s %s', e, line, Utils.ex_stack())
Logs.debug('preproc: line parsing failed (%s): %s %s', e, line, traceback.format_exc())
def define_name(self, line):
"""
@ -1040,9 +1080,6 @@ def scan(task):
This function is bound as a task method on :py:class:`waflib.Tools.c.c` and :py:class:`waflib.Tools.cxx.cxx` for example
"""
global go_absolute
try:
incn = task.generator.includes_nodes
except AttributeError:

View File

@ -4,7 +4,7 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2016 (ita)
# Thomas Nagy, 2016-2018 (ita)
"""
Various configuration tests.
@ -203,7 +203,7 @@ class grep_for_endianness(Task.Task):
"""
color = 'PINK'
def run(self):
txt = self.inputs[0].read(flags='rb').decode('iso8859-1')
txt = self.inputs[0].read(flags='rb').decode('latin-1')
if txt.find('LiTTleEnDian') > -1:
self.generator.tmp.append('little')
elif txt.find('BIGenDianSyS') > -1:
@ -230,3 +230,4 @@ def check_endianness(self):
self.check(fragment=ENDIAN_FRAGMENT, features='c grep_for_endianness',
msg='Checking for endianness', define='ENDIANNESS', tmp=tmp, okmsg=check_msg)
return tmp[0]

View File

@ -4,7 +4,7 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2005-2016 (ita)
# Thomas Nagy, 2005-2018 (ita)
"""
Classes and methods shared by tools providing support for C-like language such
@ -135,6 +135,9 @@ class link_task(Task.Task):
"""
color = 'YELLOW'
weight = 3
"""Try to process link tasks as early as possible"""
inst_to = None
"""Default installation path for the link task outputs, or None to disable"""
@ -231,8 +234,10 @@ class stlink_task(link_task):
def rm_tgt(cls):
old = cls.run
def wrap(self):
try: os.remove(self.outputs[0].abspath())
except OSError: pass
try:
os.remove(self.outputs[0].abspath())
except OSError:
pass
return old(self)
setattr(cls, 'run', wrap)
rm_tgt(stlink_task)
@ -272,7 +277,7 @@ def apply_link(self):
try:
inst_to = self.install_path
except AttributeError:
inst_to = self.link_task.__class__.inst_to
inst_to = self.link_task.inst_to
if inst_to:
# install a copy of the node list we have at this moment (implib not added)
self.install_task = self.add_install_files(
@ -395,7 +400,8 @@ def process_use(self):
self.add_objects_from_tgen(y)
if getattr(y, 'export_includes', None):
self.includes.extend(y.to_incnodes(y.export_includes))
# self.includes may come from a global variable #2035
self.includes = self.includes + y.to_incnodes(y.export_includes)
if getattr(y, 'export_defines', None):
self.env.append_value('DEFINES', self.to_list(y.export_defines))
@ -597,7 +603,7 @@ def apply_vnum(self):
self.create_task('vnum', node, outs)
if getattr(self, 'install_task', None):
self.install_task.hasrun = Task.SKIP_ME
self.install_task.hasrun = Task.SKIPPED
path = self.install_task.install_to
if self.env.DEST_OS == 'openbsd':
libname = self.link_task.outputs[0].name
@ -617,7 +623,7 @@ def apply_vnum(self):
try:
inst_to = self.install_path
except AttributeError:
inst_to = self.link_task.__class__.inst_to
inst_to = self.link_task.inst_to
if inst_to:
p = Utils.subst_vars(inst_to, self.env)
path = os.path.join(p, name2)
@ -770,3 +776,4 @@ def set_full_paths_hpux(self):
else:
lst.append(os.path.normpath(os.path.join(base, x)))
self.env[var] = lst

View File

@ -4,7 +4,7 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy 2009-2016 (ita)
# Thomas Nagy 2009-2018 (ita)
"""
Detect the Clang++ C++ compiler
@ -31,3 +31,4 @@ def configure(conf):
conf.cxx_load_tools()
conf.cxx_add_flags()
conf.link_add_flags()

View File

@ -81,7 +81,7 @@ def configure(conf):
conf.start_msg('Checking for %r (C compiler)' % compiler)
try:
conf.load(compiler)
except conf.errors.ConfigurationError ,e:
except conf.errors.ConfigurationError as e:
conf.env.revert()
conf.end_msg(False)
debug('compiler_c: %r', e)
@ -111,3 +111,4 @@ def options(opt):
for x in test_for_compiler.split():
opt.load('%s' % x)

View File

@ -82,7 +82,7 @@ def configure(conf):
conf.start_msg('Checking for %r (C++ compiler)' % compiler)
try:
conf.load(compiler)
except conf.errors.ConfigurationError ,e:
except conf.errors.ConfigurationError as e:
conf.env.revert()
conf.end_msg(False)
debug('compiler_cxx: %r', e)
@ -112,3 +112,4 @@ def options(opt):
for x in test_for_compiler.split():
opt.load('%s' % x)

View File

@ -5,7 +5,7 @@
#!/usr/bin/env python
# encoding: utf-8
# Carlos Rafael Giani, 2007 (dv)
# Thomas Nagy, 2016 (ita)
# Thomas Nagy, 2016-2018 (ita)
"""
Try to detect a D compiler from the list of supported compilers::
@ -58,7 +58,7 @@ def configure(conf):
conf.start_msg('Checking for %r (D compiler)' % compiler)
try:
conf.load(compiler)
except conf.errors.ConfigurationError ,e:
except conf.errors.ConfigurationError as e:
conf.env.revert()
conf.end_msg(False)
Logs.debug('compiler_d: %r', e)
@ -86,3 +86,4 @@ def options(opt):
for x in test_for_compiler.split():
opt.load('%s' % x)

View File

@ -44,7 +44,7 @@ def configure(conf):
conf.start_msg('Checking for %r (Fortran compiler)' % compiler)
try:
conf.load(compiler)
except conf.errors.ConfigurationError ,e:
except conf.errors.ConfigurationError as e:
conf.env.revert()
conf.end_msg(False)
Logs.debug('compiler_fortran: %r', e)
@ -74,3 +74,4 @@ def options(opt):
for x in test_for_compiler.split():
opt.load('%s' % x)

View File

@ -4,7 +4,7 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2006-2016 (ita)
# Thomas Nagy, 2006-2018 (ita)
"""
C# support. A simple example::
@ -107,10 +107,10 @@ def debug_cs(self):
else:
out = node.change_ext('.pdb')
self.cs_task.outputs.append(out)
try:
self.install_task.source.append(out)
except AttributeError:
pass
if getattr(self, 'install_task', None):
self.pdb_install_task = self.add_install_files(
install_to=self.install_task.install_to, install_from=out)
if csdebug == 'pdbonly':
val = ['/debug+', '/debug:pdbonly']
@ -120,6 +120,29 @@ def debug_cs(self):
val = ['/debug-']
self.env.append_value('CSFLAGS', val)
@feature('cs')
@after_method('debug_cs')
def doc_cs(self):
	"""
	The C# targets may create .xml documentation files::

		def build(bld):
			bld(features='cs', source='My.cs', bintype='library', gen='my.dll', csdoc=True)
			# csdoc is a boolean value
	"""
	# per-target attribute wins over the configuration-wide default
	if not getattr(self, 'csdoc', self.env.CSDOC):
		return

	assembly = self.cs_task.outputs[0]
	xml_node = assembly.change_ext('.xml')
	self.cs_task.outputs.append(xml_node)

	inst = getattr(self, 'install_task', None)
	if inst:
		# install the documentation next to the assembly
		self.doc_install_task = self.add_install_files(install_to=inst.install_to, install_from=xml_node)

	self.env.append_value('CSFLAGS', '/doc:%s' % xml_node.abspath())
class mcs(Task.Task):
"""
@ -128,10 +151,16 @@ class mcs(Task.Task):
color = 'YELLOW'
run_str = '${MCS} ${CSTYPE} ${CSFLAGS} ${ASS_ST:ASSEMBLIES} ${RES_ST:RESOURCES} ${OUT} ${SRC}'
def exec_command(self, cmd, **kw):
if '/noconfig' in cmd:
raise ValueError('/noconfig is not allowed when using response files, check your flags!')
return super(self.__class__, self).exec_command(cmd, **kw)
def split_argfile(self, cmd):
inline = [cmd[0]]
infile = []
for x in cmd[1:]:
# csc doesn't want /noconfig in @file
if x.lower() == '/noconfig':
inline.append(x)
else:
infile.append(self.quote_flag(x))
return (inline, infile)
def configure(conf):
"""
@ -183,3 +212,4 @@ def read_csshlib(self, name, paths=[]):
:rtype: :py:class:`waflib.TaskGen.task_gen`
"""
return self(name=name, features='fake_lib', lib_paths=paths, lib_type='csshlib')

View File

@ -4,7 +4,7 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2005-2016 (ita)
# Thomas Nagy, 2005-2018 (ita)
"Base for c++ programs and libraries"
@ -41,3 +41,4 @@ class cxxshlib(cxxprogram):
class cxxstlib(stlink_task):
"Links object files into c++ static libraries"
pass # do not remove

View File

@ -5,7 +5,7 @@
#!/usr/bin/env python
# encoding: utf-8
# Carlos Rafael Giani, 2007 (dv)
# Thomas Nagy, 2007-2016 (ita)
# Thomas Nagy, 2007-2018 (ita)
from waflib import Utils, Task, Errors
from waflib.TaskGen import taskgen_method, feature, extension
@ -98,3 +98,4 @@ def process_header(self):
if not node:
raise Errors.WafError('file %r not found on d obj' % i[0])
self.create_task('d_header', node, node.change_ext('.di'))

View File

@ -4,7 +4,7 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2016 (ita)
# Thomas Nagy, 2016-2018 (ita)
from waflib import Utils
from waflib.Configure import conf
@ -65,3 +65,4 @@ def check_dlibrary(self, execute=True):
ret = self.check_cc(features='d dprogram', fragment=DLIB, compile_filename='test.d', execute=execute, define_ret=True)
if execute:
self.env.DLIBRARY = ret.strip()

View File

@ -4,7 +4,7 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2016 (ita)
# Thomas Nagy, 2016-2018 (ita)
"""
Provide a scanner for finding dependencies on d files
@ -33,7 +33,8 @@ def filter_comments(filename):
i += 1
while i < max:
c = txt[i]
if c == delim: break
if c == delim:
break
elif c == '\\': # skip the character following backslash
i += 1
i += 1
@ -42,7 +43,8 @@ def filter_comments(filename):
elif c == '/': # try to replace a comment with whitespace
buf.append(txt[begin:i])
i += 1
if i == max: break
if i == max:
break
c = txt[i]
if c == '+': # eat nesting /+ +/ comment
i += 1
@ -56,7 +58,8 @@ def filter_comments(filename):
c = None
elif prev == '+' and c == '/':
nesting -= 1
if nesting == 0: break
if nesting == 0:
break
c = None
i += 1
elif c == '*': # eat /* */ comment
@ -65,7 +68,8 @@ def filter_comments(filename):
while i < max:
prev = c
c = txt[i]
if prev == '*' and c == '/': break
if prev == '*' and c == '/':
break
i += 1
elif c == '/': # eat // comment
i += 1
@ -192,7 +196,8 @@ class d_parser(object):
names = self.get_strings(code) # obtain the import strings
for x in names:
# optimization
if x in self.allnames: continue
if x in self.allnames:
continue
self.allnames.append(x)
# for each name, see if it is like a node or not
@ -207,3 +212,4 @@ def scan(self):
nodes = gruik.nodes
names = gruik.names
return (nodes, names)

View File

@ -71,3 +71,4 @@ def configure(conf):
Detects the program dbus-binding-tool and sets ``conf.env.DBUS_BINDING_TOOL``
"""
conf.find_program('dbus-binding-tool', var='DBUS_BINDING_TOOL')

View File

@ -5,7 +5,7 @@
#!/usr/bin/env python
# encoding: utf-8
# Carlos Rafael Giani, 2007 (dv)
# Thomas Nagy, 2008-2016 (ita)
# Thomas Nagy, 2008-2018 (ita)
import sys
from waflib.Tools import ar, d
@ -81,3 +81,4 @@ def configure(conf):
if str(conf.env.D).find('ldc') > -1:
conf.common_flags_ldc()

View File

@ -22,6 +22,7 @@ typos = {
'importpath':'includes',
'installpath':'install_path',
'iscopy':'is_copy',
'uses':'use',
}
meths_typos = ['__call__', 'program', 'shlib', 'stlib', 'objects']
@ -73,8 +74,11 @@ def check_same_targets(self):
for (k, v) in uids.items():
if len(v) > 1:
Logs.error('* Several tasks use the same identifier. Please check the information on\n https://waf.io/apidocs/Task.html?highlight=uid#waflib.Task.Task.uid')
tg_details = tsk.generator.name
if Logs.verbose > 2:
tg_details = tsk.generator
for tsk in v:
Logs.error(' - object %r (%r) defined in %r', tsk.__class__.__name__, tsk, tsk.generator)
Logs.error(' - object %r (%r) defined in %r', tsk.__class__.__name__, tsk, tg_details)
def check_invalid_constraints(self):
feat = set()
@ -135,16 +139,23 @@ def enhance_lib():
Logs.error("In ant_glob pattern %r: '..' means 'two dots', not 'parent directory'", k[0])
if '.' in sp:
Logs.error("In ant_glob pattern %r: '.' means 'one dot', not 'current directory'", k[0])
if kw.get('remove', True):
try:
if self.is_child_of(self.ctx.bldnode) and not kw.get('quiet', False):
Logs.error('Using ant_glob on the build folder (%r) is dangerous (quiet=True to disable this warning)', self)
except AttributeError:
pass
return self.old_ant_glob(*k, **kw)
Node.Node.old_ant_glob = Node.Node.ant_glob
Node.Node.ant_glob = ant_glob
# catch ant_glob on build folders
def ant_iter(self, accept=None, maxdepth=25, pats=[], dir=False, src=True, remove=True, quiet=False):
if remove:
try:
if self.is_child_of(self.ctx.bldnode) and not quiet:
quiet = True
Logs.error('Calling ant_glob on build folders (%r) is dangerous: add quiet=True / remove=False', self)
except AttributeError:
pass
return self.old_ant_iter(accept, maxdepth, pats, dir, src, remove, quiet)
Node.Node.old_ant_iter = Node.Node.ant_iter
Node.Node.ant_iter = ant_iter
# catch conflicting ext_in/ext_out/before/after declarations
old = Task.is_before
def is_before(t1, t2):
@ -174,7 +185,7 @@ def enhance_lib():
else:
for x in ('before', 'after'):
for y in self.to_list(getattr(self, x, [])):
if not Task.classes.get(y, None):
if not Task.classes.get(y):
Logs.error('Erroneous order constraint %s=%r on %r (no such class)', x, y, self)
TaskGen.feature('*')(check_err_order)
@ -216,7 +227,7 @@ def enhance_lib():
elif name == 'prepend':
raise Errors.WafError('env.prepend does not exist: use env.prepend_value')
if name in self.__slots__:
return object.__getattr__(self, name, default)
return super(ConfigSet.ConfigSet, self).__getattr__(name, default)
else:
return self[name]
ConfigSet.ConfigSet.__getattr__ = _getattr
@ -227,3 +238,4 @@ def options(opt):
Error verification can be enabled by default (not just on ``waf -v``) by adding to the user script options
"""
enhance_lib()

View File

@ -5,13 +5,13 @@
#! /usr/bin/env python
# encoding: utf-8
# DC 2008
# Thomas Nagy 2016 (ita)
# Thomas Nagy 2016-2018 (ita)
"""
Fortran support
"""
from waflib import Utils, Task
from waflib import Utils, Task, Errors
from waflib.Tools import ccroot, fc_config, fc_scan
from waflib.TaskGen import extension
from waflib.Configure import conf
@ -50,7 +50,7 @@ def get_fortran_tasks(tsk):
class fc(Task.Task):
"""
Fortran tasks can only run when all fortran tasks in the current group are ready to be executed
Fortran tasks can only run when all fortran tasks in a current task group are ready to be executed
This may cause a deadlock if some fortran task is waiting for something that cannot happen (circular dependency)
Should this ever happen, set the 'nomod=True' on those tasks instances to break the loop
"""
@ -89,12 +89,11 @@ class fc(Task.Task):
ret = tsk.runnable_status()
if ret == Task.ASK_LATER:
# we have to wait for one of the other fortran tasks to be ready
# this may deadlock if there are dependencies between the fortran tasks
# this may deadlock if there are dependencies between fortran tasks
# but this should not happen (we are setting them here!)
for x in lst:
x.mod_fortran_done = None
# TODO sort the list of tasks in bld.producer.outstanding to put all fortran tasks at the end
return Task.ASK_LATER
ins = Utils.defaultdict(set)
@ -108,7 +107,7 @@ class fc(Task.Task):
name = bld.modfile(x.replace('MOD@', ''))
node = bld.srcnode.find_or_declare(name)
tsk.set_outputs(node)
outs[id(node)].add(tsk)
outs[node].add(tsk)
# the .mod files to use
for tsk in lst:
@ -120,7 +119,7 @@ class fc(Task.Task):
if node and node not in tsk.outputs:
if not node in bld.node_deps[key]:
bld.node_deps[key].append(node)
ins[id(node)].add(tsk)
ins[node].add(tsk)
# if the intersection matches, set the order
for k in ins.keys():
@ -182,10 +181,11 @@ class fcprogram_test(fcprogram):
kw['output'] = 0
try:
(bld.out, bld.err) = bld.cmd_and_log(cmd, **kw)
except Exception:
except Errors.WafError:
return -1
if bld.out:
bld.to_log('out: %s\n' % bld.out)
if bld.err:
bld.to_log('err: %s\n' % bld.err)

View File

@ -5,7 +5,7 @@
#! /usr/bin/env python
# encoding: utf-8
# DC 2008
# Thomas Nagy 2016 (ita)
# Thomas Nagy 2016-2018 (ita)
"""
Fortran configuration helpers
@ -121,7 +121,7 @@ def fortran_modifier_win32(conf):
v.fcprogram_PATTERN = v.fcprogram_test_PATTERN = '%s.exe'
v.fcshlib_PATTERN = '%s.dll'
v.implib_PATTERN = 'lib%s.dll.a'
v.implib_PATTERN = '%s.dll.a'
v.IMPLIB_ST = '-Wl,--out-implib,%s'
v.FCFLAGS_fcshlib = []
@ -343,10 +343,10 @@ def getoutput(conf, cmd, stdin=False):
else:
env = dict(os.environ)
env['LANG'] = 'C'
input = stdin and '\n' or None
input = stdin and '\n'.encode() or None
try:
out, err = conf.cmd_and_log(cmd, env=env, output=0, input=input)
except Errors.WafError ,e:
except Errors.WafError as e:
# An WafError might indicate an error code during the command
# execution, in this case we still obtain the stderr and stdout,
# which we can use to find the version string.
@ -460,7 +460,7 @@ def detect_openmp(self):
"""
Detects openmp flags and sets the OPENMP ``FCFLAGS``/``LINKFLAGS``
"""
for x in ('-qopenmp', '-fopenmp','-openmp','-mp','-xopenmp','-omp','-qsmp=omp'):
for x in ('-fopenmp','-openmp','-mp','-xopenmp','-omp','-qsmp=omp'):
try:
self.check_fc(
msg = 'Checking for OpenMP flag %s' % x,
@ -475,3 +475,18 @@ def detect_openmp(self):
break
else:
self.fatal('Could not find OpenMP')
@conf
def check_gfortran_o_space(self):
	"""
	Detects whether gfortran needs the ``-o`` linker switch separated from
	its argument, and commits the adjusted flags to the configuration
	environment when the split form links successfully.
	"""
	# only old gfortran (<= 4.x) is known to be affected; bail out for
	# other Fortran implementations to stay on the safe side
	if self.env.FC_NAME != 'GFORTRAN' or int(self.env.FC_VERSION[0]) > 4:
		return
	self.env.stash()
	self.env.FCLNK_TGT_F = ['-o', '']
	split_works = True
	try:
		self.check_fc(msg='Checking if the -o link must be split from arguments', fragment=FC_FRAGMENT, features='fc fcshlib')
	except self.errors.ConfigurationError:
		split_works = False
	if split_works:
		self.env.commit()
	else:
		# the split form failed to link: restore the previous flags
		self.env.revert()

View File

@ -5,7 +5,7 @@
#! /usr/bin/env python
# encoding: utf-8
# DC 2008
# Thomas Nagy 2016 (ita)
# Thomas Nagy 2016-2018 (ita)
import re
@ -115,3 +115,4 @@ class fortran_parser(object):
if not found:
if not filename in self.names:
self.names.append(filename)

View File

@ -5,7 +5,7 @@
#!/usr/bin/env python
# encoding: utf-8
# John O'Meara, 2006
# Thomas Nagy, 2006-2016 (ita)
# Thomas Nagy, 2006-2018 (ita)
"""
The **flex** program is a code generator which creates C or C++ files.
@ -26,7 +26,8 @@ def flexfun(tsk):
bld = tsk.generator.bld
wd = bld.variant_dir
def to_list(xx):
if isinstance(xx, str): return [xx]
if isinstance(xx, str):
return [xx]
return xx
tsk.last_cmd = lst = []
lst.extend(to_list(env.FLEX))
@ -62,3 +63,4 @@ def configure(conf):
if re.search (r"\\msys\\[0-9.]+\\bin\\flex.exe$", conf.env.FLEX[0]):
# this is the flex shipped with MSYS
conf.env.FLEX_MSYS = True

View File

@ -5,7 +5,7 @@
#! /usr/bin/env python
# encoding: utf-8
# KWS 2010
# Thomas Nagy 2016 (ita)
# Thomas Nagy 2016-2018 (ita)
import re
from waflib import Utils
@ -67,3 +67,4 @@ def configure(conf):
conf.fc_add_flags()
conf.g95_flags()
conf.g95_modifier_platform()

View File

@ -4,7 +4,7 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2008-2016 (ita)
# Thomas Nagy, 2008-2018 (ita)
"Detect as/gas/gcc for compiling assembly files"

View File

@ -4,7 +4,7 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2006-2016 (ita)
# Thomas Nagy, 2006-2018 (ita)
# Ralf Habacker, 2006 (rh)
# Yinon Ehrlich, 2009
@ -72,7 +72,7 @@ def gcc_modifier_win32(conf):
v.cprogram_PATTERN = '%s.exe'
v.cshlib_PATTERN = '%s.dll'
v.implib_PATTERN = 'lib%s.dll.a'
v.implib_PATTERN = '%s.dll.a'
v.IMPLIB_ST = '-Wl,--out-implib,%s'
v.CFLAGS_cshlib = []
@ -156,3 +156,5 @@ def configure(conf):
conf.cc_load_tools()
conf.cc_add_flags()
conf.link_add_flags()
conf.check_gcc_o_space()

View File

@ -56,3 +56,4 @@ def configure(conf):
conf.load('d')
conf.common_flags_gdc()
conf.d_platform_flags()

View File

@ -5,7 +5,7 @@
#! /usr/bin/env python
# encoding: utf-8
# DC 2008
# Thomas Nagy 2016 (ita)
# Thomas Nagy 2016-2018 (ita)
import re
from waflib import Utils
@ -54,8 +54,10 @@ def get_gfortran_version(conf, fc):
version_re = re.compile(r"GNU\s*Fortran", re.I).search
cmd = fc + ['--version']
out, err = fc_config.getoutput(conf, cmd, stdin=False)
if out: match = version_re(out)
else: match = version_re(err)
if out:
match = version_re(out)
else:
match = version_re(err)
if not match:
conf.fatal('Could not determine the compiler type')
@ -92,3 +94,4 @@ def configure(conf):
conf.fc_add_flags()
conf.gfortran_flags()
conf.gfortran_modifier_platform()
conf.check_gfortran_o_space()

View File

@ -4,7 +4,7 @@
#! /usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2006-2016 (ita)
# Thomas Nagy, 2006-2018 (ita)
"""
Support for GLib2 tools:
@ -74,7 +74,8 @@ class glib_genmarshal(Task.Task):
)
ret = bld.exec_command(cmd1)
if ret: return ret
if ret:
return ret
#print self.outputs[1].abspath()
c = '''#include "%s"\n''' % self.outputs[0].name
@ -247,7 +248,7 @@ def add_settings_enums(self, namespace, filename_list):
raise Errors.WafError("Tried to add gsettings enums to %r more than once" % self.name)
self.settings_enum_namespace = namespace
if type(filename_list) != 'list':
if not isinstance(filename_list, list):
filename_list = [filename_list]
self.settings_enum_files = filename_list
@ -455,7 +456,6 @@ def find_glib_compile_schemas(conf):
def getstr(varname):
return getattr(Options.options, varname, getattr(conf.env,varname, ''))
# TODO make this dependent on the gnu_dirs tool?
gsettingsschemadir = getstr('GSETTINGSSCHEMADIR')
if not gsettingsschemadir:
datadir = getstr('DATADIR')
@ -490,3 +490,4 @@ def options(opt):
"""
gr = opt.add_option_group('Installation directories')
gr.add_option('--gsettingsschemadir', help='GSettings schema location [DATADIR/glib-2.0/schemas]', default='', dest='GSETTINGSSCHEMADIR')

View File

@ -132,3 +132,4 @@ def options(opt):
str_default = default
str_help = '%s [%s]' % (help, re.sub(r'\$\{([^}]+)\}', r'\1', str_default))
dirs_options.add_option(option_name, help=str_help, default='', dest=name.upper())

View File

@ -4,7 +4,7 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2006-2016 (ita)
# Thomas Nagy, 2006-2018 (ita)
# Ralf Habacker, 2006 (rh)
# Yinon Ehrlich, 2009
@ -72,7 +72,7 @@ def gxx_modifier_win32(conf):
v.cxxprogram_PATTERN = '%s.exe'
v.cxxshlib_PATTERN = '%s.dll'
v.implib_PATTERN = 'lib%s.dll.a'
v.implib_PATTERN = '%s.dll.a'
v.IMPLIB_ST = '-Wl,--out-implib,%s'
v.CXXFLAGS_cxxshlib = []
@ -157,3 +157,5 @@ def configure(conf):
conf.cxx_load_tools()
conf.cxx_add_flags()
conf.link_add_flags()
conf.check_gcc_o_space('cxx')

View File

@ -5,7 +5,7 @@
#!/usr/bin/env python
# encoding: utf-8
# Stian Selnes 2008
# Thomas Nagy 2009-2016 (ita)
# Thomas Nagy 2009-2018 (ita)
"""
Detects the Intel C compiler

View File

@ -4,7 +4,7 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy 2009-2016 (ita)
# Thomas Nagy 2009-2018 (ita)
"""
Detects the Intel C++ compiler
@ -31,3 +31,4 @@ def configure(conf):
conf.cxx_load_tools()
conf.cxx_add_flags()
conf.link_add_flags()

View File

@ -5,9 +5,9 @@
#! /usr/bin/env python
# encoding: utf-8
# DC 2008
# Thomas Nagy 2016 (ita)
# Thomas Nagy 2016-2018 (ita)
import os, re
import os, re, traceback
from waflib import Utils, Logs, Errors
from waflib.Tools import fc, fc_config, fc_scan, ar, ccroot
from waflib.Configure import conf
@ -80,7 +80,7 @@ def configure(conf):
Detects the Intel Fortran compilers
"""
if Utils.is_win32:
compiler, version, path, includes, libdirs, arch = conf.detect_ifort(True)
compiler, version, path, includes, libdirs, arch = conf.detect_ifort()
v = conf.env
v.DEST_CPU = arch
v.PATH = path
@ -89,8 +89,7 @@ def configure(conf):
v.MSVC_COMPILER = compiler
try:
v.MSVC_VERSION = float(version)
except Exception:
raise
except ValueError:
v.MSVC_VERSION = float(version[:-3])
conf.find_ifort_win32()
@ -115,32 +114,34 @@ def gather_ifort_versions(conf, versions):
version_pattern = re.compile('^...?.?\....?.?')
try:
all_versions = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Wow6432node\\Intel\\Compilers\\Fortran')
except WindowsError:
except OSError:
try:
all_versions = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Intel\\Compilers\\Fortran')
except WindowsError:
except OSError:
return
index = 0
while 1:
try:
version = Utils.winreg.EnumKey(all_versions, index)
except WindowsError:
except OSError:
break
index += 1
if not version_pattern.match(version):
continue
targets = {}
for target,arch in all_ifort_platforms:
if target=='intel64': targetDir='EM64T_NATIVE'
else: targetDir=target
if target=='intel64':
targetDir='EM64T_NATIVE'
else:
targetDir=target
try:
Utils.winreg.OpenKey(all_versions,version+'\\'+targetDir)
icl_version=Utils.winreg.OpenKey(all_versions,version)
path,type=Utils.winreg.QueryValueEx(icl_version,'ProductDir')
except WindowsError:
except OSError:
pass
else:
batch_file=os.path.join(path,'bin','iclvars.bat')
batch_file=os.path.join(path,'bin','ifortvars.bat')
if os.path.isfile(batch_file):
targets[target] = target_compiler(conf, 'intel', arch, version, target, batch_file)
@ -148,10 +149,10 @@ def gather_ifort_versions(conf, versions):
try:
icl_version = Utils.winreg.OpenKey(all_versions, version+'\\'+target)
path,type = Utils.winreg.QueryValueEx(icl_version,'ProductDir')
except WindowsError:
except OSError:
continue
else:
batch_file=os.path.join(path,'bin','iclvars.bat')
batch_file=os.path.join(path,'bin','ifortvars.bat')
if os.path.isfile(batch_file):
targets[target] = target_compiler(conf, 'intel', arch, version, target, batch_file)
major = version[0:2]
@ -235,11 +236,11 @@ echo LIB=%%LIB%%;%%LIBPATH%%
try:
conf.cmd_and_log(fc + ['/help'], env=env)
except UnicodeError:
st = Utils.ex_stack()
st = traceback.format_exc()
if conf.logger:
conf.logger.error(st)
conf.fatal('ifort: Unicode error - check the code page?')
except Exception ,e:
except Exception as e:
Logs.debug('ifort: get_ifort_version: %r %r %r -> failure %s', compiler, version, target, str(e))
conf.fatal('ifort: cannot run the compiler in get_ifort_version (run with -v to display errors)')
else:
@ -281,7 +282,7 @@ class target_compiler(object):
return
self.is_done = True
try:
vs = self.conf.get_msvc_version(self.compiler, self.version, self.bat_target, self.bat)
vs = self.conf.get_ifort_version_win32(self.compiler, self.version, self.bat_target, self.bat)
except Errors.ConfigurationError:
self.is_valid = False
return
@ -338,7 +339,8 @@ def find_ifort_win32(conf):
# before setting anything, check if the compiler is really intel fortran
env = dict(conf.environ)
if path: env.update(PATH = ';'.join(path))
if path:
env.update(PATH = ';'.join(path))
if not conf.cmd_and_log(fc + ['/nologo', '/help'], env=env):
conf.fatal('not intel fortran compiler could not be identified')
@ -412,3 +414,4 @@ def apply_manifest_ifort(self):
man_node = out_node.parent.find_or_declare(out_node.name + '.manifest')
self.link_task.outputs.append(man_node)
self.env.DO_MANIFEST = True

View File

@ -4,7 +4,7 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2006-2016 (ita)
# Thomas Nagy, 2006-2018 (ita)
"""
Support for translation tools such as msgfmt and intltool
@ -31,6 +31,8 @@ Usage::
Usage of the :py:mod:`waflib.Tools.gnu_dirs` is recommended, but not obligatory.
"""
from __future__ import with_statement
import os, re
from waflib import Context, Task, Utils, Logs
import waflib.Tools.ccroot
@ -90,8 +92,10 @@ def apply_intltool_in_f(self):
:param install_path: installation path
:type install_path: string
"""
try: self.meths.remove('process_source')
except ValueError: pass
try:
self.meths.remove('process_source')
except ValueError:
pass
self.ensure_localedir()
@ -145,8 +149,10 @@ def apply_intltool_po(self):
The file LINGUAS must be present in the directory pointed by *podir* and list the translation files to process.
"""
try: self.meths.remove('process_source')
except ValueError: pass
try:
self.meths.remove('process_source')
except ValueError:
pass
self.ensure_localedir()
@ -157,13 +163,12 @@ def apply_intltool_po(self):
linguas = self.path.find_node(os.path.join(podir, 'LINGUAS'))
if linguas:
# scan LINGUAS file for locales to process
file = open(linguas.abspath())
langs = []
for line in file.readlines():
# ignore lines containing comments
if not line.startswith('#'):
langs += line.split()
file.close()
with open(linguas.abspath()) as f:
langs = []
for line in f.readlines():
# ignore lines containing comments
if not line.startswith('#'):
langs += line.split()
re_linguas = re.compile('[-a-zA-Z_@.]+')
for lang in langs:
# Make sure that we only process lines which contain locales
@ -227,3 +232,4 @@ def configure(conf):
conf.find_intltool_merge()
if conf.env.CC or conf.env.CXX:
conf.check(header_name='locale.h')

View File

@ -10,6 +10,7 @@
Compiler definition for irix/MIPSpro cc compiler
"""
from waflib import Errors
from waflib.Tools import ccroot, ar
from waflib.Configure import conf
@ -28,7 +29,7 @@ def find_irixcc(conf):
try:
conf.cmd_and_log(cc + ['-version'])
except Exception:
except Errors.WafError:
conf.fatal('%r -version could not be executed' % cc)
v.CC = cc
@ -66,3 +67,4 @@ def configure(conf):
conf.cc_load_tools()
conf.cc_add_flags()
conf.link_add_flags()

View File

@ -4,7 +4,7 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2006-2016 (ita)
# Thomas Nagy, 2006-2018 (ita)
"""
Java support
@ -96,6 +96,7 @@ def apply_java(self):
if not y:
self.bld.fatal('Could not find the folder %s from %s' % (x, self.path))
tmp.append(y)
tsk.srcdir = tmp
if getattr(self, 'compat', None):
@ -111,6 +112,7 @@ def apply_java(self):
tsk.env.append_value('JAVACFLAGS', ['-sourcepath', names])
@feature('javac')
@before_method('propagate_uselib_vars')
@after_method('apply_java')
def use_javac_files(self):
"""
@ -141,7 +143,8 @@ def set_classpath(self):
"""
Sets the CLASSPATH value on the *javac* task previously created.
"""
self.env.append_value('CLASSPATH', getattr(self, 'classpath', []))
if getattr(self, 'classpath', None):
self.env.append_unique('CLASSPATH', getattr(self, 'classpath', []))
for x in self.tasks:
x.env.CLASSPATH = os.pathsep.join(self.env.CLASSPATH) + os.pathsep
@ -169,9 +172,11 @@ def jar_files(self):
if manifest:
jarcreate = getattr(self, 'jarcreate', 'cfm')
if not isinstance(manifest,Node.Node):
node = self.path.find_or_declare(manifest)
node = self.path.find_resource(manifest)
else:
node = manifest
if not node:
self.bld.fatal('invalid manifest file %r for %r' % (manifest, self))
tsk.dep_nodes.append(node)
jaropts.insert(0, node.abspath())
else:
@ -243,7 +248,6 @@ class jar_create(JTask):
if not t.hasrun:
return Task.ASK_LATER
if not self.inputs:
global JAR_RE
try:
self.inputs = [x for x in self.basedir.ant_glob(JAR_RE, remove=False) if id(x) != id(self.outputs[0])]
except Exception:
@ -276,10 +280,10 @@ class javac(JTask):
return Task.ASK_LATER
if not self.inputs:
global SOURCE_RE
self.inputs = []
for x in self.srcdir:
self.inputs.extend(x.ant_glob(SOURCE_RE, remove=False))
if x.exists():
self.inputs.extend(x.ant_glob(SOURCE_RE, remove=False))
return super(javac, self).runnable_status()
def post_run(self):
@ -461,3 +465,4 @@ def check_jni_headers(conf):
break
else:
conf.fatal('could not find lib jvm in %r (see config.log)' % libDirs)

View File

@ -57,3 +57,4 @@ def configure(conf):
conf.load('d')
conf.common_flags_ldc2()
conf.d_platform_flags()

View File

@ -5,7 +5,7 @@
#!/usr/bin/env python
# encoding: utf-8
# Sebastian Schlingmann, 2008
# Thomas Nagy, 2008-2016 (ita)
# Thomas Nagy, 2008-2018 (ita)
"""
Lua support.
@ -39,3 +39,4 @@ def configure(conf):
Detect the luac compiler and set *conf.env.LUAC*
"""
conf.find_program('luac', var='LUAC')

View File

@ -29,15 +29,15 @@ def h_file(self):
if filename in cache and cache[filename][0] == st.st_mtime:
return cache[filename][1]
global STRONGEST
if STRONGEST:
ret = Utils.h_file(filename)
else:
if stat.S_ISDIR(st[stat.ST_MODE]):
raise IOError('Not a file')
ret = Utils.md5(str((st.st_mtime, st.st_size))).digest()
ret = Utils.md5(str((st.st_mtime, st.st_size)).encode()).digest()
cache[filename] = (st.st_mtime, ret)
return ret
h_file.__doc__ = Node.Node.h_file.__doc__
Node.Node.h_file = h_file

View File

@ -42,7 +42,7 @@ Supported platforms: ia64, x64, x86, x86_amd64, x86_ia64, x86_arm, amd64_x86, am
Compilers supported:
* msvc => Visual Studio, versions 6.0 (VC 98, VC .NET 2002) to 12.0 (Visual Studio 2013)
* msvc => Visual Studio, versions 6.0 (VC 98, VC .NET 2002) to 15 (Visual Studio 2017)
* wsdk => Windows SDK, versions 6.0, 6.1, 7.0, 7.1, 8.0
* icl => Intel compiler, versions 9, 10, 11, 13
* winphone => Visual Studio to target Windows Phone 8 native (version 8.0 for now)
@ -56,7 +56,7 @@ cmd.exe /C "chcp 1252 & set PYTHONUNBUFFERED=true && set && waf configure"
Setting PYTHONUNBUFFERED gives the unbuffered output.
"""
import os, sys, re
import os, sys, re, traceback
from waflib import Utils, Logs, Options, Errors
from waflib.TaskGen import after_method, feature
@ -117,7 +117,7 @@ def setup_msvc(conf, versiondict):
platforms=Utils.to_list(conf.env.MSVC_TARGETS) or [i for i,j in all_msvc_platforms+all_icl_platforms+all_wince_platforms]
desired_versions = getattr(Options.options, 'msvc_version', '').split(',')
if desired_versions == ['']:
desired_versions = conf.env.MSVC_VERSIONS or list(versiondict.keys())
desired_versions = conf.env.MSVC_VERSIONS or list(reversed(sorted(versiondict.keys())))
# Override lazy detection by evaluating after the fact.
lazy_detect = getattr(Options.options, 'msvc_lazy', True)
@ -134,15 +134,23 @@ def setup_msvc(conf, versiondict):
conf.env.MSVC_INSTALLED_VERSIONS = versiondict
for version in desired_versions:
Logs.debug('msvc: detecting %r - %r', version, desired_versions)
try:
targets = versiondict[version]
except KeyError:
continue
seen = set()
for arch in platforms:
if arch in seen:
continue
else:
seen.add(arch)
try:
cfg = targets[arch]
except KeyError:
continue
cfg.evaluate()
if cfg.is_valid:
compiler,revision = version.rsplit(' ', 1)
@ -209,11 +217,11 @@ echo LIB=%%LIB%%;%%LIBPATH%%
try:
conf.cmd_and_log(cxx + ['/help'], env=env)
except UnicodeError:
st = Utils.ex_stack()
st = traceback.format_exc()
if conf.logger:
conf.logger.error(st)
conf.fatal('msvc: Unicode error - check the code page?')
except Exception ,e:
except Exception as e:
Logs.debug('msvc: get_msvc_version: %r %r %r -> failure %s', compiler, version, target, str(e))
conf.fatal('msvc: cannot run the compiler in get_msvc_version (run with -v to display errors)')
else:
@ -223,42 +231,6 @@ echo LIB=%%LIB%%;%%LIBPATH%%
return (MSVC_PATH, MSVC_INCDIR, MSVC_LIBDIR)
@conf
def gather_wsdk_versions(conf, versions):
"""
Use winreg to add the msvc versions to the input list
:param versions: list to modify
:type versions: list
"""
version_pattern = re.compile('^v..?.?\...?.?')
try:
all_versions = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Wow6432node\\Microsoft\\Microsoft SDKs\\Windows')
except WindowsError:
try:
all_versions = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Microsoft\\Microsoft SDKs\\Windows')
except WindowsError:
return
index = 0
while 1:
try:
version = Utils.winreg.EnumKey(all_versions, index)
except WindowsError:
break
index += 1
if not version_pattern.match(version):
continue
try:
msvc_version = Utils.winreg.OpenKey(all_versions, version)
path,type = Utils.winreg.QueryValueEx(msvc_version,'InstallationFolder')
except WindowsError:
continue
if path and os.path.isfile(os.path.join(path, 'bin', 'SetEnv.cmd')):
targets = {}
for target,arch in all_msvc_platforms:
targets[target] = target_compiler(conf, 'wsdk', arch, version, '/'+target, os.path.join(path, 'bin', 'SetEnv.cmd'))
versions['wsdk ' + version[1:]] = targets
def gather_wince_supported_platforms():
"""
Checks SmartPhones SDKs
@ -269,10 +241,10 @@ def gather_wince_supported_platforms():
supported_wince_platforms = []
try:
ce_sdk = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Wow6432node\\Microsoft\\Windows CE Tools\\SDKs')
except WindowsError:
except OSError:
try:
ce_sdk = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Microsoft\\Windows CE Tools\\SDKs')
except WindowsError:
except OSError:
ce_sdk = ''
if not ce_sdk:
return supported_wince_platforms
@ -282,15 +254,15 @@ def gather_wince_supported_platforms():
try:
sdk_device = Utils.winreg.EnumKey(ce_sdk, index)
sdk = Utils.winreg.OpenKey(ce_sdk, sdk_device)
except WindowsError:
except OSError:
break
index += 1
try:
path,type = Utils.winreg.QueryValueEx(sdk, 'SDKRootDir')
except WindowsError:
except OSError:
try:
path,type = Utils.winreg.QueryValueEx(sdk,'SDKInformation')
except WindowsError:
except OSError:
continue
path,xml = os.path.split(path)
path = str(path)
@ -313,18 +285,18 @@ def gather_msvc_detected_versions():
prefix = 'SOFTWARE\\Wow6432node\\Microsoft\\' + vcver
try:
all_versions = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, prefix)
except WindowsError:
except OSError:
prefix = 'SOFTWARE\\Microsoft\\' + vcver
try:
all_versions = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, prefix)
except WindowsError:
except OSError:
continue
index = 0
while 1:
try:
version = Utils.winreg.EnumKey(all_versions, index)
except WindowsError:
except OSError:
break
index += 1
match = version_pattern.match(version)
@ -352,7 +324,6 @@ class target_compiler(object):
:param version: compiler version number
:param bat_target: ?
:param bat: path to the batch file to run
:param callback: optional function to take the realized environment variables tup and map it (e.g. to combine other constant paths)
"""
self.conf = ctx
self.name = None
@ -381,10 +352,46 @@ class target_compiler(object):
(self.bindirs, self.incdirs, self.libdirs) = vs
def __str__(self):
return str((self.bindirs, self.incdirs, self.libdirs))
return str((self.compiler, self.cpu, self.version, self.bat_target, self.bat))
def __repr__(self):
return repr((self.bindirs, self.incdirs, self.libdirs))
return repr((self.compiler, self.cpu, self.version, self.bat_target, self.bat))
@conf
def gather_wsdk_versions(conf, versions):
"""
Use winreg to add the msvc versions to the input list
:param versions: list to modify
:type versions: list
"""
version_pattern = re.compile('^v..?.?\...?.?')
try:
all_versions = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Wow6432node\\Microsoft\\Microsoft SDKs\\Windows')
except OSError:
try:
all_versions = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Microsoft\\Microsoft SDKs\\Windows')
except OSError:
return
index = 0
while 1:
try:
version = Utils.winreg.EnumKey(all_versions, index)
except OSError:
break
index += 1
if not version_pattern.match(version):
continue
try:
msvc_version = Utils.winreg.OpenKey(all_versions, version)
path,type = Utils.winreg.QueryValueEx(msvc_version,'InstallationFolder')
except OSError:
continue
if path and os.path.isfile(os.path.join(path, 'bin', 'SetEnv.cmd')):
targets = {}
for target,arch in all_msvc_platforms:
targets[target] = target_compiler(conf, 'wsdk', arch, version, '/'+target, os.path.join(path, 'bin', 'SetEnv.cmd'))
versions['wsdk ' + version[1:]] = targets
@conf
def gather_msvc_targets(conf, versions, version, vc_path):
@ -402,7 +409,7 @@ def gather_msvc_targets(conf, versions, version, vc_path):
elif os.path.isfile(os.path.join(vc_path, 'Bin', 'vcvars32.bat')):
targets['x86'] = target_compiler(conf, 'msvc', 'x86', version, '', os.path.join(vc_path, 'Bin', 'vcvars32.bat'))
if targets:
versions['msvc ' + version] = targets
versions['msvc %s' % version] = targets
@conf
def gather_wince_targets(conf, versions, version, vc_path, vsvars, supported_platforms):
@ -419,6 +426,7 @@ def gather_wince_targets(conf, versions, version, vc_path, vsvars, supported_pla
incdirs = [os.path.join(winCEpath, 'include'), os.path.join(winCEpath, 'atlmfc', 'include'), include]
libdirs = [os.path.join(winCEpath, 'lib', platform), os.path.join(winCEpath, 'atlmfc', 'lib', platform), lib]
def combine_common(obj, compiler_env):
# TODO this is likely broken, remove in waf 2.1
(common_bindirs,_1,_2) = compiler_env
return (bindirs + common_bindirs, incdirs, libdirs)
targets[platform] = target_compiler(conf, 'msvc', platform, version, 'x86', vsvars, combine_common)
@ -434,6 +442,38 @@ def gather_winphone_targets(conf, versions, version, vc_path, vsvars):
if targets:
versions['winphone ' + version] = targets
@conf
def gather_vswhere_versions(conf, versions):
try:
import json
except ImportError:
Logs.error('Visual Studio 2017 detection requires Python 2.6')
return
prg_path = os.environ.get('ProgramFiles(x86)', os.environ.get('ProgramFiles', 'C:\\Program Files (x86)'))
vswhere = os.path.join(prg_path, 'Microsoft Visual Studio', 'Installer', 'vswhere.exe')
args = [vswhere, '-products', '*', '-legacy', '-format', 'json']
try:
txt = conf.cmd_and_log(args)
except Errors.WafError as e:
Logs.debug('msvc: vswhere.exe failed %s', e)
return
if sys.version_info[0] < 3:
try:
txt = txt.decode(sys.stdout.encoding or 'cp1252')
except UnicodeError:
txt = txt.decode('utf-8', 'replace')
arr = json.loads(txt)
arr.sort(key=lambda x: x['installationVersion'])
for entry in arr:
ver = entry['installationVersion']
ver = str('.'.join(ver.split('.')[:2]))
path = str(os.path.abspath(entry['installationPath']))
if os.path.exists(path) and ('msvc %s' % ver) not in versions:
conf.gather_msvc_targets(versions, ver, path)
@conf
def gather_msvc_versions(conf, versions):
vc_paths = []
@ -441,14 +481,14 @@ def gather_msvc_versions(conf, versions):
try:
try:
msvc_version = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, reg + "\\Setup\\VC")
except WindowsError:
except OSError:
msvc_version = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, reg + "\\Setup\\Microsoft Visual C++")
path,type = Utils.winreg.QueryValueEx(msvc_version, 'ProductDir')
except WindowsError:
except OSError:
try:
msvc_version = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, "SOFTWARE\\Wow6432node\\Microsoft\\VisualStudio\\SxS\\VS7")
path,type = Utils.winreg.QueryValueEx(msvc_version, version)
except WindowsError:
except OSError:
continue
else:
vc_paths.append((version, os.path.abspath(str(path))))
@ -488,29 +528,31 @@ def gather_icl_versions(conf, versions):
version_pattern = re.compile('^...?.?\....?.?')
try:
all_versions = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Wow6432node\\Intel\\Compilers\\C++')
except WindowsError:
except OSError:
try:
all_versions = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Intel\\Compilers\\C++')
except WindowsError:
except OSError:
return
index = 0
while 1:
try:
version = Utils.winreg.EnumKey(all_versions, index)
except WindowsError:
except OSError:
break
index += 1
if not version_pattern.match(version):
continue
targets = {}
for target,arch in all_icl_platforms:
if target=='intel64': targetDir='EM64T_NATIVE'
else: targetDir=target
if target=='intel64':
targetDir='EM64T_NATIVE'
else:
targetDir=target
try:
Utils.winreg.OpenKey(all_versions,version+'\\'+targetDir)
icl_version=Utils.winreg.OpenKey(all_versions,version)
path,type=Utils.winreg.QueryValueEx(icl_version,'ProductDir')
except WindowsError:
except OSError:
pass
else:
batch_file=os.path.join(path,'bin','iclvars.bat')
@ -520,7 +562,7 @@ def gather_icl_versions(conf, versions):
try:
icl_version = Utils.winreg.OpenKey(all_versions, version+'\\'+target)
path,type = Utils.winreg.QueryValueEx(icl_version,'ProductDir')
except WindowsError:
except OSError:
continue
else:
batch_file=os.path.join(path,'bin','iclvars.bat')
@ -540,28 +582,30 @@ def gather_intel_composer_versions(conf, versions):
version_pattern = re.compile('^...?.?\...?.?.?')
try:
all_versions = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Wow6432node\\Intel\\Suites')
except WindowsError:
except OSError:
try:
all_versions = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Intel\\Suites')
except WindowsError:
except OSError:
return
index = 0
while 1:
try:
version = Utils.winreg.EnumKey(all_versions, index)
except WindowsError:
except OSError:
break
index += 1
if not version_pattern.match(version):
continue
targets = {}
for target,arch in all_icl_platforms:
if target=='intel64': targetDir='EM64T_NATIVE'
else: targetDir=target
if target=='intel64':
targetDir='EM64T_NATIVE'
else:
targetDir=target
try:
try:
defaults = Utils.winreg.OpenKey(all_versions,version+'\\Defaults\\C++\\'+targetDir)
except WindowsError:
except OSError:
if targetDir == 'EM64T_NATIVE':
defaults = Utils.winreg.OpenKey(all_versions,version+'\\Defaults\\C++\\EM64T')
else:
@ -570,7 +614,7 @@ def gather_intel_composer_versions(conf, versions):
Utils.winreg.OpenKey(all_versions,version+'\\'+uid+'\\C++\\'+targetDir)
icl_version=Utils.winreg.OpenKey(all_versions,version+'\\'+uid+'\\C++')
path,type=Utils.winreg.QueryValueEx(icl_version,'ProductDir')
except WindowsError:
except OSError:
pass
else:
batch_file=os.path.join(path,'bin','iclvars.bat')
@ -611,6 +655,7 @@ def get_msvc_versions(self):
self.gather_intel_composer_versions(dct)
self.gather_wsdk_versions(dct)
self.gather_msvc_versions(dct)
self.gather_vswhere_versions(dct)
Logs.debug('msvc: detected versions %r', list(dct.keys()))
return dct
@ -668,7 +713,7 @@ def libname_msvc(self, libname, is_static=False):
(lt_path, lt_libname, lt_static) = self.find_lt_names_msvc(lib, is_static)
if lt_path != None and lt_libname != None:
if lt_static == True:
if lt_static:
# file existence check has been made by find_lt_names
return os.path.join(lt_path,lt_libname)
@ -765,7 +810,7 @@ def autodetect(conf, arch=False):
v.MSVC_COMPILER = compiler
try:
v.MSVC_VERSION = float(version)
except TypeError:
except ValueError:
v.MSVC_VERSION = float(version[:-3])
def _get_prog_names(conf, compiler):
@ -800,7 +845,8 @@ def find_msvc(conf):
# before setting anything, check if the compiler is really msvc
env = dict(conf.environ)
if path: env.update(PATH = ';'.join(path))
if path:
env.update(PATH = ';'.join(path))
if not conf.cmd_and_log(cxx + ['/nologo', '/help'], env=env):
conf.fatal('the msvc compiler could not be identified')
@ -810,8 +856,7 @@ def find_msvc(conf):
# linker
if not v.LINK_CXX:
# TODO: var=LINK_CXX to let so that LINK_CXX can be overridden?
v.LINK_CXX = conf.find_program(linker_name, path_list=path, errmsg='%s was not found (linker)' % linker_name)
conf.find_program(linker_name, path_list=path, errmsg='%s was not found (linker)' % linker_name, var='LINK_CXX')
if not v.LINK_CC:
v.LINK_CC = v.LINK_CXX
@ -868,13 +913,6 @@ def msvc_common_flags(conf):
v.AR_TGT_F = v.CCLNK_TGT_F = v.CXXLNK_TGT_F = '/OUT:'
# Subsystem specific flags
v.CFLAGS_CONSOLE = v.CXXFLAGS_CONSOLE = ['/SUBSYSTEM:CONSOLE']
v.CFLAGS_NATIVE = v.CXXFLAGS_NATIVE = ['/SUBSYSTEM:NATIVE']
v.CFLAGS_POSIX = v.CXXFLAGS_POSIX = ['/SUBSYSTEM:POSIX']
v.CFLAGS_WINDOWS = v.CXXFLAGS_WINDOWS = ['/SUBSYSTEM:WINDOWS']
v.CFLAGS_WINDOWSCE = v.CXXFLAGS_WINDOWSCE = ['/SUBSYSTEM:WINDOWSCE']
# CRT specific flags
v.CFLAGS_CRT_MULTITHREADED = v.CXXFLAGS_CRT_MULTITHREADED = ['/MT']
v.CFLAGS_CRT_MULTITHREADED_DLL = v.CXXFLAGS_CRT_MULTITHREADED_DLL = ['/MD']
@ -968,7 +1006,7 @@ def make_winphone_app(self):
Insert configuration flags for windows phone applications (adds /ZW, /TP...)
"""
make_winapp(self, 'WINAPI_FAMILY_PHONE_APP')
conf.env.append_unique('LINKFLAGS', ['/NODEFAULTLIB:ole32.lib', 'PhoneAppModelHost.lib'])
self.env.append_unique('LINKFLAGS', ['/NODEFAULTLIB:ole32.lib', 'PhoneAppModelHost.lib'])
@feature('winapp')
@after_method('process_use')

View File

@ -4,7 +4,7 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2008-2016 (ita)
# Thomas Nagy, 2008-2018 (ita)
"""
Nasm tool (asm processing)

View File

@ -25,3 +25,4 @@ def build(bld):
x.write('')
for (name, cls) in Task.classes.items():
cls.run = run

View File

@ -5,7 +5,7 @@
#!/usr/bin/env python
# encoding: utf-8
# andersg at 0x63.nu 2007
# Thomas Nagy 2016 (ita)
# Thomas Nagy 2016-2018 (ita)
"""
Support for Perl extensions. A C/C++ compiler is required::
@ -28,7 +28,7 @@ Support for Perl extensions. A C/C++ compiler is required::
"""
import os
from waflib import Task, Options, Utils
from waflib import Task, Options, Utils, Errors
from waflib.Configure import conf
from waflib.TaskGen import extension, feature, before_method
@ -40,7 +40,8 @@ def init_perlext(self):
*lib* prefix from library names.
"""
self.uselib = self.to_list(getattr(self, 'uselib', []))
if not 'PERLEXT' in self.uselib: self.uselib.append('PERLEXT')
if not 'PERLEXT' in self.uselib:
self.uselib.append('PERLEXT')
self.env.cshlib_PATTERN = self.env.cxxshlib_PATTERN = self.env.perlext_PATTERN
@extension('.xs')
@ -102,7 +103,7 @@ def check_perl_module(self, module):
self.start_msg('perl module %s' % module)
try:
r = self.cmd_and_log(cmd)
except Exception:
except Errors.WafError:
self.end_msg(False)
return None
self.end_msg(r or True)
@ -156,3 +157,4 @@ def options(opt):
"""
opt.add_option('--with-perl-binary', type='string', dest='perlbinary', help = 'Specify alternate perl binary', default=None)
opt.add_option('--with-perl-archdir', type='string', dest='perlarchdir', help = 'Specify directory where to install arch specific files', default=None)

View File

@ -23,7 +23,7 @@ Support for Python, detect the headers and libraries and provide
"""
import os, sys
from waflib import Utils, Options, Errors, Logs, Task, Node
from waflib import Errors, Logs, Node, Options, Task, Utils
from waflib.TaskGen import extension, before_method, after_method, feature
from waflib.Configure import conf
@ -54,7 +54,7 @@ import sys, py_compile
py_compile.compile(sys.argv[1], sys.argv[2], sys.argv[3], True)
'''
"""
Piece of Python code used in :py:func:`waflib.Tools.python.pytask` for byte-compiling python files
Piece of Python code used in :py:class:`waflib.Tools.python.pyo` and :py:class:`waflib.Tools.python.pyc` for byte-compiling python files
"""
DISTUTILS_IMP = ['from distutils.sysconfig import get_config_var, get_python_lib']
@ -83,7 +83,7 @@ def process_py(self, node):
"""
Add signature of .py file, so it will be byte-compiled when necessary
"""
assert(getattr(self, 'install_path')), 'add features="py"'
assert(hasattr(self, 'install_path')), 'add features="py"'
# where to install the python file
if self.install_path:
@ -557,7 +557,7 @@ def check_python_module(conf, module_name, condition=''):
conf.start_msg(msg)
try:
ret = conf.cmd_and_log(conf.env.PYTHON + ['-c', PYTHON_MODULE_TEMPLATE % module_name])
except Exception:
except Errors.WafError:
conf.end_msg(False)
conf.fatal('Could not find the python module %r' % module_name)
@ -596,7 +596,7 @@ def configure(conf):
v.NOPYCACHE=Options.options.nopycache
if not v.PYTHON:
v.PYTHON = getattr(Options.options, 'python', None) or sys.executable
v.PYTHON = [getattr(Options.options, 'python', None) or sys.executable]
v.PYTHON = Utils.to_list(v.PYTHON)
conf.find_program('python', var='PYTHON')
@ -628,3 +628,4 @@ def options(opt):
help='Installation path for python modules (py, platform-independent .py and .pyc files)')
pyopt.add_option('--pythonarchdir', dest='pythonarchdir',
help='Installation path for python extension (pyext, platform-dependent .so or .dylib files)')

View File

@ -4,7 +4,7 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2006-2016 (ita)
# Thomas Nagy, 2006-2018 (ita)
"""
This tool helps with finding Qt5 tools and libraries,
@ -52,7 +52,7 @@ You also need to edit your sources accordingly:
incs = set(self.to_list(getattr(self, 'includes', '')))
for x in self.compiled_tasks:
incs.add(x.inputs[0].parent.path_from(self.path))
self.includes = list(incs)
self.includes = sorted(incs)
Note: another tool provides Qt processing that does not require
.moc includes, see 'playground/slow_qt/'.
@ -65,6 +65,8 @@ The detection uses pkg-config on Linux by default. To force static library detec
QT5_XCOMPILE=1 QT5_FORCE_STATIC=1 waf configure
"""
from __future__ import with_statement
try:
from xml.sax import make_parser
from xml.sax.handler import ContentHandler
@ -153,7 +155,7 @@ class qxx(Task.classes['cxx']):
# direct injection in the build phase (safe because called from the main thread)
gen = self.generator.bld.producer
gen.outstanding.appendleft(tsk)
gen.outstanding.append(tsk)
gen.total += 1
return tsk
@ -234,6 +236,7 @@ class XMLHandler(ContentHandler):
Parses ``.qrc`` files
"""
def __init__(self):
ContentHandler.__init__(self)
self.buf = []
self.files = []
def startElement(self, name, attrs):
@ -248,7 +251,7 @@ class XMLHandler(ContentHandler):
@extension(*EXT_RCC)
def create_rcc_task(self, node):
"Creates rcc and cxx tasks for ``.qrc`` files"
rcnode = node.change_ext('_rc.cpp')
rcnode = node.change_ext('_rc.%d.cpp' % self.idx)
self.create_task('rcc', node, rcnode)
cpptask = self.create_task('cxx', rcnode, rcnode.change_ext('.o'))
try:
@ -260,8 +263,21 @@ def create_rcc_task(self, node):
@extension(*EXT_UI)
def create_uic_task(self, node):
"Create uic tasks for user interface ``.ui`` definition files"
uictask = self.create_task('ui5', node)
uictask.outputs = [node.parent.find_or_declare(self.env.ui_PATTERN % node.name[:-3])]
"""
If UIC file is used in more than one bld, we would have a conflict in parallel execution
It is not possible to change the file names (like .self.idx. as for objects) as they have
to be referenced by the source file, but we can assume that the transformation will be identical
and the tasks can be shared in a global cache.
"""
try:
uic_cache = self.bld.uic_cache
except AttributeError:
uic_cache = self.bld.uic_cache = {}
if node not in uic_cache:
uictask = uic_cache[node] = self.create_task('ui5', node)
uictask.outputs = [node.parent.find_or_declare(self.env.ui_PATTERN % node.name[:-3])]
@extension('.ts')
def add_lang(self, node):
@ -313,11 +329,11 @@ def apply_qt5(self):
for x in self.to_list(self.lang):
if isinstance(x, str):
x = self.path.find_resource(x + '.ts')
qmtasks.append(self.create_task('ts2qm', x, x.change_ext('.qm')))
qmtasks.append(self.create_task('ts2qm', x, x.change_ext('.%d.qm' % self.idx)))
if getattr(self, 'update', None) and Options.options.trans_qt5:
cxxnodes = [a.inputs[0] for a in self.compiled_tasks] + [
a.inputs[0] for a in self.tasks if getattr(a, 'inputs', None) and a.inputs[0].name.endswith('.ui')]
a.inputs[0] for a in self.tasks if a.inputs and a.inputs[0].name.endswith('.ui')]
for x in qmtasks:
self.create_task('trans_update', cxxnodes, x.inputs)
@ -325,14 +341,15 @@ def apply_qt5(self):
qmnodes = [x.outputs[0] for x in qmtasks]
rcnode = self.langname
if isinstance(rcnode, str):
rcnode = self.path.find_or_declare(rcnode + '.qrc')
rcnode = self.path.find_or_declare(rcnode + ('.%d.qrc' % self.idx))
t = self.create_task('qm2rcc', qmnodes, rcnode)
k = create_rcc_task(self, t.outputs[0])
self.link_task.inputs.append(k.outputs[0])
lst = []
for flag in self.to_list(self.env.CXXFLAGS):
if len(flag) < 2: continue
if len(flag) < 2:
continue
f = flag[0:2]
if f in ('-D', '-I', '/D', '/I'):
if (f[0] == '/'):
@ -368,19 +385,18 @@ class rcc(Task.Task):
parser = make_parser()
curHandler = XMLHandler()
parser.setContentHandler(curHandler)
fi = open(self.inputs[0].abspath(), 'r')
try:
parser.parse(fi)
finally:
fi.close()
with open(self.inputs[0].abspath(), 'r') as f:
parser.parse(f)
nodes = []
names = []
root = self.inputs[0].parent
for x in curHandler.files:
nd = root.find_resource(x)
if nd: nodes.append(nd)
else: names.append(x)
if nd:
nodes.append(nd)
else:
names.append(x)
return (nodes, names)
class moc(Task.Task):
@ -456,7 +472,6 @@ def configure(self):
self.fatal('Could not build a simple Qt application')
# FreeBSD does not add /usr/local/lib and the pkg-config files do not provide it either :-/
from waflib import Utils
if Utils.unversioned_sys_platform() == 'freebsd':
frag = '#include <QApplication>\nint main(int argc, char **argv) { QApplication app(argc, argv); return NULL != (void*) (&app);}\n'
try:
@ -572,7 +587,7 @@ def find_qt5_binaries(self):
uicver = uicver.replace('Qt User Interface Compiler ','').replace('User Interface Compiler for Qt', '')
self.end_msg(uicver)
if uicver.find(' 3.') != -1 or uicver.find(' 4.') != -1:
self.fatal('this uic compiler is for qt3 or qt5, add uic for qt5 to your path')
self.fatal('this uic compiler is for qt3 or qt4, add uic for qt5 to your path')
find_bin(['moc-qt5', 'moc'], 'QT_MOC')
find_bin(['rcc-qt5', 'rcc'], 'QT_RCC')
@ -658,16 +673,14 @@ def find_qt5_libraries(self):
self.msg('Checking for %s' % i, False, 'YELLOW')
env.append_unique('INCLUDES_' + uselib, os.path.join(env.QTLIBS, frameworkName, 'Headers'))
else:
for j in ('', 'd'):
k = '_DEBUG' if j == 'd' else ''
ret = self.find_single_qt5_lib(i + j, uselib + k, env.QTLIBS, qtincludes, force_static)
if not force_static and not ret:
ret = self.find_single_qt5_lib(i + j, uselib + k, env.QTLIBS, qtincludes, True)
self.msg('Checking for %s' % (i + j), ret, 'GREEN' if ret else 'YELLOW')
ret = self.find_single_qt5_lib(i, uselib, env.QTLIBS, qtincludes, force_static)
if not force_static and not ret:
ret = self.find_single_qt5_lib(i, uselib, env.QTLIBS, qtincludes, True)
self.msg('Checking for %s' % i, ret, 'GREEN' if ret else 'YELLOW')
else:
path = '%s:%s:%s/pkgconfig:/usr/lib/qt5/lib/pkgconfig:/opt/qt5/lib/pkgconfig:/usr/lib/qt5/lib:/opt/qt5/lib' % (
self.environ.get('PKG_CONFIG_PATH', ''), env.QTLIBS, env.QTLIBS)
for i in self.qt5_vars_debug + self.qt5_vars:
for i in self.qt5_vars:
self.check_cfg(package=i, args='--cflags --libs', mandatory=False, force_static=force_static, pkg_config_path=path)
@conf
@ -693,7 +706,6 @@ def simplify_qt5_libs(self):
accu.append(lib)
env['LIBPATH_'+var] = accu
process_lib(self.qt5_vars, 'LIBPATH_QTCORE')
process_lib(self.qt5_vars_debug, 'LIBPATH_QTCORE_DEBUG')
@conf
def add_qt5_rpath(self):
@ -716,7 +728,6 @@ def add_qt5_rpath(self):
accu.append('-Wl,--rpath='+lib)
env['RPATH_' + var] = accu
process_rpath(self.qt5_vars, 'LIBPATH_QTCORE')
process_rpath(self.qt5_vars_debug, 'LIBPATH_QTCORE_DEBUG')
@conf
def set_qt5_libs_to_check(self):
@ -743,10 +754,6 @@ def set_qt5_libs_to_check(self):
if qtextralibs:
self.qt5_vars.extend(qtextralibs.split(','))
if not hasattr(self, 'qt5_vars_debug'):
self.qt5_vars_debug = [a + '_DEBUG' for a in self.qt5_vars]
self.qt5_vars_debug = Utils.to_list(self.qt5_vars_debug)
@conf
def set_qt5_defines(self):
if sys.platform != 'win32':
@ -754,7 +761,6 @@ def set_qt5_defines(self):
for x in self.qt5_vars:
y=x.replace('Qt5', 'Qt')[2:].upper()
self.env.append_unique('DEFINES_%s' % x.upper(), 'QT_%s_LIB' % y)
self.env.append_unique('DEFINES_%s_DEBUG' % x.upper(), 'QT_%s_LIB' % y)
def options(opt):
"""
@ -766,3 +772,4 @@ def options(opt):
opt.add_option('--translate', action='store_true', help='collect translation strings', dest='trans_qt5', default=False)
opt.add_option('--qtextralibs', type='string', default='', dest='qtextralibs', help='additional qt libraries on the system to add to default ones, comma separated')

View File

@ -5,7 +5,7 @@
#!/usr/bin/env python
# encoding: utf-8
# daniel.svensson at purplescout.se 2008
# Thomas Nagy 2016 (ita)
# Thomas Nagy 2016-2018 (ita)
"""
Support for Ruby extensions. A C/C++ compiler is required::
@ -27,7 +27,7 @@ Support for Ruby extensions. A C/C++ compiler is required::
"""
import os
from waflib import Options, Utils, Task
from waflib import Errors, Options, Task, Utils
from waflib.TaskGen import before_method, feature, extension
from waflib.Configure import conf
@ -64,13 +64,13 @@ def check_ruby_version(self, minver=()):
try:
version = self.cmd_and_log(ruby + ['-e', 'puts defined?(VERSION) ? VERSION : RUBY_VERSION']).strip()
except Exception:
except Errors.WafError:
self.fatal('could not determine ruby version')
self.env.RUBY_VERSION = version
try:
ver = tuple(map(int, version.split(".")))
except Exception:
ver = tuple(map(int, version.split('.')))
except Errors.WafError:
self.fatal('unsupported ruby version %r' % version)
cver = ''
@ -155,7 +155,7 @@ def check_ruby_module(self, module_name):
self.start_msg('Ruby module %s' % module_name)
try:
self.cmd_and_log(self.env.RUBY + ['-e', 'require \'%s\';puts 1' % module_name])
except Exception:
except Errors.WafError:
self.end_msg(False)
self.fatal('Could not find the ruby module %r' % module_name)
self.end_msg(True)
@ -187,3 +187,4 @@ def options(opt):
opt.add_option('--with-ruby-archdir', type='string', dest='rubyarchdir', help='Specify directory where to install arch specific files')
opt.add_option('--with-ruby-libdir', type='string', dest='rubylibdir', help='Specify alternate ruby library path')
opt.add_option('--with-ruby-binary', type='string', dest='rubybinary', help='Specify alternate ruby binary')

View File

@ -4,9 +4,10 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2006-2016 (ita)
# Thomas Nagy, 2006-2018 (ita)
# Ralf Habacker, 2006 (rh)
from waflib import Errors
from waflib.Tools import ccroot, ar
from waflib.Configure import conf
@ -19,7 +20,7 @@ def find_scc(conf):
cc = conf.find_program('cc', var='CC')
try:
conf.cmd_and_log(cc + ['-flags'])
except Exception:
except Errors.WafError:
conf.fatal('%r is not a Sun compiler' % cc)
v.CC_NAME = 'sun'
conf.get_suncc_version(cc)
@ -67,3 +68,4 @@ def configure(conf):
conf.cc_load_tools()
conf.cc_add_flags()
conf.link_add_flags()

View File

@ -4,9 +4,10 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2006-2016 (ita)
# Thomas Nagy, 2006-2018 (ita)
# Ralf Habacker, 2006 (rh)
from waflib import Errors
from waflib.Tools import ccroot, ar
from waflib.Configure import conf
@ -19,7 +20,7 @@ def find_sxx(conf):
cc = conf.find_program(['CC', 'c++'], var='CXX')
try:
conf.cmd_and_log(cc + ['-flags'])
except Exception:
except Errors.WafError:
conf.fatal('%r is not a Sun compiler' % cc)
v.CXX_NAME = 'sun'
conf.get_suncc_version(cc)
@ -67,3 +68,4 @@ def configure(conf):
conf.cxx_load_tools()
conf.cxx_add_flags()
conf.link_add_flags()

View File

@ -4,7 +4,7 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2006-2016 (ita)
# Thomas Nagy, 2006-2018 (ita)
"""
TeX/LaTeX/PDFLaTeX/XeLaTeX support
@ -52,20 +52,23 @@ def bibunitscan(self):
node = self.inputs[0]
nodes = []
if not node: return nodes
if not node:
return nodes
code = node.read()
for match in re_bibunit.finditer(code):
path = match.group('file')
if path:
found = None
for k in ('', '.bib'):
# add another loop for the tex include paths?
Logs.debug('tex: trying %s%s', path, k)
fi = node.parent.find_resource(path + k)
if fi:
found = True
nodes.append(fi)
# no break, people are crazy
else:
# no break
if not found:
Logs.debug('tex: could not find %s', path)
Logs.debug('tex: found the following bibunit files: %s', nodes)
@ -160,14 +163,14 @@ class tex(Task.Task):
nodes = []
names = []
seen = []
if not node: return (nodes, names)
if not node:
return (nodes, names)
def parse_node(node):
if node in seen:
return
seen.append(node)
code = node.read()
global re_tex
for match in re_tex.finditer(code):
multibib = match.group('type')
@ -541,3 +544,4 @@ def configure(self):
except self.errors.ConfigurationError:
pass
v.DVIPSFLAGS = '-Ppdf'

Some files were not shown because too many files have changed in this diff Show More