
build:wafsamba: Build on waf 1.9

Signed-off-by: Thomas Nagy <tnagy@waf.io>
Reviewed-by: Alexander Bokovoy <ab@samba.org>
Reviewed-by: Andrew Bartlett <abartlet@samba.org>
Authored by Thomas Nagy on 2016-03-26 13:18:07 +01:00; committed by Andrew Bartlett
parent f3e349bebc
commit 8077f462c9
23 changed files with 507 additions and 449 deletions

buildtools/bin/waf (vendored)

@ -1,6 +1,6 @@
#!/usr/bin/env python
# encoding: ISO-8859-1
# Thomas Nagy, 2005-2010
# encoding: ISO8859-1
# Thomas Nagy, 2005-2015
"""
Redistribution and use in source and binary forms, with or without
@ -30,25 +30,22 @@ IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
"""
import os, sys
if sys.hexversion<0x203000f: raise ImportError("Waf requires Python >= 2.3")
import os, sys, inspect
if 'PSYCOWAF' in os.environ:
try:import psyco;psyco.full()
except:pass
VERSION="1.5.19"
VERSION="1.9.10"
REVISION="x"
INSTALL="x"
C1='x'
C2='x'
GIT="x"
INSTALL=''
C1='#>'
C2='#6'
C3='#4'
cwd = os.getcwd()
join = os.path.join
WAF='waf'
def b(x):
return x
if sys.hexversion>0x300000f:
WAF='waf3'
def b(x):
@ -58,20 +55,110 @@ def err(m):
print(('\033[91mError: %s\033[0m' % m))
sys.exit(1)
def test(dir):
try: os.stat(join(dir, 'wafadmin')); return os.path.abspath(dir)
def unpack_wafdir(dir, src):
f = open(src,'rb')
c = 'corrupt archive (%d)'
while 1:
line = f.readline()
if not line: err('run waf-light from a folder containing waflib')
if line == b('#==>\n'):
txt = f.readline()
if not txt: err(c % 1)
if f.readline() != b('#<==\n'): err(c % 2)
break
if not txt: err(c % 3)
txt = txt[1:-1].replace(b(C1), b('\n')).replace(b(C2), b('\r')).replace(b(C3), b('\x00'))
import shutil, tarfile
try: shutil.rmtree(dir)
except OSError: pass
try:
for x in ('Tools', 'extras'):
os.makedirs(join(dir, 'waflib', x))
except OSError:
err("Cannot unpack waf lib into %s\nMove waf in a writable directory" % dir)
os.chdir(dir)
tmp = 't.bz2'
t = open(tmp,'wb')
try: t.write(txt)
finally: t.close()
try:
t = tarfile.open(tmp)
except:
try:
os.system('bunzip2 t.bz2')
t = tarfile.open('t')
tmp = 't'
except:
os.chdir(cwd)
try: shutil.rmtree(dir)
except OSError: pass
err("Waf cannot be unpacked, check that bzip2 support is present")
try:
for x in t: t.extract(x)
finally:
t.close()
for x in ('Tools', 'extras'):
os.chmod(join('waflib',x), 493)
if sys.hexversion<0x300000f:
sys.path = [join(dir, 'waflib')] + sys.path
import fixpy2
fixpy2.fixdir(dir)
os.remove(tmp)
os.chdir(cwd)
try: dir = unicode(dir, 'mbcs')
except: pass
try:
from ctypes import windll
windll.kernel32.SetFileAttributesW(dir, 2)
except:
pass
def test(dir):
try:
os.stat(join(dir, 'waflib'))
return os.path.abspath(dir)
except OSError:
pass
def find_lib():
return os.path.abspath(os.path.join(os.path.dirname(__file__), '../../third_party/waf'))
wafdir = find_lib()
w = join(wafdir, 'wafadmin')
t = join(w, 'Tools')
f = join(w, '3rdparty')
sys.path = [w, t, f] + sys.path
sys.path.insert(0, wafdir)
if __name__ == '__main__':
import Scripting
Scripting.prepare(t, cwd, VERSION, wafdir)
# TODO: remove these when possible
from waflib.extras import compat15
import sys
from waflib.Tools import ccroot, c, ar, compiler_c, gcc
sys.modules['cc'] = c
sys.modules['ccroot'] = ccroot
sys.modules['ar'] = ar
sys.modules['compiler_cc'] = compiler_c
sys.modules['gcc'] = gcc
from waflib import Options
Options.lockfile = os.environ.get('WAFLOCK', '.lock-wscript')
if os.path.isfile(Options.lockfile) and os.stat(Options.lockfile).st_size == 0:
os.environ['NOCLIMB'] = "1"
# there is a single top-level, but libraries must build independently
os.environ['NO_LOCK_IN_TOP'] = "1"
from waflib import Task
class o(object):
display = None
Task.classes['cc_link'] = o
from waflib import Scripting
Scripting.waf_entry_point(cwd, VERSION, wafdir)

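The new wrapper keeps legacy waf 1.5 module names importable by registering the waf 1.9 modules under their old names in sys.modules (the sys.modules['cc'] = c lines above). A minimal, self-contained sketch of that aliasing pattern, using the standard json module as a stand-in so it runs without waf installed:

import sys
import json as new_module     # stand-in for a renamed module such as waflib.Tools.c

# Publish the new module under its legacy name; old-style imports keep
# resolving without any change to the code that performs them.
sys.modules['legacy_name'] = new_module

import legacy_name            # succeeds and is the very same module object
assert legacy_name is new_module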

@ -18,7 +18,7 @@ def DEFINE(conf, d, v, add_to_cflags=False, quote=False):
'''define a config option'''
conf.define(d, v, quote=quote)
if add_to_cflags:
conf.env.append_value('CCDEFINES', d + '=' + str(v))
conf.env.append_value('CFLAGS', '-D%s=%s' % (d, str(v)))
def hlist_to_string(conf, headers=None):
'''convert a headers list to a set of #include lines'''
@ -99,7 +99,7 @@ def CHECK_HEADER(conf, h, add_headers=False, lib=None):
ret = conf.check(fragment='%s\nint main(void) { return 0; }' % hdrs,
type='nolink',
execute=0,
ccflags=ccflags,
cflags=ccflags,
mandatory=False,
includes=cpppath,
uselib=lib.upper(),
@ -383,12 +383,10 @@ def CHECK_CODE(conf, code, define,
else:
execute = 0
defs = conf.get_config_header()
if addmain:
fragment='%s\n%s\n int main(void) { %s; return 0; }\n' % (defs, hdrs, code)
fragment='%s\n int main(void) { %s; return 0; }\n' % (hdrs, code)
else:
fragment='%s\n%s\n%s\n' % (defs, hdrs, code)
fragment='%s\n%s\n' % (hdrs, code)
if msg is None:
msg="Checking for %s" % define
@ -431,11 +429,11 @@ def CHECK_CODE(conf, code, define,
conf.COMPOUND_START(msg)
ret = conf.check(fragment=fragment,
try:
ret = conf.check(fragment=fragment,
execute=execute,
define_name = define,
mandatory = mandatory,
ccflags=cflags,
cflags=cflags,
ldflags=ldflags,
includes=includes,
uselib=uselib,
@ -444,22 +442,22 @@ def CHECK_CODE(conf, code, define,
quote=quote,
exec_args=exec_args,
define_ret=define_ret)
if not ret and CONFIG_SET(conf, define):
# sometimes conf.check() returns false, but it
# sets the define. Maybe a waf bug?
ret = True
if ret:
except Exception:
if always:
conf.DEFINE(define, 0)
conf.COMPOUND_END(False)
if mandatory:
raise
return False
else:
# success
if not define_ret:
conf.DEFINE(define, 1)
conf.COMPOUND_END(True)
else:
conf.COMPOUND_END(conf.env[define])
conf.DEFINE(define, ret, quote=quote)
conf.COMPOUND_END(ret)
return True
if always:
conf.DEFINE(define, 0)
conf.COMPOUND_END(False)
return False
@conf
@ -490,6 +488,7 @@ def CHECK_CFLAGS(conf, cflags, fragment='int main(void) { return 0; }\n'):
check_cflags.extend(conf.env['WERROR_CFLAGS'])
return conf.check(fragment=fragment,
execute=0,
mandatory=False,
type='nolink',
ccflags=check_cflags,
msg="Checking compiler accepts %s" % cflags)
@ -547,12 +546,15 @@ def library_flags(self, libs):
# note that we do not add the -I and -L in here, as that is added by the waf
# core. Adding it here would just change the order that it is put on the link line
# which can cause system paths to be added before internal libraries
extra_ccflags = TO_LIST(getattr(self.env, 'CCFLAGS_%s' % lib.upper(), []))
extra_ccflags = TO_LIST(getattr(self.env, 'CFLAGS_%s' % lib.upper(), []))
extra_ldflags = TO_LIST(getattr(self.env, 'LDFLAGS_%s' % lib.upper(), []))
extra_cpppath = TO_LIST(getattr(self.env, 'CPPPATH_%s' % lib.upper(), []))
ccflags.extend(extra_ccflags)
ldflags.extend(extra_ldflags)
cpppath.extend(extra_cpppath)
extra_cpppath = TO_LIST(getattr(self.env, 'INCLUDES_%s' % lib.upper(), []))
cpppath.extend(extra_cpppath)
if 'EXTRA_LDFLAGS' in self.env:
ldflags.extend(self.env['EXTRA_LDFLAGS'])
@ -585,9 +587,9 @@ int foo()
(ccflags, ldflags, cpppath) = library_flags(conf, lib)
if shlib:
res = conf.check(features='c cshlib', fragment=fragment, lib=lib, uselib_store=lib, ccflags=ccflags, ldflags=ldflags, uselib=lib.upper(), mandatory=False)
res = conf.check(features='c cshlib', fragment=fragment, lib=lib, uselib_store=lib, cflags=ccflags, ldflags=ldflags, uselib=lib.upper(), mandatory=False)
else:
res = conf.check(lib=lib, uselib_store=lib, ccflags=ccflags, ldflags=ldflags, uselib=lib.upper(), mandatory=False)
res = conf.check(lib=lib, uselib_store=lib, cflags=ccflags, ldflags=ldflags, uselib=lib.upper(), mandatory=False)
if not res:
if mandatory:
@ -793,9 +795,12 @@ int main(void) {
conf.env['EXTRA_LDFLAGS'].extend(conf.env['ADDITIONAL_LDFLAGS'])
if path is None:
conf.write_config_header('config.h', top=True)
conf.write_config_header('default/config.h', top=True, remove=False)
else:
conf.write_config_header(path)
conf.write_config_header(os.path.join(conf.variant, path), remove=False)
for key in conf.env.define_key:
conf.undefine(key, from_env=False)
conf.env.define_key = []
conf.SAMBA_CROSS_CHECK_COMPLETE()
@ -918,6 +923,3 @@ def SAMBA_CHECK_UNDEFINED_SYMBOL_FLAGS(conf):
if conf.CHECK_LDFLAGS(['-undefined', 'dynamic_lookup']):
conf.env.undefined_ignore_ldflags = ['-undefined', 'dynamic_lookup']
@conf
def CHECK_CFG(self, *k, **kw):
return self.check_cfg(*k, **kw)

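The hunks above replace the waf 1.5 keyword ccflags with the waf 1.9 spelling cflags in conf.check() calls. A minimal, hedged configure-time sketch of the 1.9-style call; the fragment, flag and message are illustrative, and a C compiler is assumed to be configured already:

def configure(conf):
    # waf 1.9 names the per-test compiler flags "cflags" (1.5 used "ccflags").
    conf.check(fragment='int main(void) { return 0; }\n',
               cflags=['-Wall'],          # illustrative flag
               mandatory=False,           # failure is non-fatal
               msg='Checking that a trivial program builds with -Wall')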

@ -16,7 +16,7 @@ def SAMBA_AUTOPROTO(bld, header, source):
target = header,
update_outputs=True,
ext_out='.c',
before ='cc',
before ='c',
rule = '${PERL} "${SCRIPT}/mkproto.pl" --srcdir=.. --builddir=. --public=/dev/null --private="${TGT}" ${SRC}'
)
t.env.SCRIPT = os.path.join(bld.srcnode.abspath(), 'source4/script')


@ -9,6 +9,7 @@ from samba_utils import TO_LIST, ADD_LD_LIBRARY_PATH
def add_option(self, *k, **kw):
'''syntax help: provide the "match" attribute to opt.add_option() so that folders can be added to specific config tests'''
Options.parser = self
match = kw.get('match', [])
if match:
del kw['match']
@ -486,7 +487,7 @@ def CHECK_XSLTPROC_MANPAGES(conf):
return False
s='http://docbook.sourceforge.net/release/xsl/current/manpages/docbook.xsl'
conf.CHECK_COMMAND('%s --nonet %s 2> /dev/null' % (conf.env.XSLTPROC, s),
conf.CHECK_COMMAND('%s --nonet %s 2> /dev/null' % (conf.env.get_flat('XSLTPROC'), s),
msg='Checking for stylesheet %s' % s,
define='XSLTPROC_MANPAGES', on_target=False,
boolean=True)


@ -85,7 +85,7 @@ def build_dependencies(self):
# extra link flags from pkg_config
libs = self.final_syslibs.copy()
(ccflags, ldflags, cpppath) = library_flags(self, list(libs))
(cflags, ldflags, cpppath) = library_flags(self, list(libs))
new_ldflags = getattr(self, 'samba_ldflags', [])[:]
new_ldflags.extend(ldflags)
self.ldflags = new_ldflags
@ -102,7 +102,7 @@ def build_dependencies(self):
self.sname, self.uselib, self.uselib_local, self.add_objects)
if self.samba_type in ['SUBSYSTEM']:
# this is needed for the ccflags of libs that come from pkg_config
# this is needed for the cflags of libs that come from pkg_config
self.uselib = list(self.final_syslibs)
self.uselib.extend(list(self.direct_syslibs))
for lib in self.final_libs:
@ -235,7 +235,7 @@ def add_init_functions(self):
if sentinel == 'NULL':
proto = "extern void __%s_dummy_module_proto(void)" % (sname)
cflags.append('-DSTATIC_%s_MODULES_PROTO=%s' % (sname, proto))
self.ccflags = cflags
self.cflags = cflags
return
for m in modules:
@ -257,7 +257,7 @@ def add_init_functions(self):
proto += '_MODULE_PROTO(%s)' % f
proto += "extern void __%s_dummy_module_proto(void)" % (m)
cflags.append('-DSTATIC_%s_MODULES_PROTO=%s' % (m, proto))
self.ccflags = cflags
self.cflags = cflags
def check_duplicate_sources(bld, tgt_list):
@ -271,6 +271,9 @@ def check_duplicate_sources(bld, tgt_list):
tpath = os.path.normpath(os_path_relpath(t.path.abspath(bld.env), t.env.BUILD_DIRECTORY + '/default'))
obj_sources = set()
for s in source_list:
if not isinstance(s, str):
print('strange path in check_duplicate_sources %r' % s)
s = s.abspath()
p = os.path.normpath(os.path.join(tpath, s))
if p in obj_sources:
Logs.error("ERROR: source %s appears twice in target '%s'" % (p, t.sname))
@ -372,7 +375,7 @@ def add_samba_attributes(bld, tgt_list):
t.samba_abspath = t.path.abspath(bld.env)
t.samba_deps_extended = t.samba_deps[:]
t.samba_includes_extended = TO_LIST(t.samba_includes)[:]
t.ccflags = getattr(t, 'samba_cflags', '')
t.cflags = getattr(t, 'samba_cflags', '')
def replace_grouping_libraries(bld, tgt_list):
'''replace dependencies based on grouping libraries
@ -951,7 +954,7 @@ savedeps_inputs = ['samba_deps', 'samba_includes', 'local_include', 'local_incl
'source', 'grouping_library', 'samba_ldflags', 'allow_undefined_symbols',
'use_global_deps', 'global_include' ]
savedeps_outputs = ['uselib', 'uselib_local', 'add_objects', 'includes',
'ccflags', 'ldflags', 'samba_deps_extended', 'final_libs']
'cflags', 'ldflags', 'samba_deps_extended', 'final_libs']
savedeps_outenv = ['INC_PATHS']
savedeps_envvars = ['NONSHARED_BINARIES', 'GLOBAL_DEPENDENCIES', 'EXTRA_CFLAGS', 'EXTRA_LDFLAGS', 'EXTRA_INCLUDES' ]
savedeps_caches = ['GLOBAL_DEPENDENCIES', 'TARGET_TYPE', 'INIT_FUNCTIONS', 'SYSLIB_DEPS']


@ -4,7 +4,7 @@ import subprocess
def find_git(env=None):
"""Find the git binary."""
if env is not None and 'GIT' in env:
return env['GIT']
return env.get_flat('GIT')
# Get version from GIT
if os.path.exists("/usr/bin/git"):

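find_program() in waf 1.9 stores its result as a list of command words, which is why the lookup above switches to env.get_flat(); get_flat() joins a list value into one space-separated string. A small standalone sketch using waflib's ConfigSet directly; the git path is illustrative:

from waflib.ConfigSet import ConfigSet

env = ConfigSet()
env.GIT = ['/usr/bin/git']    # what find_program() would have stored

print(env['GIT'])             # ['/usr/bin/git']
print(env.get_flat('GIT'))    # '/usr/bin/git' as a plain string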

@ -45,7 +45,7 @@ def install_binary(self):
# tell waf to install the right binary
bld.install_as(os.path.join(install_path, orig_target),
os.path.join(self.path.abspath(bld.env), self.target),
self.path.find_or_declare(self.target),
chmod=MODE_755)
@ -143,8 +143,9 @@ def install_library(self):
# tell waf to install the library
bld.install_as(os.path.join(install_path, install_name),
os.path.join(self.path.abspath(bld.env), inst_name),
self.path.find_or_declare(inst_name),
chmod=MODE_755)
if install_link and install_link != install_name:
# and the symlink if needed
bld.symlink_as(os.path.join(install_path, install_link), os.path.basename(install_name))


@ -1,269 +0,0 @@
# This file contains waf optimisations for Samba
# most of these optimisations are possible because of the restricted build environment
# that Samba has. For example, Samba doesn't attempt to cope with Win32 paths during the
# build, and Samba doesn't need build varients
# overall this makes some build tasks quite a bit faster
import os
import Build, Utils, Node
from TaskGen import feature, after, before
import preproc
@feature('c', 'cc', 'cxx')
@after('apply_type_vars', 'apply_lib_vars', 'apply_core')
def apply_incpaths(self):
lst = []
try:
kak = self.bld.kak
except AttributeError:
kak = self.bld.kak = {}
# TODO move the uselib processing out of here
for lib in self.to_list(self.uselib):
for path in self.env['CPPPATH_' + lib]:
if not path in lst:
lst.append(path)
if preproc.go_absolute:
for path in preproc.standard_includes:
if not path in lst:
lst.append(path)
for path in self.to_list(self.includes):
if not path in lst:
if preproc.go_absolute or path[0] != '/': # os.path.isabs(path):
lst.append(path)
else:
self.env.prepend_value('CPPPATH', path)
for path in lst:
node = None
if path[0] == '/': # os.path.isabs(path):
if preproc.go_absolute:
node = self.bld.root.find_dir(path)
elif path[0] == '#':
node = self.bld.srcnode
if len(path) > 1:
try:
node = kak[path]
except KeyError:
kak[path] = node = node.find_dir(path[1:])
else:
try:
node = kak[(self.path.id, path)]
except KeyError:
kak[(self.path.id, path)] = node = self.path.find_dir(path)
if node:
self.env.append_value('INC_PATHS', node)
@feature('c', 'cc')
@after('apply_incpaths')
def apply_obj_vars_cc(self):
"""after apply_incpaths for INC_PATHS"""
env = self.env
app = env.append_unique
cpppath_st = env['CPPPATH_ST']
lss = env['_CCINCFLAGS']
try:
cac = self.bld.cac
except AttributeError:
cac = self.bld.cac = {}
# local flags come first
# set the user-defined includes paths
for i in env['INC_PATHS']:
try:
lss.extend(cac[i.id])
except KeyError:
cac[i.id] = [cpppath_st % i.bldpath(env), cpppath_st % i.srcpath(env)]
lss.extend(cac[i.id])
env['_CCINCFLAGS'] = lss
# set the library include paths
for i in env['CPPPATH']:
app('_CCINCFLAGS', cpppath_st % i)
import Node, Environment
def vari(self):
return "default"
Environment.Environment.variant = vari
def variant(self, env):
if not env: return 0
elif self.id & 3 == Node.FILE: return 0
else: return "default"
Node.Node.variant = variant
import TaskGen, Task
def create_task(self, name, src=None, tgt=None):
task = Task.TaskBase.classes[name](self.env, generator=self)
if src:
task.set_inputs(src)
if tgt:
task.set_outputs(tgt)
return task
TaskGen.task_gen.create_task = create_task
def hash_constraints(self):
a = self.attr
sum = hash((str(a('before', '')),
str(a('after', '')),
str(a('ext_in', '')),
str(a('ext_out', '')),
self.__class__.maxjobs))
return sum
Task.TaskBase.hash_constraints = hash_constraints
def hash_env_vars(self, env, vars_lst):
idx = str(id(env)) + str(vars_lst)
try:
return self.cache_sig_vars[idx]
except KeyError:
pass
m = Utils.md5()
m.update(''.join([str(env[a]) for a in vars_lst]))
ret = self.cache_sig_vars[idx] = m.digest()
return ret
Build.BuildContext.hash_env_vars = hash_env_vars
def store_fast(self, filename):
file = open(filename, 'wb')
data = self.get_merged_dict()
try:
Build.cPickle.dump(data, file, -1)
finally:
file.close()
Environment.Environment.store_fast = store_fast
def load_fast(self, filename):
file = open(filename, 'rb')
try:
data = Build.cPickle.load(file)
finally:
file.close()
self.table.update(data)
Environment.Environment.load_fast = load_fast
def is_this_a_static_lib(self, name):
try:
cache = self.cache_is_this_a_static_lib
except AttributeError:
cache = self.cache_is_this_a_static_lib = {}
try:
return cache[name]
except KeyError:
ret = cache[name] = 'cstaticlib' in self.bld.get_tgen_by_name(name).features
return ret
TaskGen.task_gen.is_this_a_static_lib = is_this_a_static_lib
def shared_ancestors(self):
try:
cache = self.cache_is_this_a_static_lib
except AttributeError:
cache = self.cache_is_this_a_static_lib = {}
try:
return cache[id(self)]
except KeyError:
ret = []
if 'cshlib' in self.features: # or 'cprogram' in self.features:
if getattr(self, 'uselib_local', None):
lst = self.to_list(self.uselib_local)
ret = [x for x in lst if not self.is_this_a_static_lib(x)]
cache[id(self)] = ret
return ret
TaskGen.task_gen.shared_ancestors = shared_ancestors
@feature('c', 'cc', 'cxx')
@after('apply_link', 'init_cc', 'init_cxx', 'apply_core')
def apply_lib_vars(self):
"""after apply_link because of 'link_task'
after default_cc because of the attribute 'uselib'"""
# after 'apply_core' in case if 'cc' if there is no link
env = self.env
app = env.append_value
seen_libpaths = set([])
# OPTIMIZATION 1: skip uselib variables already added (700ms)
seen_uselib = set([])
# 1. the case of the libs defined in the project (visit ancestors first)
# the ancestors external libraries (uselib) will be prepended
self.uselib = self.to_list(self.uselib)
names = self.to_list(self.uselib_local)
seen = set([])
tmp = Utils.deque(names) # consume a copy of the list of names
while tmp:
lib_name = tmp.popleft()
# visit dependencies only once
if lib_name in seen:
continue
y = self.get_tgen_by_name(lib_name)
if not y:
raise Utils.WafError('object %r was not found in uselib_local (required by %r)' % (lib_name, self.name))
y.post()
seen.add(lib_name)
# OPTIMIZATION 2: pre-compute ancestors shared libraries (100ms)
tmp.extend(y.shared_ancestors())
# link task and flags
if getattr(y, 'link_task', None):
link_name = y.target[y.target.rfind('/') + 1:]
if 'cstaticlib' in y.features:
app('STATICLIB', link_name)
elif 'cshlib' in y.features or 'cprogram' in y.features:
# WARNING some linkers can link against programs
app('LIB', link_name)
# the order
self.link_task.set_run_after(y.link_task)
# for the recompilation
dep_nodes = getattr(self.link_task, 'dep_nodes', [])
self.link_task.dep_nodes = dep_nodes + y.link_task.outputs
# OPTIMIZATION 3: reduce the amount of function calls
# add the link path too
par = y.link_task.outputs[0].parent
if id(par) not in seen_libpaths:
seen_libpaths.add(id(par))
tmp_path = par.bldpath(self.env)
if not tmp_path in env['LIBPATH']:
env.prepend_value('LIBPATH', tmp_path)
# add ancestors uselib too - but only propagate those that have no staticlib
for v in self.to_list(y.uselib):
if v not in seen_uselib:
seen_uselib.add(v)
if not env['STATICLIB_' + v]:
if not v in self.uselib:
self.uselib.insert(0, v)
# 2. the case of the libs defined outside
for x in self.uselib:
for v in self.p_flag_vars:
val = self.env[v + '_' + x]
if val:
self.env.append_value(v, val)


@ -15,7 +15,7 @@ def SAMBA_CHECK_PERL(conf, mandatory=True, version=(5,0,0)):
conf.check_perl_version(version)
def read_perl_config_var(cmd):
return Utils.to_list(Utils.cmd_output([conf.env.PERL, '-MConfig', '-e', cmd]))
return Utils.to_list(Utils.cmd_output([conf.env.get_flat('PERL'), '-MConfig', '-e', cmd]))
def check_perl_config_var(var):
conf.start_msg("Checking for perl $Config{%s}:" % var)


@ -1,7 +1,7 @@
# waf build tool for building IDL files with pidl
import os
import Build
import Build, Utils
from TaskGen import feature, before
from samba_utils import SET_TARGET_TYPE, TO_LIST, LOCAL_CACHE
@ -78,7 +78,7 @@ def SAMBA_PIDL(bld, pname, source,
t = bld(rule='cd .. && %s %s ${PERL} "${PIDL}" --quiet ${OPTIONS} --outputdir ${OUTPUTDIR} -- "${SRC[0].abspath(env)}"' % (cpp, cc),
ext_out = '.c',
before = 'cc',
before = 'c',
update_outputs = True,
shell = True,
source = source,
@ -91,7 +91,7 @@ def SAMBA_PIDL(bld, pname, source,
t.env.PIDL = os.path.join(bld.srcnode.abspath(), 'pidl/pidl')
t.env.OPTIONS = TO_LIST(options)
t.env.OUTPUTDIR = bld.bldnode.name + '/' + bld.path.find_dir(output_dir).bldpath(t.env)
t.env.OUTPUTDIR = bld.bldnode.parent.name + '/default/' + bld.path.find_dir(output_dir).path_from(bld.srcnode)
if generate_tables and table_header_idx is not None:
pidl_headers = LOCAL_CACHE(bld, 'PIDL_HEADERS')
@ -117,13 +117,14 @@ Build.BuildContext.SAMBA_PIDL_LIST = SAMBA_PIDL_LIST
@before('exec_rule')
def collect(self):
pidl_headers = LOCAL_CACHE(self.bld, 'PIDL_HEADERS')
self.source = Utils.to_list(self.source)
for (name, hd) in pidl_headers.items():
y = self.bld.get_tgen_by_name(name)
self.bld.ASSERT(y is not None, 'Failed to find PIDL header %s' % name)
y.post()
for node in hd:
self.bld.ASSERT(node is not None, 'Got None as build node generating PIDL table for %s' % name)
self.source += " " + node.relpath_gen(self.path)
self.source.append(node)
def SAMBA_PIDL_TABLES(bld, name, target):
@ -133,7 +134,7 @@ def SAMBA_PIDL_TABLES(bld, name, target):
features = 'collect',
rule = '${PERL} ${SRC} --output ${TGT} | sed "s|default/||" > ${TGT}',
ext_out = '.c',
before = 'cc',
before = 'c',
update_outputs = True,
shell = True,
source = '../../librpc/tables.pl',


@ -48,8 +48,10 @@ def SAMBA_CHECK_PYTHON_HEADERS(conf, mandatory=True):
conf.msg("python headers", "Check disabled due to --disable-python")
# we don't want PYTHONDIR in config.h, as otherwise changing
# --prefix causes a complete rebuild
del(conf.env.defines['PYTHONDIR'])
del(conf.env.defines['PYTHONARCHDIR'])
conf.env.DEFINES = [x for x in conf.env.DEFINES
if not x.startswith('PYTHONDIR=')
and not x.startswith('PYTHONARCHDIR=')]
return
if conf.env["python_headers_checked"] == []:
@ -71,13 +73,14 @@ def SAMBA_CHECK_PYTHON_HEADERS(conf, mandatory=True):
# we don't want PYTHONDIR in config.h, as otherwise changing
# --prefix causes a complete rebuild
del(conf.env.defines['PYTHONDIR'])
del(conf.env.defines['PYTHONARCHDIR'])
conf.env.DEFINES = [x for x in conf.env.DEFINES
if not x.startswith('PYTHONDIR=')
and not x.startswith('PYTHONARCHDIR=')]
def _check_python_headers(conf, mandatory):
try:
Configure.ConfigurationError
conf.check_python_headers(mandatory=mandatory)
conf.check_python_headers()
except Configure.ConfigurationError:
if mandatory:
raise
@ -95,6 +98,11 @@ def _check_python_headers(conf, mandatory):
conf.env.append_unique('LIBPATH_PYEMBED', lib[2:]) # strip '-L'
conf.env['LINKFLAGS_PYEMBED'].remove(lib)
# same as in waf 1.5, keep only '-fno-strict-aliasing'
# and ignore defines such as NDEBUG _FORTIFY_SOURCE=2
conf.env.DEFINES_PYEXT = []
conf.env.CFLAGS_PYEXT = ['-fno-strict-aliasing']
return
def PYTHON_BUILD_IS_ENABLED(self):

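waf 1.9 keeps configuration defines as 'NAME=value' strings in conf.env.DEFINES rather than in the old conf.env.defines dict, so the prefix-dependent PYTHONDIR/PYTHONARCHDIR entries are now dropped by filtering that list, as in the hunks above. The same filtering on a plain list, with illustrative values:

# conf.env.DEFINES in waf 1.9 is a flat list of 'NAME=value' strings.
DEFINES = ['HAVE_STDIO_H=1',
           'PYTHONDIR="/usr/local/lib/python2.7/site-packages"',
           'PYTHONARCHDIR="/usr/local/lib64/python2.7/site-packages"']

# Drop the entries that depend on --prefix so they never reach config.h and a
# prefix change does not force a full rebuild.
DEFINES = [x for x in DEFINES
           if not x.startswith('PYTHONDIR=')
           and not x.startswith('PYTHONARCHDIR=')]

assert DEFINES == ['HAVE_STDIO_H=1']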

@ -1,11 +1,11 @@
# a waf tool to add autoconf-like macros to the configure section
# and for SAMBA_ macros for building libraries, binaries etc
import os, sys, re, fnmatch, shlex
import os, sys, re, fnmatch, shlex, inspect
from optparse import SUPPRESS_HELP
import Build, Options, Utils, Task, Logs, Configure
from waflib import Build, Options, Utils, Task, Logs, Configure, Errors
from TaskGen import feature, before, after
from Configure import conf, ConfigurationContext
from Configure import ConfigurationContext
from Logs import debug
# TODO: make this a --option
@ -16,6 +16,30 @@ LIB_PATH="shared"
MODE_644 = int('644', 8)
MODE_755 = int('755', 8)
def conf(f):
# override in order to propagate the argument "mandatory"
def fun(*k, **kw):
mandatory = True
if 'mandatory' in kw:
mandatory = kw['mandatory']
del kw['mandatory']
try:
return f(*k, **kw)
except Errors.ConfigurationError:
if mandatory:
raise
fun.__name__ = f.__name__
if 'mandatory' in inspect.getsource(f):
fun = f
setattr(Configure.ConfigurationContext, f.__name__, fun)
setattr(Build.BuildContext, f.__name__, fun)
return f
Configure.conf = conf
Configure.conftest = conf
@conf
def SET_TARGET_TYPE(ctx, target, value):
'''set the target type of a target'''
@ -201,6 +225,8 @@ def subst_vars_error(string, env):
if not vname in env:
raise KeyError("Failed to find variable %s in %s" % (vname, string))
v = env[vname]
if isinstance(v, list):
v = ' '.join(v)
out.append(v)
return ''.join(out)
@ -355,6 +381,8 @@ def RUN_PYTHON_TESTS(testfiles, pythonpath=None, extra_env=None):
pythonpath = os.path.join(Utils.g_module.blddir, 'python')
result = 0
for interp in env.python_interpreters:
if not isinstance(interp, str):
interp = ' '.join(interp)
for testfile in testfiles:
cmd = "PYTHONPATH=%s %s %s" % (pythonpath, interp, testfile)
if extra_env:
@ -412,9 +440,8 @@ def LOAD_ENVIRONMENT():
import Environment
env = Environment.Environment()
try:
env.load('.lock-wscript')
env.load(env.blddir + '/c4che/default.cache.py')
except:
env.load('bin/c4che/default_cache.py')
except (OSError, IOError):
pass
return env
@ -446,6 +473,8 @@ def RECURSE(ctx, directory):
return
visited_dirs.add(key)
relpath = os_path_relpath(abspath, ctx.curdir)
if 'waflib.extras.compat15' in sys.modules:
return ctx.recurse(relpath)
if ctxclass == 'Handler':
return ctx.sub_options(relpath)
if ctxclass == 'ConfigurationContext':
@ -561,7 +590,7 @@ def map_shlib_extension(ctx, name, python=False):
if python:
return ctx.env.pyext_PATTERN % root1
else:
(root2, ext2) = os.path.splitext(ctx.env.shlib_PATTERN)
(root2, ext2) = os.path.splitext(ctx.env.cshlib_PATTERN)
return root1+ext2
Build.BuildContext.map_shlib_extension = map_shlib_extension
@ -583,7 +612,7 @@ def make_libname(ctx, name, nolibprefix=False, version=None, python=False):
if python:
libname = apply_pattern(name, ctx.env.pyext_PATTERN)
else:
libname = apply_pattern(name, ctx.env.shlib_PATTERN)
libname = apply_pattern(name, ctx.env.cshlib_PATTERN)
if nolibprefix and libname[0:3] == 'lib':
libname = libname[3:]
if version:

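The replacement conf() decorator above makes every wrapped helper honour mandatory=False by swallowing configuration errors, while helpers that already look at 'mandatory' are left unwrapped. A reduced, runnable sketch of that logic; CheckError stands in for waflib.Errors.ConfigurationError, and the registration on ConfigurationContext/BuildContext is omitted:

import inspect

class CheckError(Exception):
    """Stand-in for waflib.Errors.ConfigurationError."""

def conf(f):
    # Propagate "mandatory": a failing check only raises when mandatory=True.
    def fun(*k, **kw):
        mandatory = kw.pop('mandatory', True)
        try:
            return f(*k, **kw)
        except CheckError:
            if mandatory:
                raise
    fun.__name__ = f.__name__
    # Helpers that already handle "mandatory" themselves are left as-is.
    if 'mandatory' in inspect.getsource(f):
        fun = f
    return fun

@conf
def CHECK_SOMETHING(ctx):
    raise CheckError('not found')

CHECK_SOMETHING(None, mandatory=False)   # error swallowed, returns None
try:
    CHECK_SOMETHING(None)                # mandatory by default: re-raised
except CheckError:
    print('mandatory check failed as expected')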

@ -0,0 +1,267 @@
# compatibility layer for building with more recent waf versions
import os, shlex, sys
import Build, Configure, Node, Utils, Options, Logs
from waflib import ConfigSet
from TaskGen import feature, after
from Configure import conf, ConfigurationContext
from waflib.Tools import bison, flex
sys.modules['bison'] = bison
sys.modules['flex'] = flex
for y in (Build.BuildContext, Build.CleanContext, Build.InstallContext, Build.UninstallContext, Build.ListContext):
class tmp(y):
variant = 'default'
def pre_build(self):
self.cwdx = self.bldnode.parent
self.cwd = self.cwdx.abspath()
self.bdir = self.bldnode.abspath()
return Build.BuildContext.old_pre_build(self)
Build.BuildContext.old_pre_build = Build.BuildContext.pre_build
Build.BuildContext.pre_build = pre_build
def abspath(self, env=None):
if env and hasattr(self, 'children'):
return self.get_bld().abspath()
return self.old_abspath()
Node.Node.old_abspath = Node.Node.abspath
Node.Node.abspath = abspath
def bldpath(self, env=None):
return self.abspath()
#return self.path_from(self.ctx.bldnode.parent)
Node.Node.bldpath = bldpath
def srcpath(self, env=None):
return self.abspath()
#return self.path_from(self.ctx.bldnode.parent)
Node.Node.srcpath = srcpath
def store_fast(self, filename):
file = open(filename, 'wb')
data = self.get_merged_dict()
try:
Build.cPickle.dump(data, file, -1)
finally:
file.close()
ConfigSet.ConfigSet.store_fast = store_fast
def load_fast(self, filename):
file = open(filename, 'rb')
try:
data = Build.cPickle.load(file)
finally:
file.close()
self.table.update(data)
ConfigSet.ConfigSet.load_fast = load_fast
@feature('c', 'cxx', 'd', 'asm', 'fc', 'includes')
@after('propagate_uselib_vars', 'process_source')
def apply_incpaths(self):
lst = self.to_incnodes(self.to_list(getattr(self, 'includes', [])) + self.env['INCLUDES'])
self.includes_nodes = lst
cwdx = getattr(self.bld, 'cwdx', self.bld.bldnode)
self.env['INCPATHS'] = [x.path_from(cwdx) for x in lst]
@conf
def define(self, key, val, quote=True, comment=None):
assert key and isinstance(key, str)
if val is True:
val = 1
elif val in (False, None):
val = 0
# waf 1.5
self.env[key] = val
if isinstance(val, int) or isinstance(val, float):
s = '%s=%s'
else:
s = quote and '%s="%s"' or '%s=%s'
app = s % (key, str(val))
ban = key + '='
lst = self.env.DEFINES
for x in lst:
if x.startswith(ban):
lst[lst.index(x)] = app
break
else:
self.env.append_value('DEFINES', app)
self.env.append_unique('define_key', key)
# compat15 removes this but we want to keep it
@conf
def undefine(self, key, from_env=True, comment=None):
assert key and isinstance(key, str)
ban = key + '='
self.env.DEFINES = [x for x in self.env.DEFINES if not x.startswith(ban)]
self.env.append_unique('define_key', key)
# waf 1.5
if from_env:
self.env[key] = ()
class ConfigurationContext(Configure.ConfigurationContext):
def init_dirs(self):
self.setenv('default')
self.env.merge_config_header = True
return super(ConfigurationContext, self).init_dirs()
def find_program_samba(self, *k, **kw):
kw['mandatory'] = False
ret = self.find_program_old(*k, **kw)
return ret
Configure.ConfigurationContext.find_program_old = Configure.ConfigurationContext.find_program
Configure.ConfigurationContext.find_program = find_program_samba
def PROCESS_SEPARATE_RULE(self, rule):
''' cause waf to process additional script based on `rule'.
You should have file named wscript_<stage>_rule in the current directory
where stage is either 'configure' or 'build'
'''
stage = ''
if isinstance(self, Configure.ConfigurationContext):
stage = 'configure'
elif isinstance(self, Build.BuildContext):
stage = 'build'
script = self.path.find_node('wscript_'+stage+'_'+rule)
if script:
txt = script.read()
bld = self
conf = self
ctx = self
dc = {'ctx': self, 'conf': self, 'bld': self}
if getattr(self.__class__, 'pre_recurse', None):
dc = self.pre_recurse(script)
exec(compile(txt, script.abspath(), 'exec'), dc)
if getattr(self.__class__, 'post_recurse', None):
dc = self.post_recurse(script)
Build.BuildContext.PROCESS_SEPARATE_RULE = PROCESS_SEPARATE_RULE
ConfigurationContext.PROCESS_SEPARATE_RULE = PROCESS_SEPARATE_RULE
Build.BuildContext.ENFORCE_GROUP_ORDERING = Utils.nada
Build.BuildContext.AUTOCLEANUP_STALE_FILES = Utils.nada
@conf
def check(self, *k, **kw):
'''Override the waf defaults to inject --with-directory options'''
# match the configuration test with specific options, for example:
# --with-libiconv -> Options.options.iconv_open -> "Checking for library iconv"
self.validate_c(kw)
additional_dirs = []
if 'msg' in kw:
msg = kw['msg']
for x in Options.parser.parser.option_list:
if getattr(x, 'match', None) and msg in x.match:
d = getattr(Options.options, x.dest, '')
if d:
additional_dirs.append(d)
# we add the additional dirs twice: once for the test data, and again if the compilation test succeeds below
def add_options_dir(dirs, env):
for x in dirs:
if not x in env.CPPPATH:
env.CPPPATH = [os.path.join(x, 'include')] + env.CPPPATH
if not x in env.LIBPATH:
env.LIBPATH = [os.path.join(x, 'lib')] + env.LIBPATH
add_options_dir(additional_dirs, kw['env'])
self.start_msg(kw['msg'], **kw)
ret = None
try:
ret = self.run_build(*k, **kw)
except self.errors.ConfigurationError:
self.end_msg(kw['errmsg'], 'YELLOW', **kw)
if Logs.verbose > 1:
raise
else:
self.fatal('The configuration failed')
else:
kw['success'] = ret
# success! time for brandy
add_options_dir(additional_dirs, self.env)
ret = self.post_check(*k, **kw)
if not ret:
self.end_msg(kw['errmsg'], 'YELLOW', **kw)
self.fatal('The configuration failed %r' % ret)
else:
self.end_msg(self.ret_msg(kw['okmsg'], kw), **kw)
return ret
@conf
def CHECK_LIBRARY_SUPPORT(conf, rpath=False, version_script=False, msg=None):
'''see if the platform supports building libraries'''
if msg is None:
if rpath:
msg = "rpath library support"
else:
msg = "building library support"
def build(bld):
lib_node = bld.srcnode.make_node('libdir/liblc1.c')
lib_node.parent.mkdir()
lib_node.write('int lib_func(void) { return 42; }\n', 'w')
main_node = bld.srcnode.make_node('main.c')
main_node.write('int main(void) {return !(lib_func() == 42);}', 'w')
linkflags = []
if version_script:
script = bld.srcnode.make_node('ldscript')
script.write('TEST_1.0A2 { global: *; };\n', 'w')
linkflags.append('-Wl,--version-script=%s' % script.abspath())
bld(features='c cshlib', source=lib_node, target='lib1', linkflags=linkflags, name='lib1')
o = bld(features='c cprogram', source=main_node, target='prog1', uselib_local='lib1')
if rpath:
o.rpath = [lib_node.parent.abspath()]
def run_app(self):
args = conf.SAMBA_CROSS_ARGS(msg=msg)
env = dict(os.environ)
env['LD_LIBRARY_PATH'] = self.inputs[0].parent.abspath() + os.pathsep + env.get('LD_LIBRARY_PATH', '')
self.generator.bld.cmd_and_log([self.inputs[0].abspath()] + args, env=env)
o.post()
bld(rule=run_app, source=o.link_task.outputs[0])
# ok, so it builds
try:
conf.check(build_fun=build, msg='Checking for %s' % msg)
except conf.errors.ConfigurationError:
return False
return True
@conf
def CHECK_NEED_LC(conf, msg):
'''check if we need -lc'''
def build(bld):
lib_node = bld.srcnode.make_node('libdir/liblc1.c')
lib_node.parent.mkdir()
lib_node.write('#include <stdio.h>\nint lib_func(void) { FILE *f = fopen("foo", "r");}\n', 'w')
bld(features='c cshlib', source=[lib_node], linkflags=conf.env.EXTRA_LDFLAGS, target='liblc')
try:
conf.check(build_fun=build, msg=msg, okmsg='-lc is unnecessary', errmsg='-lc is necessary')
except conf.errors.ConfigurationError:
return False
return True
# already implemented on "waf -v"
def order(bld, tgt_list):
return True
Build.BuildContext.check_group_ordering = order
@conf
def CHECK_CFG(self, *k, **kw):
if 'args' in kw:
kw['args'] = shlex.split(kw['args'])
if not 'mandatory' in kw:
kw['mandatory'] = False
kw['global_define'] = True
return self.check_cfg(*k, **kw)

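The check() override above compares a test's msg against the match list attached to an option (see the add_option change earlier in this commit), so a --with-<pkg> directory only influences the tests it names. A hedged wscript-style sketch of the intended usage; the option name, dest and message are illustrative and the wafsamba tools are assumed to be loaded:

def options(opt):
    # "match" ties the option to specific configure tests via their msg text.
    opt.add_option('--with-libfoo', type='string', dest='libfoo_dir',
                   match=['Checking for library foo'],
                   help='prefix of a libfoo installation')

def configure(conf):
    # Because this msg appears in "match", the directory passed to
    # --with-libfoo is prepended to CPPPATH/LIBPATH for this test only.
    conf.check(lib='foo', msg='Checking for library foo', mandatory=False)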

@ -8,7 +8,6 @@ from samba_utils import SUBST_VARS_RECURSIVE
TaskGen.task_gen.apply_verif = Utils.nada
# bring in the other samba modules
from samba_optimisation import *
from samba_utils import *
from samba_version import *
from samba_autoconf import *
@ -31,10 +30,10 @@ import hpuxcc
import generic_cc
import samba_dist
import samba_wildcard
import stale_files
import symbols
import pkgconfig
import configure_file
import samba_waf18
# some systems have broken threading in python
if os.environ.get('WAF_NOTHREADS') == '1':
@ -44,8 +43,7 @@ LIB_PATH="shared"
os.environ['PYTHONUNBUFFERED'] = '1'
if Constants.HEXVERSION < 0x105019:
if Constants.HEXVERSION not in (0x105019, 0x1090a00):
Logs.error('''
Please use the version of waf that comes with Samba, not
a system installed version. See http://wiki.samba.org/index.php/Waf
@ -282,7 +280,7 @@ def SAMBA_LIBRARY(bld, libname, source,
vscript = "%s.vscript" % libname
bld.ABI_VSCRIPT(version_libname, abi_directory, version, vscript,
abi_match)
fullname = apply_pattern(bundled_name, bld.env.shlib_PATTERN)
fullname = apply_pattern(bundled_name, bld.env.cshlib_PATTERN)
fullpath = bld.path.find_or_declare(fullname)
vscriptpath = bld.path.find_or_declare(vscript)
if not fullpath:
@ -292,7 +290,7 @@ def SAMBA_LIBRARY(bld, libname, source,
bld.add_manual_dependency(fullpath, vscriptpath)
if bld.is_install:
# also make the .inst file depend on the vscript
instname = apply_pattern(bundled_name + '.inst', bld.env.shlib_PATTERN)
instname = apply_pattern(bundled_name + '.inst', bld.env.cshlib_PATTERN)
bld.add_manual_dependency(bld.path.find_or_declare(instname), bld.path.find_or_declare(vscript))
vscript = os.path.join(bld.path.abspath(bld.env), vscript)
@ -327,6 +325,8 @@ def SAMBA_LIBRARY(bld, libname, source,
link_name = 'shared/%s' % realname
if link_name:
if 'waflib.extras.compat15' in sys.modules:
link_name = 'default/' + link_name
t.link_name = link_name
if pc_files is not None and not private_library:
@ -674,7 +674,7 @@ def SAMBA_GENERATOR(bld, name, rule, source='', target='',
target=target,
shell=isinstance(rule, str),
update_outputs=True,
before='cc',
before='c',
ext_out='.c',
samba_type='GENERATOR',
dep_vars = dep_vars,
@ -834,6 +834,8 @@ def install_file(bld, destdir, file, chmod=MODE_644, flat=False,
python_fixup=False, perl_fixup=False,
destname=None, base_name=None):
'''install a file'''
if not isinstance(file, str):
file = file.abspath()
destdir = bld.EXPAND_VARIABLES(destdir)
if not destname:
destname = file
@ -958,59 +960,6 @@ def SAMBAMANPAGES(bld, manpages, extra_source=None):
bld.INSTALL_FILES('${MANDIR}/man%s' % m[-1], m, flat=True)
Build.BuildContext.SAMBAMANPAGES = SAMBAMANPAGES
#############################################################
# give a nicer display when building different types of files
def progress_display(self, msg, fname):
col1 = Logs.colors(self.color)
col2 = Logs.colors.NORMAL
total = self.position[1]
n = len(str(total))
fs = '[%%%dd/%%%dd] %s %%s%%s%%s\n' % (n, n, msg)
return fs % (self.position[0], self.position[1], col1, fname, col2)
def link_display(self):
if Options.options.progress_bar != 0:
return Task.Task.old_display(self)
fname = self.outputs[0].bldpath(self.env)
return progress_display(self, 'Linking', fname)
Task.TaskBase.classes['cc_link'].display = link_display
def samba_display(self):
if Options.options.progress_bar != 0:
return Task.Task.old_display(self)
targets = LOCAL_CACHE(self, 'TARGET_TYPE')
if self.name in targets:
target_type = targets[self.name]
type_map = { 'GENERATOR' : 'Generating',
'PROTOTYPE' : 'Generating'
}
if target_type in type_map:
return progress_display(self, type_map[target_type], self.name)
if len(self.inputs) == 0:
return Task.Task.old_display(self)
fname = self.inputs[0].bldpath(self.env)
if fname[0:3] == '../':
fname = fname[3:]
ext_loc = fname.rfind('.')
if ext_loc == -1:
return Task.Task.old_display(self)
ext = fname[ext_loc:]
ext_map = { '.idl' : 'Compiling IDL',
'.et' : 'Compiling ERRTABLE',
'.asn1': 'Compiling ASN1',
'.c' : 'Compiling' }
if ext in ext_map:
return progress_display(self, ext_map[ext], fname)
return Task.Task.old_display(self)
Task.TaskBase.classes['Task'].old_display = Task.TaskBase.classes['Task'].display
Task.TaskBase.classes['Task'].display = samba_display
@after('apply_link')
@feature('cshlib')
def apply_bundle_remove_dynamiclib_patch(self):


@ -14,7 +14,7 @@ from optparse import SUPPRESS_HELP
# are resolved related to WAFCACHE. It will need a lot of testing
# before it is enabled by default.
if '--enable-auto-reconfigure' in sys.argv:
Configure.autoconfig = True
Configure.autoconfig = 'clobber'
def set_options(opt):
opt.tool_options('compiler_cc')
@ -239,28 +239,7 @@ def configure(conf):
p = os.path.join(conf.srcdir, 'buildtools/wafsamba/gccdeps.pyc')
if os.path.exists(p):
os.remove(p)
from TaskGen import feature, after
@feature('testd')
@after('apply_core')
def check_d(self):
tsk = self.compiled_tasks[0]
tsk.outputs.append(tsk.outputs[0].change_ext('.d'))
import Task
cc = Task.TaskBase.classes['cc']
oldmeth = cc.run
cc.run = Task.compile_fun_noshell('cc', '${CC} ${CCFLAGS} ${CPPFLAGS} ${_CCINCFLAGS} ${_CCDEFFLAGS} ${CC_SRC_F}${SRC} ${CC_TGT_F}${TGT[0].abspath(env)}')[0]
try:
try:
conf.check(features='c testd', fragment='int main() {return 0;}\n', ccflags=['-MD'], mandatory=True, msg='Check for -MD')
except:
pass
else:
conf.check_tool('gccdeps', tooldir=conf.srcdir + "/buildtools/wafsamba")
finally:
cc.run = oldmeth
conf.load('gccdeps')
# make the install paths available in environment
conf.env.LIBDIR = Options.options.LIBDIR or '${PREFIX}/lib'
@ -512,7 +491,7 @@ struct foo bar = { .y = 'X', .x = 1 };
conf.DEFINE('TIME_WITH_SYS_TIME', 1)
# cope with different extensions for libraries
(root, ext) = os.path.splitext(conf.env.shlib_PATTERN)
(root, ext) = os.path.splitext(conf.env.cshlib_PATTERN)
if ext[0] == '.':
conf.define('SHLIBEXT', ext[1:], quote=True)
else:

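With waf 1.9 the shared-library filename pattern is per language, so SHLIBEXT is now derived from conf.env.cshlib_PATTERN (the C pattern) instead of shlib_PATTERN. A tiny sketch of that derivation; the Linux default pattern 'lib%s.so' is an assumption:

import os

cshlib_PATTERN = 'lib%s.so'         # typical value set by the C tool on Linux

print(cshlib_PATTERN % 'talloc')    # libtalloc.so
root, ext = os.path.splitext(cshlib_PATTERN)
print(ext[1:])                      # 'so', the value SHLIBEXT gets defined to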

@ -211,7 +211,7 @@ def testonly(ctx):
# Symlink back to source dir so it can find tests in test/
link = os.path.join(testdir, 'test')
if not os.path.exists(link):
os.symlink(os.path.abspath(os.path.join(env.cwd, 'test')), link)
os.symlink(ctx.path.make_node('test').abspath(), link)
sh_tests = ["test/test_tdbbackup.sh test/jenkins-be-hash.tdb"]
@ -249,9 +249,9 @@ def testonly(ctx):
# WAF doesn't build the unit tests for this, maybe because they don't link with tdb?
# This forces it
def test(ctx):
import Scripting
Scripting.commands.append('build')
Scripting.commands.append('testonly')
import Options
Options.commands.append('build')
Options.commands.append('testonly')
def dist():
'''makes a tarball for distribution'''


@ -17,7 +17,7 @@ def check_system_perl_module(conf, module, version=None):
return False
# Check for system perl module
if not conf.check_perl_module(module_check):
if conf.check_perl_module(module_check) is None:
return False
conf.define('USING_SYSTEM_%s' % bundle_name.upper(), 1)


@ -323,5 +323,5 @@ def cmd_test(opt):
# if running all tests, then force a symbol check
env = LOAD_ENVIRONMENT()
CHECK_MAKEFLAGS(env)
Scripting.commands.append('build')
Scripting.commands.append('testonly')
Options.commands.append('build')
Options.commands.append('testonly')


@ -505,7 +505,7 @@ bld.SAMBA3_MODULE('vfs_ceph',
init_function='',
internal_module=bld.SAMBA3_IS_STATIC_MODULE('vfs_ceph'),
enabled=bld.SAMBA3_IS_ENABLED_MODULE('vfs_ceph'),
cflags=bld.CONFIG_GET('CCFLAGS_CEPHFS'))
cflags=bld.CONFIG_GET('CFLAGS_CEPHFS'))
bld.SAMBA3_MODULE('vfs_glusterfs',
subsystem='vfs',


@ -1531,7 +1531,7 @@ main() {
conf.CHECK_DECLS('FS_IOC_GETFLAGS FS_COMPR_FL', headers='linux/fs.h')):
conf.DEFINE('HAVE_LINUX_IOCTL', '1')
conf.env['CCFLAGS_CEPHFS'] = "-D_FILE_OFFSET_BITS=64"
conf.env['CFLAGS_CEPHFS'] = "-D_FILE_OFFSET_BITS=64"
if Options.options.libcephfs_dir:
conf.env['CPPPATH_CEPHFS'] = Options.options.libcephfs_dir + '/include'
conf.env['LIBPATH_CEPHFS'] = Options.options.libcephfs_dir + '/lib'

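The CEPHFS flags now follow waf 1.9's uselib variable naming (CFLAGS_<NAME>, INCLUDES_<NAME>, LIBPATH_<NAME>, LIB_<NAME>), which the C feature applies to every task generator that lists the name in uselib. A hedged sketch of the convention; the include/lib paths and the bld.program target are illustrative:

def configure(conf):
    # Per-library uselib variables, waf 1.9 spelling: <VAR>_<NAME>
    conf.env['CFLAGS_CEPHFS'] = ['-D_FILE_OFFSET_BITS=64']
    conf.env['INCLUDES_CEPHFS'] = ['/opt/ceph/include']   # illustrative path
    conf.env['LIBPATH_CEPHFS'] = ['/opt/ceph/lib']        # illustrative path
    conf.env['LIB_CEPHFS'] = ['cephfs']

def build(bld):
    # Anything that names CEPHFS in uselib picks these values up automatically.
    bld.program(source='main.c', target='cephtest', uselib='CEPHFS')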

@ -157,7 +157,7 @@ def HEIMDAL_ERRTABLE(name, source):
t = bld(rule='"${SRC[1].abspath(env)}" "${TGT[0].parent.abspath(env)}" "${COMPILE_ET}" "${SRC[0].abspath(env)}" ${TGT[0].bldpath(env)}',
ext_out = '.c',
before = 'cc',
before = 'c',
update_outputs = True,
shell = True,
source = sources,
@ -178,7 +178,7 @@ def HEIMDAL_AUTOPROTO(header, source, options=None, group='prototypes'):
target=header,
update_outputs=True,
ext_out='.c',
before='cc')
before='c')
t.env.HEIMDAL = os.path.join(bld.srcnode.abspath(), 'source4/heimdal')
t.env.OPTIONS = options


@ -170,7 +170,7 @@ if not krb5_config:
krb5_config = conf.find_program("krb5-config", var="HEIMDAL_KRB5_CONFIG")
if krb5_config:
# Not ideal, but seems like the best way to get at these paths:
f = open(krb5_config, 'r')
f = open(krb5_config[0], 'r')
try:
for l in f:
if l.startswith("libdir="):
@ -216,10 +216,10 @@ if check_system_heimdal_lib("roken", "rk_socket_set_reuseaddr", "roken.h"):
# and include config.h if it is set, resulting in failure (since config.h
# doesn't yet exist)
CCDEFINES = list(conf.env.CCDEFINES)
DEFINES = list(conf.env.DEFINES)
conf.undefine("HAVE_CONFIG_H")
while "HAVE_CONFIG_H=1" in conf.env.CCDEFINES:
conf.env.CCDEFINES.remove("HAVE_CONFIG_H=1")
while "HAVE_CONFIG_H=1" in conf.env.DEFINES:
conf.env.DEFINES.remove("HAVE_CONFIG_H=1")
try:
check_system_heimdal_lib("wind", "wind_stringprep", "wind.h", onlyif="roken")
check_system_heimdal_lib("hx509", "hx509_bitstring_print", "hx509.h", onlyif="roken wind")
@ -249,7 +249,7 @@ try:
check_system_heimdal_lib("kdc", "kdc_log", "kdc.h",
onlyif="roken krb5 hdb asn1 heimntlm hcrypto com_err wind heimbase")
finally:
conf.env.CCDEFINES = CCDEFINES
conf.env.DEFINES = DEFINES
# With the proper checks in place we should be able to build against the system libtommath.
#if conf.CHECK_BUNDLED_SYSTEM('tommath', checkfunctions='mp_init', headers='tommath.h'):


@ -146,7 +146,7 @@ def configure(conf):
if not conf.CHECK_SHLIB_W_PYTHON("Checking if -fno-common is needed"):
conf.ADD_CFLAGS('-fno-common')
if not conf.CHECK_SHLIB_W_PYTHON("Checking if -undefined dynamic_lookup is not need"):
conf.env.append_value('shlib_LINKFLAGS', ['-undefined', 'dynamic_lookup'])
conf.env.append_value('cshlib_LINKFLAGS', ['-undefined', 'dynamic_lookup'])
if sys.platform == 'darwin':
conf.ADD_LDFLAGS('-framework CoreFoundation')