mirror of https://github.com/samba-team/samba.git synced 2025-01-08 21:18:16 +03:00

waf: upgrade to 2.0.20

This contains an important change:
"Fix gccdeps.scan() returning nodes that no longer exist on disk."
https://gitlab.com/ita1024/waf/-/merge_requests/2293

Signed-off-by: David Mulder <dmulder@suse.com>
Reviewed-by: Stefan Metzmacher <metze@samba.org>
Authored by David Mulder on 2020-08-24 13:12:46 -06:00; committed by Stefan Metzmacher
parent 896b7bbcf2
commit 5fc3a71d0f
26 changed files with 936 additions and 126 deletions
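For context, the referenced upstream fix makes gccdeps' scan() drop dependency nodes whose backing files have disappeared, so stale entries from compiler-generated .d files no longer poison rebuild signatures. A paraphrased sketch of the idea, not the literal waf code:

# Paraphrase of the upstream fix (assumption: simplified from
# waflib/extras/gccdeps.py): keep only dependency nodes whose
# files still exist on disk.
import os

def drop_missing(nodes):
    return [n for n in nodes if os.path.isfile(n.abspath())]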

buildtools/bin/waf vendored
View File

@@ -32,7 +32,7 @@ POSSIBILITY OF SUCH DAMAGE.
import os, sys, inspect
VERSION="2.0.18"
VERSION="2.0.20"
REVISION="x"
GIT="x"
INSTALL="x"

View File

@@ -459,7 +459,7 @@ def RECURSE(ctx, directory):
return
visited_dirs.add(key)
relpath = os.path.relpath(abspath, ctx.path.abspath())
if ctxclass in ['tmp', 'OptionsContext', 'ConfigurationContext', 'BuildContext']:
if ctxclass in ['tmp', 'OptionsContext', 'ConfigurationContext', 'BuildContext', 'ClangDbContext']:
return ctx.recurse(relpath)
if 'waflib.extras.compat15' in sys.modules:
return ctx.recurse(relpath)

View File

@@ -5,6 +5,7 @@ from waflib import Build, Configure, Node, Utils, Options, Logs, TaskGen
from waflib import ConfigSet
from waflib.TaskGen import feature, after
from waflib.Configure import conf, ConfigurationContext
from waflib.extras import clang_compilation_database
from waflib.Tools.flex import decide_ext
@@ -37,7 +38,7 @@ TaskGen.declare_chain(
)
for y in (Build.BuildContext, Build.CleanContext, Build.InstallContext, Build.UninstallContext, Build.ListContext):
for y in (Build.BuildContext, Build.CleanContext, Build.InstallContext, Build.UninstallContext, Build.ListContext, clang_compilation_database.ClangDbContext):
class tmp(y):
variant = 'default'

View File

@@ -38,7 +38,7 @@ LIB_PATH="shared"
os.environ['PYTHONUNBUFFERED'] = '1'
if Context.HEXVERSION not in (0x2001200,):
if Context.HEXVERSION not in (0x2001400,):
Logs.error('''
Please use the version of waf that comes with Samba, not
a system installed version. See http://wiki.samba.org/index.php/Waf

View File

@@ -508,23 +508,27 @@ def find_binary(self, filenames, exts, paths):
@conf
def run_build(self, *k, **kw):
"""
Create a temporary build context to execute a build. A reference to that build
context is kept on self.test_bld for debugging purposes, and you should not rely
on it too much (read the note on the cache below).
The parameters given in the arguments to this function are passed as arguments for
a single task generator created in the build. Only three parameters are obligatory:
Create a temporary build context to execute a build. A temporary reference to that build
context is kept on self.test_bld for debugging purposes.
The arguments to this function are passed to a single task generator for that build.
Only three parameters are mandatory:
:param features: features to pass to a task generator created in the build
:type features: list of string
:param compile_filename: file to create for the compilation (default: *test.c*)
:type compile_filename: string
:param code: code to write in the filename to compile
:param code: input file contents
:type code: string
Though this function returns *0* by default, the build may set an attribute named *retval* on the
Though this function returns *0* by default, the build may bind an attribute named *retval* on the
build context object to return a particular value. See :py:func:`waflib.Tools.c_config.test_exec_fun` for example.
This function also features a cache which can be enabled by the following option::
The temporary build creates a temporary folder; the name of that folder is calculated
by hashing input arguments to this function, with the exception of :py:class:`waflib.ConfigSet.ConfigSet`
objects which are used for both reading and writing values.
This function also features a cache which is disabled by default; that cache relies
on the hash value calculated as indicated above::
def options(opt):
opt.add_option('--confcache', dest='confcache', default=0,
@@ -538,7 +542,10 @@ def run_build(self, *k, **kw):
buf = []
for key in sorted(kw.keys()):
v = kw[key]
if hasattr(v, '__call__'):
if isinstance(v, ConfigSet.ConfigSet):
# values are being written to, so they are excluded from contributing to the hash
continue
elif hasattr(v, '__call__'):
buf.append(Utils.h_fun(v))
else:
buf.append(str(v))
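A minimal wscript sketch of the cache described in this docstring; the option declaration is the one quoted above, while the configure body is an assumption for illustration:

def options(opt):
    opt.add_option('--confcache', dest='confcache', default=0,
                   action='count', help='Use a configuration cache')

def configure(conf):
    # conf.check() executes a temporary build through run_build();
    # with --confcache, identical checks hash to the same folder
    # and are not re-executed on reconfiguration.
    conf.check(fragment='int main(void) { return 0; }',
               features='c cprogram', msg='Checking the C compiler')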

View File

@@ -6,20 +6,30 @@
Classes and functions enabling the command system
"""
import os, re, imp, sys
import os, re, sys
from waflib import Utils, Errors, Logs
import waflib.Node
if sys.hexversion > 0x3040000:
import types
class imp(object):
new_module = lambda x: types.ModuleType(x)
else:
import imp
# the following 3 constants are updated on each new release (do not touch)
HEXVERSION=0x2001200
HEXVERSION=0x2001400
"""Constant updated on new releases"""
WAFVERSION="2.0.18"
WAFVERSION="2.0.20"
"""Constant updated on new releases"""
WAFREVISION="314689b8994259a84f0de0aaef74d7ce91f541ad"
WAFREVISION="668769470956da8c5b60817cb8884cd7d0f87cd4"
"""Git revision when the waf version is updated"""
WAFNAME="waf"
"""Application name displayed on --help"""
ABI = 20
"""Version of the build data cache file format (used in :py:const:`waflib.Context.DBFILE`)"""

View File

@@ -44,7 +44,7 @@ class opt_parser(optparse.OptionParser):
"""
def __init__(self, ctx, allow_unknown=False):
optparse.OptionParser.__init__(self, conflict_handler='resolve', add_help_option=False,
version='waf %s (%s)' % (Context.WAFVERSION, Context.WAFREVISION))
version='%s %s (%s)' % (Context.WAFNAME, Context.WAFVERSION, Context.WAFREVISION))
self.formatter.width = Logs.get_term_cols()
self.ctx = ctx
self.allow_unknown = allow_unknown
@@ -62,6 +62,21 @@ class opt_parser(optparse.OptionParser):
else:
self.error(str(e))
def _process_long_opt(self, rargs, values):
# --custom-option=-ftxyz is interpreted as -f -t... see #2280
if self.allow_unknown:
back = [] + rargs
try:
optparse.OptionParser._process_long_opt(self, rargs, values)
except optparse.BadOptionError:
while rargs:
rargs.pop()
rargs.extend(back)
rargs.pop(0)
raise
else:
optparse.OptionParser._process_long_opt(self, rargs, values)
def print_usage(self, file=None):
return self.print_help(file)
@@ -96,11 +111,11 @@ class opt_parser(optparse.OptionParser):
lst.sort()
ret = '\n'.join(lst)
return '''waf [commands] [options]
return '''%s [commands] [options]
Main commands (example: ./waf build -j4)
Main commands (example: ./%s build -j4)
%s
''' % ret
''' % (Context.WAFNAME, Context.WAFNAME, ret)
class OptionsContext(Context.Context):
@@ -141,9 +156,9 @@ class OptionsContext(Context.Context):
gr.add_option('-o', '--out', action='store', default='', help='build dir for the project', dest='out')
gr.add_option('-t', '--top', action='store', default='', help='src dir for the project', dest='top')
gr.add_option('--no-lock-in-run', action='store_true', default='', help=optparse.SUPPRESS_HELP, dest='no_lock_in_run')
gr.add_option('--no-lock-in-out', action='store_true', default='', help=optparse.SUPPRESS_HELP, dest='no_lock_in_out')
gr.add_option('--no-lock-in-top', action='store_true', default='', help=optparse.SUPPRESS_HELP, dest='no_lock_in_top')
gr.add_option('--no-lock-in-run', action='store_true', default=os.environ.get('NO_LOCK_IN_RUN', ''), help=optparse.SUPPRESS_HELP, dest='no_lock_in_run')
gr.add_option('--no-lock-in-out', action='store_true', default=os.environ.get('NO_LOCK_IN_OUT', ''), help=optparse.SUPPRESS_HELP, dest='no_lock_in_out')
gr.add_option('--no-lock-in-top', action='store_true', default=os.environ.get('NO_LOCK_IN_TOP', ''), help=optparse.SUPPRESS_HELP, dest='no_lock_in_top')
default_prefix = getattr(Context.g_module, 'default_prefix', os.environ.get('PREFIX'))
if not default_prefix:
@@ -282,6 +297,8 @@ class OptionsContext(Context.Context):
elif arg != 'options':
commands.append(arg)
if options.jobs < 1:
options.jobs = 1
for name in 'top out destdir prefix bindir libdir'.split():
# those paths are usually expanded from Context.launch_dir
if getattr(options, name, None):

View File

@@ -306,7 +306,7 @@ def distclean(ctx):
# remove a build folder, if any
cur = '.'
if ctx.options.no_lock_in_top:
if os.environ.get('NO_LOCK_IN_TOP') or ctx.options.no_lock_in_top:
cur = ctx.options.out
try:
@@ -333,9 +333,9 @@ def distclean(ctx):
remove_and_log(env.out_dir, shutil.rmtree)
env_dirs = [env.out_dir]
if not ctx.options.no_lock_in_top:
if not (os.environ.get('NO_LOCK_IN_TOP') or ctx.options.no_lock_in_top):
env_dirs.append(env.top_dir)
if not ctx.options.no_lock_in_run:
if not (os.environ.get('NO_LOCK_IN_RUN') or ctx.options.no_lock_in_run):
env_dirs.append(env.run_dir)
for k in env_dirs:
p = os.path.join(k, Options.lockfile)

View File

@@ -38,7 +38,7 @@ def sniff_features(**kw):
:return: the list of features for a task generator processing the source files
:rtype: list of string
"""
exts = get_extensions(kw['source'])
exts = get_extensions(kw.get('source', []))
typ = kw['typ']
feats = []
@@ -72,7 +72,7 @@ def sniff_features(**kw):
feats.append(x + typ)
will_link = True
if not will_link and not kw.get('features', []):
raise Errors.WafError('Cannot link from %r, try passing eg: features="c cprogram"?' % kw)
raise Errors.WafError('Unable to determine how to link %r, try adding eg: features="c cshlib"?' % kw)
return feats
def set_features(kw, typ):
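The reworded error fires when no source extension maps to a linkable language and no features were given; a hypothetical wscript fragment showing the failure mode and the suggested remedy (file names are made up):

def build(bld):
    # No recognised C/C++/Fortran extension, so no link feature can be
    # sniffed and sniff_features() raises the WafError quoted above:
    #   bld.shlib(source='payload.bin', target='thing')
    # Passing features explicitly, as the message suggests, avoids it:
    bld.shlib(source='payload.bin', target='thing', features='c cshlib')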

View File

@@ -86,6 +86,10 @@ def parse_flags(self, line, uselib_store, env=None, force_static=False, posix=No
:type uselib_store: string
:param env: config set or conf.env by default
:type env: :py:class:`waflib.ConfigSet.ConfigSet`
:param force_static: force usage of static libraries
:type force_static: bool default False
:param posix: usage of POSIX mode for the shlex lexical analysis library
:type posix: bool default True
"""
assert(isinstance(line, str))
@@ -103,6 +107,8 @@ def parse_flags(self, line, uselib_store, env=None, force_static=False, posix=No
lex.commenters = ''
lst = list(lex)
so_re = re.compile(r"\.so(?:\.[0-9]+)*$")
# append_unique is not always possible
# for example, apple flags may require both -arch i386 and -arch ppc
uselib = uselib_store
@@ -144,7 +150,7 @@ def parse_flags(self, line, uselib_store, env=None, force_static=False, posix=No
elif x.startswith('-std='):
prefix = 'CXXFLAGS' if '++' in x else 'CFLAGS'
app(prefix, x)
elif x.startswith('+') or x in ('-pthread', '-fPIC', '-fpic', '-fPIE', '-fpie'):
elif x.startswith('+') or x in ('-pthread', '-fPIC', '-fpic', '-fPIE', '-fpie', '-flto', '-fno-lto'):
app('CFLAGS', x)
app('CXXFLAGS', x)
app('LINKFLAGS', x)
@@ -180,7 +186,7 @@ def parse_flags(self, line, uselib_store, env=None, force_static=False, posix=No
app('CFLAGS', tmp)
app('CXXFLAGS', tmp)
app('LINKFLAGS', tmp)
elif x.endswith(('.a', '.so', '.dylib', '.lib')):
elif x.endswith(('.a', '.dylib', '.lib')) or so_re.search(x):
appu('LINKFLAGS', x) # not cool, #762
else:
self.to_log('Unhandled flag %r' % x)
@@ -246,6 +252,8 @@ def exec_cfg(self, kw):
* if modversion is given, then return the module version
* else, execute the *-config* program with the *args* and *variables* given, and set the flags on the *conf.env.FLAGS_name* variable
:param path: the **-config program to use**
:type path: list of string
:param atleast_pkgconfig_version: minimum pkg-config version to use (disable other tests)
:type atleast_pkgconfig_version: string
:param package: package name, for example *gtk+-2.0*
@@ -260,6 +268,12 @@ def exec_cfg(self, kw):
:type variables: list of string
:param define_variable: additional variables to define (also in conf.env.PKG_CONFIG_DEFINES)
:type define_variable: dict(string: string)
:param pkg_config_path: paths where pkg-config should search for .pc config files (overrides env.PKG_CONFIG_PATH if it exists)
:type pkg_config_path: string, list of directories separated by colon
:param force_static: force usage of static libraries
:type force_static: bool default False
:param posix: usage of POSIX mode for the shlex lexical analysis library
:type posix: bool default True
"""
path = Utils.to_list(kw['path'])
@@ -334,6 +348,7 @@ def check_cfg(self, *k, **kw):
"""
Checks for configuration flags using a **-config**-like program (pkg-config, sdl-config, etc).
This wraps internal calls to :py:func:`waflib.Tools.c_config.validate_cfg` and :py:func:`waflib.Tools.c_config.exec_cfg`
so check the exec_cfg parameter descriptions for more details on the kw arguments passed
A few examples::
@@ -1267,10 +1282,11 @@ def multicheck(self, *k, **kw):
tasks = []
id_to_task = {}
for dct in k:
for counter, dct in enumerate(k):
x = Task.classes['cfgtask'](bld=bld, env=None)
tasks.append(x)
x.args = dct
x.args['multicheck_counter'] = counter
x.bld = bld
x.conf = self
x.args = dct
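The new so_re pattern lets parse_flags treat versioned shared objects the same as unversioned ones; a standalone illustration of what it matches:

import re

so_re = re.compile(r"\.so(?:\.[0-9]+)*$")   # pattern from the hunk above
assert so_re.search('libfoo.so')
assert so_re.search('libfoo.so.1.2.3')
assert not so_re.search('libfoo.so.bak')    # non-numeric suffix: no match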

View File

@@ -180,9 +180,15 @@ def check_large_file(self, **kw):
########################################################################################
ENDIAN_FRAGMENT = '''
#ifdef _MSC_VER
#define testshlib_EXPORT __declspec(dllexport)
#else
#define testshlib_EXPORT
#endif
short int ascii_mm[] = { 0x4249, 0x4765, 0x6E44, 0x6961, 0x6E53, 0x7953, 0 };
short int ascii_ii[] = { 0x694C, 0x5454, 0x656C, 0x6E45, 0x6944, 0x6E61, 0 };
int use_ascii (int i) {
int testshlib_EXPORT use_ascii (int i) {
return ascii_mm[i] + ascii_ii[i];
}
short int ebcdic_ii[] = { 0x89D3, 0xE3E3, 0x8593, 0x95C5, 0x89C4, 0x9581, 0 };
@@ -208,12 +214,12 @@ class grep_for_endianness(Task.Task):
return -1
@feature('grep_for_endianness')
@after_method('process_source')
@after_method('apply_link')
def grep_for_endianness_fun(self):
"""
Used by the endianness configuration test
"""
self.create_task('grep_for_endianness', self.compiled_tasks[0].outputs[0])
self.create_task('grep_for_endianness', self.link_task.outputs[0])
@conf
def check_endianness(self):
@@ -223,7 +229,8 @@ def check_endianness(self):
tmp = []
def check_msg(self):
return tmp[0]
self.check(fragment=ENDIAN_FRAGMENT, features='c grep_for_endianness',
self.check(fragment=ENDIAN_FRAGMENT, features='c cshlib grep_for_endianness',
msg='Checking for endianness', define='ENDIANNESS', tmp=tmp,
okmsg=check_msg, confcache=None)
return tmp[0]
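Why grepping the build artifact works: the short arrays encode two magic strings whose byte order depends on the target, so only one of them appears literally in the compiled output. A standalone illustration, not waf code:

import struct

ascii_mm = (0x4249, 0x4765, 0x6E44, 0x6961, 0x6E53, 0x7953)
ascii_ii = (0x694C, 0x5454, 0x656C, 0x6E45, 0x6944, 0x6E61)
print(b''.join(struct.pack('>H', v) for v in ascii_mm))  # b'BIGenDianSyS'
print(b''.join(struct.pack('<H', v) for v in ascii_ii))  # b'LiTTleEnDian'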

View File

@@ -37,7 +37,7 @@ from waflib.Logs import debug
c_compiler = {
'win32': ['msvc', 'gcc', 'clang'],
'cygwin': ['gcc'],
'cygwin': ['gcc', 'clang'],
'darwin': ['clang', 'gcc'],
'aix': ['xlc', 'gcc', 'clang'],
'linux': ['gcc', 'clang', 'icc'],

View File

@@ -38,7 +38,7 @@ from waflib.Logs import debug
cxx_compiler = {
'win32': ['msvc', 'g++', 'clang++'],
'cygwin': ['g++'],
'cygwin': ['g++', 'clang++'],
'darwin': ['clang++', 'g++'],
'aix': ['xlc++', 'g++', 'clang++'],
'linux': ['g++', 'clang++', 'icpc'],

View File

@@ -13,8 +13,8 @@ from waflib.TaskGen import extension
from waflib.Configure import conf
ccroot.USELIB_VARS['fc'] = set(['FCFLAGS', 'DEFINES', 'INCLUDES', 'FCPPFLAGS'])
ccroot.USELIB_VARS['fcprogram_test'] = ccroot.USELIB_VARS['fcprogram'] = set(['LIB', 'STLIB', 'LIBPATH', 'STLIBPATH', 'LINKFLAGS', 'RPATH', 'LINKDEPS'])
ccroot.USELIB_VARS['fcshlib'] = set(['LIB', 'STLIB', 'LIBPATH', 'STLIBPATH', 'LINKFLAGS', 'RPATH', 'LINKDEPS'])
ccroot.USELIB_VARS['fcprogram_test'] = ccroot.USELIB_VARS['fcprogram'] = set(['LIB', 'STLIB', 'LIBPATH', 'STLIBPATH', 'LINKFLAGS', 'RPATH', 'LINKDEPS', 'LDFLAGS'])
ccroot.USELIB_VARS['fcshlib'] = set(['LIB', 'STLIB', 'LIBPATH', 'STLIBPATH', 'LINKFLAGS', 'RPATH', 'LINKDEPS', 'LDFLAGS'])
ccroot.USELIB_VARS['fcstlib'] = set(['ARFLAGS', 'LINKDEPS'])
@extension('.f','.F','.f90','.F90','.for','.FOR','.f95','.F95','.f03','.F03','.f08','.F08')

View File

@@ -13,22 +13,11 @@ from waflib.Configure import conf
@conf
def find_irixcc(conf):
v = conf.env
cc = None
if v.CC:
cc = v.CC
elif 'CC' in conf.environ:
cc = conf.environ['CC']
if not cc:
cc = conf.find_program('cc', var='CC')
if not cc:
conf.fatal('irixcc was not found')
cc = conf.find_program('cc', var='CC')
try:
conf.cmd_and_log(cc + ['-version'])
except Errors.WafError:
conf.fatal('%r -version could not be executed' % cc)
v.CC = cc
v.CC_NAME = 'irix'
@conf
@@ -57,7 +46,6 @@ def irixcc_common_flags(conf):
def configure(conf):
conf.find_irixcc()
conf.find_cpp()
conf.find_ar()
conf.irixcc_common_flags()
conf.cc_load_tools()

View File

@@ -251,7 +251,7 @@ def use_javac_files(self):
base_node = tg.path.get_bld()
self.use_lst.append(base_node.abspath())
self.javac_task.dep_nodes.extend([x for x in base_node.ant_glob(JAR_RE, remove=False, quiet=True)])
self.javac_task.dep_nodes.extend([dx for dx in base_node.ant_glob(JAR_RE, remove=False, quiet=True)])
for tsk in tg.tasks:
self.javac_task.set_run_after(tsk)

View File

@@ -620,7 +620,7 @@ def configure(conf):
v.PYO = getattr(Options.options, 'pyo', 1)
try:
v.PYTAG = conf.cmd_and_log(conf.env.PYTHON + ['-c', "import imp;print(imp.get_tag())"]).strip()
v.PYTAG = conf.cmd_and_log(conf.env.PYTHON + ['-c', "import sys\ntry:\n print(sys.implementation.cache_tag)\nexcept AttributeError:\n import imp\n print(imp.get_tag())\n"]).strip()
except Errors.WafError:
pass
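The replacement snippet prefers sys.implementation.cache_tag, available since Python 3.3, and only falls back to the deprecated imp.get_tag(); both yield the interpreter tag used in bytecode file names:

import sys

print(sys.implementation.cache_tag)  # e.g. 'cpython-38'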

View File

@@ -482,8 +482,8 @@ def configure(self):
self.fatal('No CXX compiler defined: did you forget to configure compiler_cxx first?')
# Qt5 may be compiled with '-reduce-relocations' which requires dependent programs to have -fPIE or -fPIC?
frag = '#include <QApplication>\nint main(int argc, char **argv) {return 0;}\n'
uses = 'QT5CORE QT5WIDGETS QT5GUI'
frag = '#include <QMap>\nint main(int argc, char **argv) {QMap<int,int> m;return m.keys().size();}\n'
uses = 'QT5CORE'
for flag in [[], '-fPIE', '-fPIC', '-std=c++11' , ['-std=c++11', '-fPIE'], ['-std=c++11', '-fPIC']]:
msg = 'See if Qt files compile '
if flag:
@@ -499,7 +499,7 @@ def configure(self):
# FreeBSD does not add /usr/local/lib and the pkg-config files do not provide it either :-/
if Utils.unversioned_sys_platform() == 'freebsd':
frag = '#include <QApplication>\nint main(int argc, char **argv) { QApplication app(argc, argv); return NULL != (void*) (&app);}\n'
frag = '#include <QMap>\nint main(int argc, char **argv) {QMap<int,int> m;return m.keys().size();}\n'
try:
self.check(features='qt5 cxx cxxprogram', use=uses, fragment=frag, msg='Can we link Qt programs on FreeBSD directly?')
except self.errors.ConfigurationError:

View File

@@ -891,7 +891,7 @@ def run_prefork_process(cmd, kwargs, cargs):
"""
Delegates process execution to a pre-forked process instance.
"""
if not 'env' in kwargs:
if not kwargs.get('env'):
kwargs['env'] = dict(os.environ)
try:
obj = base64.b64encode(cPickle.dumps([cmd, kwargs, cargs]))
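The change matters when callers pass env=None explicitly: the old membership test skipped the os.environ default, while the new truthiness test does not. Standalone illustration:

import os

kwargs = {'env': None}
print('env' in kwargs)          # True  -> old check left env as None
print(bool(kwargs.get('env')))  # False -> new check fills in os.environ
kwargs['env'] = dict(os.environ)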

View File

@@ -1,6 +1,7 @@
#!/usr/bin/env python
# encoding: utf-8
# Christoph Koke, 2013
# Alibek Omarov, 2019
"""
Writes the c and cpp compile commands into build/compile_commands.json
@@ -8,14 +9,23 @@ see http://clang.llvm.org/docs/JSONCompilationDatabase.html
Usage:
def configure(conf):
conf.load('compiler_cxx')
...
conf.load('clang_compilation_database')
Load this tool in `options` to be able to generate the database
on request from the command line and before each build:
$ waf clangdb
def options(opt):
opt.load('clang_compilation_database')
Otherwise, load it only in `configure` to always generate the database before the build.
def configure(conf):
conf.load('compiler_cxx')
...
conf.load('clang_compilation_database')
"""
import sys, os, json, shlex, pipes
from waflib import Logs, TaskGen, Task
from waflib import Logs, TaskGen, Task, Build, Scripting
Task.Task.keep_last_cmd = True
@@ -23,63 +33,103 @@ Task.Task.keep_last_cmd = True
@TaskGen.after_method('process_use')
def collect_compilation_db_tasks(self):
"Add a compilation database entry for compiled tasks"
try:
clang_db = self.bld.clang_compilation_database_tasks
except AttributeError:
clang_db = self.bld.clang_compilation_database_tasks = []
self.bld.add_post_fun(write_compilation_database)
if not isinstance(self.bld, ClangDbContext):
return
tup = tuple(y for y in [Task.classes.get(x) for x in ('c', 'cxx')] if y)
for task in getattr(self, 'compiled_tasks', []):
if isinstance(task, tup):
clang_db.append(task)
self.bld.clang_compilation_database_tasks.append(task)
def write_compilation_database(ctx):
"Write the clang compilation database as JSON"
database_file = ctx.bldnode.make_node('compile_commands.json')
Logs.info('Build commands will be stored in %s', database_file.path_from(ctx.path))
try:
root = json.load(database_file)
except IOError:
root = []
clang_db = dict((x['file'], x) for x in root)
for task in getattr(ctx, 'clang_compilation_database_tasks', []):
class ClangDbContext(Build.BuildContext):
'''generates compile_commands.json by request'''
cmd = 'clangdb'
clang_compilation_database_tasks = []
def write_compilation_database(self):
"""
Write the clang compilation database as JSON
"""
database_file = self.bldnode.make_node('compile_commands.json')
Logs.info('Build commands will be stored in %s', database_file.path_from(self.path))
try:
cmd = task.last_cmd
except AttributeError:
continue
directory = getattr(task, 'cwd', ctx.variant_dir)
f_node = task.inputs[0]
filename = os.path.relpath(f_node.abspath(), directory)
entry = {
"directory": directory,
"arguments": cmd,
"file": filename,
}
clang_db[filename] = entry
root = list(clang_db.values())
database_file.write(json.dumps(root, indent=2))
root = database_file.read_json()
except IOError:
root = []
clang_db = dict((x['file'], x) for x in root)
for task in self.clang_compilation_database_tasks:
try:
cmd = task.last_cmd
except AttributeError:
continue
f_node = task.inputs[0]
filename = f_node.path_from(task.get_cwd())
entry = {
"directory": task.get_cwd().abspath(),
"arguments": cmd,
"file": filename,
}
clang_db[filename] = entry
root = list(clang_db.values())
database_file.write_json(root)
# Override the runnable_status function to do a dummy/dry run when the file doesn't need to be compiled.
# This will make sure compile_commands.json is always fully up to date.
# Previously you could end up with a partial compile_commands.json if the build failed.
for x in ('c', 'cxx'):
if x not in Task.classes:
continue
def execute(self):
"""
Build dry run
"""
self.restore()
t = Task.classes[x]
if not self.all_envs:
self.load_envs()
def runnable_status(self):
def exec_command(cmd, **kw):
pass
self.recurse([self.run_dir])
self.pre_build()
run_status = self.old_runnable_status()
if run_status == Task.SKIP_ME:
setattr(self, 'old_exec_command', getattr(self, 'exec_command', None))
setattr(self, 'exec_command', exec_command)
self.run()
setattr(self, 'exec_command', getattr(self, 'old_exec_command', None))
return run_status
# we need only to generate last_cmd, so override
# exec_command temporarily
def exec_command(self, *k, **kw):
return 0
setattr(t, 'old_runnable_status', getattr(t, 'runnable_status', None))
setattr(t, 'runnable_status', runnable_status)
for g in self.groups:
for tg in g:
try:
f = tg.post
except AttributeError:
pass
else:
f()
if isinstance(tg, Task.Task):
lst = [tg]
else: lst = tg.tasks
for tsk in lst:
tup = tuple(y for y in [Task.classes.get(x) for x in ('c', 'cxx')] if y)
if isinstance(tsk, tup):
old_exec = tsk.exec_command
tsk.exec_command = exec_command
tsk.run()
tsk.exec_command = old_exec
self.write_compilation_database()
EXECUTE_PATCHED = False
def patch_execute():
global EXECUTE_PATCHED
if EXECUTE_PATCHED:
return
def new_execute_build(self):
"""
Invoke clangdb command before build
"""
if self.cmd.startswith('build'):
Scripting.run_command('clangdb')
old_execute_build(self)
old_execute_build = getattr(Build.BuildContext, 'execute_build', None)
setattr(Build.BuildContext, 'execute_build', new_execute_build)
EXECUTE_PATCHED = True
patch_execute()
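For reference, each record that write_compilation_database() above emits into compile_commands.json has this shape; only the field names come from the code, the values here are illustrative:

entry = {
    "directory": "/home/user/project/bld",   # task.get_cwd().abspath()
    "arguments": ["clang", "-c", "../src/main.c", "-o", "main.c.1.o"],
    "file": "../src/main.c",                 # relative to the directory
}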

View File

@@ -69,6 +69,7 @@ def parse_doxy(txt):
class doxygen(Task.Task):
vars = ['DOXYGEN', 'DOXYFLAGS']
color = 'BLUE'
ext_in = [ '.py', '.c', '.h', '.java', '.pb.cc' ]
def runnable_status(self):
'''

View File

@@ -27,7 +27,7 @@ if not c_preproc.go_absolute:
gccdeps_flags = ['-MMD']
# Third-party tools are allowed to add extra names in here with append()
supported_compilers = ['gcc', 'icc', 'clang']
supported_compilers = ['gas', 'gcc', 'icc', 'clang']
def scan(self):
if not self.__class__.__name__ in self.env.ENABLE_GCCDEPS:
@@ -175,14 +175,14 @@ def wrap_compiled_task(classname):
derived_class.scan = scan
derived_class.sig_implicit_deps = sig_implicit_deps
for k in ('c', 'cxx'):
for k in ('asm', 'c', 'cxx'):
if k in Task.classes:
wrap_compiled_task(k)
@before_method('process_source')
@feature('force_gccdeps')
def force_gccdeps(self):
self.env.ENABLE_GCCDEPS = ['c', 'cxx']
self.env.ENABLE_GCCDEPS = ['asm', 'c', 'cxx']
def configure(conf):
# in case someone provides a --enable-gccdeps command-line option
@@ -191,6 +191,15 @@ def configure(conf):
global gccdeps_flags
flags = conf.env.GCCDEPS_FLAGS or gccdeps_flags
if conf.env.ASM_NAME in supported_compilers:
try:
conf.check(fragment='', features='asm force_gccdeps', asflags=flags, compile_filename='test.S', msg='Checking for asm flags %r' % ''.join(flags))
except Errors.ConfigurationError:
pass
else:
conf.env.append_value('ASFLAGS', flags)
conf.env.append_unique('ENABLE_GCCDEPS', 'asm')
if conf.env.CC_NAME in supported_compilers:
try:
conf.check(fragment='int main() { return 0; }', features='c force_gccdeps', cflags=flags, msg='Checking for c flags %r' % ''.join(flags))
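A minimal wscript sketch for picking up gccdeps, including the new assembler coverage; the tool loading is standard waf usage, the project layout is an assumption:

def options(opt):
    opt.load('compiler_c')

def configure(conf):
    conf.load('compiler_c')
    conf.load('gas')       # assembler tool, so 'asm' tasks exist
    # gccdeps swaps waf's preprocessor-based scanner for the compiler's
    # -MMD dependency output; with this change it also covers assembler
    # tasks when conf.env.ASM_NAME is a supported compiler.
    conf.load('gccdeps')

def build(bld):
    bld.program(source='main.c start.S', target='app')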

View File

@@ -1,6 +1,6 @@
#! /usr/bin/env python
# encoding: utf-8
# Federico Pellegrin, 2017 (fedepell)
# Federico Pellegrin, 2019 (fedepell)
"""
Provides Java Unit test support using :py:class:`waflib.Tools.waf_unit_test.utest`
@@ -11,6 +11,10 @@ standard waf unit test environment. It has been tested with TestNG and JUnit
but should be easily expandable to other frameworks given the flexibility of
ut_str provided by the standard waf unit test environment.
The extra also takes care of managing non-Java dependencies (i.e. C/C++ libraries
using JNI, or Python modules via JEP) and of setting up the environment needed to run
them.
Example usage:
def options(opt):
@@ -20,15 +24,15 @@ def configure(conf):
conf.load('java javatest')
def build(bld):
[ ... mainprog is built here ... ]
bld(features = 'javac javatest',
srcdir = 'test/',
outdir = 'test',
srcdir = 'test/',
outdir = 'test',
sourcepath = ['test'],
classpath = [ 'src' ],
basedir = 'test',
classpath = [ 'src' ],
basedir = 'test',
use = ['JAVATEST', 'mainprog'], # mainprog is the program being tested in src/
ut_str = 'java -cp ${CLASSPATH} ${JTRUNNER} ${SRC}',
jtest_source = bld.path.ant_glob('test/*.xml'),
@@ -53,10 +57,107 @@ The runner class presence on the system is checked for at configuration stage.
"""
import os
from waflib import Task, TaskGen, Options
from waflib import Task, TaskGen, Options, Errors, Utils, Logs
from waflib.Tools import ccroot
JAR_RE = '**/*'
def _process_use_rec(self, name):
"""
Recursively process ``use`` for the task generator named ``name``.
Used by javatest_process_use.
"""
if name in self.javatest_use_not or name in self.javatest_use_seen:
return
try:
tg = self.bld.get_tgen_by_name(name)
except Errors.WafError:
self.javatest_use_not.add(name)
return
self.javatest_use_seen.append(name)
tg.post()
for n in self.to_list(getattr(tg, 'use', [])):
_process_use_rec(self, n)
@TaskGen.feature('javatest')
@TaskGen.after_method('apply_java', 'use_javac_files', 'set_classpath')
@TaskGen.after_method('process_source', 'apply_link', 'use_javac_files')
def javatest_process_use(self):
"""
Process the ``use`` attribute, which contains a list of task generator names, and store
the paths that are later used to populate the unit test runtime environment.
"""
self.javatest_use_not = set()
self.javatest_use_seen = []
self.javatest_libpaths = [] # strings or Nodes
self.javatest_pypaths = [] # strings or Nodes
self.javatest_dep_nodes = []
names = self.to_list(getattr(self, 'use', []))
for name in names:
_process_use_rec(self, name)
def extend_unique(lst, varlst):
ext = []
for x in varlst:
if x not in lst:
ext.append(x)
lst.extend(ext)
# Collect type specific info needed to construct a valid runtime environment
# for the test.
for name in self.javatest_use_seen:
tg = self.bld.get_tgen_by_name(name)
# Python-Java embedding crosstools such as JEP
if 'py' in tg.features:
# Python dependencies are added to PYTHONPATH
pypath = getattr(tg, 'install_from', tg.path)
if 'buildcopy' in tg.features:
# Since buildcopy is used we assume that PYTHONPATH in build should be used,
# not source
extend_unique(self.javatest_pypaths, [pypath.get_bld().abspath()])
# Add buildcopy output nodes to dependencies
extend_unique(self.javatest_dep_nodes, [o for task in getattr(tg, 'tasks', []) for o in getattr(task, 'outputs', [])])
else:
# If buildcopy is not used, depend on sources instead
extend_unique(self.javatest_dep_nodes, tg.source)
extend_unique(self.javatest_pypaths, [pypath.abspath()])
if getattr(tg, 'link_task', None):
# For tasks with a link_task (C, C++, D, etc.) include their library paths:
if not isinstance(tg.link_task, ccroot.stlink_task):
extend_unique(self.javatest_dep_nodes, tg.link_task.outputs)
extend_unique(self.javatest_libpaths, tg.link_task.env.LIBPATH)
if 'pyext' in tg.features:
# If the taskgen is extending Python we also want to add the interpreter libpath.
extend_unique(self.javatest_libpaths, tg.link_task.env.LIBPATH_PYEXT)
else:
# Only add to libpath if the link task is not a Python extension
extend_unique(self.javatest_libpaths, [tg.link_task.outputs[0].parent.abspath()])
if 'javac' in tg.features or 'jar' in tg.features:
if hasattr(tg, 'jar_task'):
# For Java JAR tasks depend on generated JAR
extend_unique(self.javatest_dep_nodes, tg.jar_task.outputs)
else:
# For Java non-JAR ones we need to glob generated files (Java output files are not predictable)
if hasattr(tg, 'outdir'):
base_node = tg.outdir
else:
base_node = tg.path.get_bld()
self.javatest_dep_nodes.extend([dx for dx in base_node.ant_glob(JAR_RE, remove=False, quiet=True)])
@TaskGen.feature('javatest')
@TaskGen.after_method('apply_java', 'use_javac_files', 'set_classpath', 'javatest_process_use')
def make_javatest(self):
"""
Creates a ``utest`` task with a populated environment for Java Unit test execution
@@ -65,6 +166,9 @@ def make_javatest(self):
tsk = self.create_task('utest')
tsk.set_run_after(self.javac_task)
# Dependencies from recursive use analysis
tsk.dep_nodes.extend(self.javatest_dep_nodes)
# Put test input files first, as waf_unit_test relies on them for some prints and log generation
# If jtest_source is there (especially useful for passing TestNG XML files
# that contain the test specification), use it as the inputs; otherwise use the test sources
@@ -97,6 +201,21 @@ def make_javatest(self):
if not hasattr(self, 'ut_env'):
self.ut_env = dict(os.environ)
def add_paths(var, lst):
# Add list of paths to a variable, lst can contain strings or nodes
lst = [ str(n) for n in lst ]
Logs.debug("ut: %s: Adding paths %s=%s", self, var, lst)
self.ut_env[var] = os.pathsep.join(lst) + os.pathsep + self.ut_env.get(var, '')
add_paths('PYTHONPATH', self.javatest_pypaths)
if Utils.is_win32:
add_paths('PATH', self.javatest_libpaths)
elif Utils.unversioned_sys_platform() == 'darwin':
add_paths('DYLD_LIBRARY_PATH', self.javatest_libpaths)
add_paths('LD_LIBRARY_PATH', self.javatest_libpaths)
else:
add_paths('LD_LIBRARY_PATH', self.javatest_libpaths)
def configure(ctx):
cp = ctx.env.CLASSPATH or '.'

View File

@@ -0,0 +1,46 @@
#!/usr/bin/env python
# encoding: utf-8
# Rafaël Kooi 2019
from waflib import TaskGen
@TaskGen.feature('c', 'cxx', 'fc')
@TaskGen.after_method('propagate_uselib_vars')
def add_pdb_per_object(self):
"""For msvc/fortran, specify a unique compile pdb per object, to work
around LNK4099. Flags are updated with a unique /Fd flag based on the
task output name. This is separate from the link pdb.
"""
if not hasattr(self, 'compiled_tasks'):
return
link_task = getattr(self, 'link_task', None)
for task in self.compiled_tasks:
if task.inputs and task.inputs[0].name.lower().endswith('.rc'):
continue
add_pdb = False
for flagname in ('CFLAGS', 'CXXFLAGS', 'FCFLAGS'):
# several languages may be used at once
for flag in task.env[flagname]:
if flag[1:].lower() == 'zi':
add_pdb = True
break
if add_pdb:
node = task.outputs[0].change_ext('.pdb')
pdb_flag = '/Fd:' + node.abspath()
for flagname in ('CFLAGS', 'CXXFLAGS', 'FCFLAGS'):
buf = [pdb_flag]
for flag in task.env[flagname]:
if flag[1:3] == 'Fd' or flag[1:].lower() == 'fs' or flag[1:].lower() == 'mp':
continue
buf.append(flag)
task.env[flagname] = buf
if link_task and not node in link_task.dep_nodes:
link_task.dep_nodes.append(node)
if not node in task.outputs:
task.outputs.append(node)
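Opting in follows the usual waf extras pattern; a sketch, assuming the new file is the extras tool msvc_pdb (the diff above does not show the file name):

def configure(conf):
    conf.load('compiler_cxx')
    conf.load('msvc_pdb')   # per-object PDBs to silence LNK4099, per above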

View File

@@ -40,6 +40,8 @@ the following environment variables for the `pytest` test runner:
- `pytest_libpath` attribute is used to manually specify additional linker paths.
3. Java class search path (CLASSPATH) of any Java/Javalike dependency
Note: `pytest` cannot automatically determine the correct `PYTHONPATH` for `pyext` taskgens
because the extension might be part of a Python package or used standalone:
@@ -119,6 +121,7 @@ def pytest_process_use(self):
self.pytest_use_seen = []
self.pytest_paths = [] # strings or Nodes
self.pytest_libpaths = [] # strings or Nodes
self.pytest_javapaths = [] # strings or Nodes
self.pytest_dep_nodes = []
names = self.to_list(getattr(self, 'use', []))
@@ -157,6 +160,17 @@ def pytest_process_use(self):
extend_unique(self.pytest_dep_nodes, tg.source)
extend_unique(self.pytest_paths, [pypath.abspath()])
if 'javac' in tg.features:
# If a JAR is generated point to that, otherwise to directory
if getattr(tg, 'jar_task', None):
extend_unique(self.pytest_javapaths, [tg.jar_task.outputs[0].abspath()])
else:
extend_unique(self.pytest_javapaths, [tg.path.get_bld()])
# And add respective dependencies if present
if tg.use_lst:
extend_unique(self.pytest_javapaths, tg.use_lst)
if getattr(tg, 'link_task', None):
# For tasks with a link_task (C, C++, D, etc.) include their library paths:
if not isinstance(tg.link_task, ccroot.stlink_task):
@@ -212,8 +226,9 @@ def make_pytest(self):
Logs.debug("ut: %s: Adding paths %s=%s", self, var, lst)
self.ut_env[var] = os.pathsep.join(lst) + os.pathsep + self.ut_env.get(var, '')
# Prepend dependency paths to PYTHONPATH and LD_LIBRARY_PATH
# Prepend dependency paths to PYTHONPATH, CLASSPATH and LD_LIBRARY_PATH
add_paths('PYTHONPATH', self.pytest_paths)
add_paths('CLASSPATH', self.pytest_javapaths)
if Utils.is_win32:
add_paths('PATH', self.pytest_libpaths)

View File

@@ -0,0 +1,524 @@
#! /usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2019 (ita)
"""
Filesystem-based cache system to share and re-use build artifacts
Cache access operations (copy to and from) are delegated to
independent pre-forked worker subprocesses.
The following environment variables may be set:
* WAFCACHE: several possibilities:
- File cache:
absolute path of the waf cache (~/.cache/wafcache_user,
where `user` represents the currently logged-in user)
- URL to a cache server, for example:
export WAFCACHE=http://localhost:8080/files/
in that case, GET/POST requests are made to urls of the form
http://localhost:8080/files/000000000/0 (cache management is then up to the server)
- GCS or S3 bucket
gs://my-bucket/
s3://my-bucket/
* WAFCACHE_NO_PUSH: if set, disables pushing to the cache
* WAFCACHE_VERBOSITY: if set, displays more detailed cache operations
File cache specific options:
Files are copied using hard links by default; if the cache is located
on another partition, the system switches to file copies instead.
* WAFCACHE_TRIM_MAX_FOLDER: maximum amount of tasks to cache (1M)
* WAFCACHE_EVICT_MAX_BYTES: maximum amount of cache size in bytes (10GB)
* WAFCACHE_EVICT_INTERVAL_MINUTES: minimum time interval to try
and trim the cache (3 minutes)
Usage::
def build(bld):
bld.load('wafcache')
...
To troubleshoot::
waf clean build --zones=wafcache
"""
import atexit, base64, errno, fcntl, getpass, os, shutil, sys, time, traceback, urllib3
try:
import subprocess32 as subprocess
except ImportError:
import subprocess
base_cache = os.path.expanduser('~/.cache/')
if not os.path.isdir(base_cache):
base_cache = '/tmp/'
default_wafcache_dir = os.path.join(base_cache, 'wafcache_' + getpass.getuser())
CACHE_DIR = os.environ.get('WAFCACHE', default_wafcache_dir)
TRIM_MAX_FOLDERS = int(os.environ.get('WAFCACHE_TRIM_MAX_FOLDER', 1000000))
EVICT_INTERVAL_MINUTES = int(os.environ.get('WAFCACHE_EVICT_INTERVAL_MINUTES', 3))
EVICT_MAX_BYTES = int(os.environ.get('WAFCACHE_EVICT_MAX_BYTES', 10**10))
WAFCACHE_NO_PUSH = 1 if os.environ.get('WAFCACHE_NO_PUSH') else 0
WAFCACHE_VERBOSITY = 1 if os.environ.get('WAFCACHE_VERBOSITY') else 0
OK = "ok"
try:
import cPickle
except ImportError:
import pickle as cPickle
if __name__ != '__main__':
from waflib import Task, Logs, Utils, Build
def can_retrieve_cache(self):
"""
New method for waf Task classes
"""
if not self.outputs:
return False
self.cached = False
sig = self.signature()
ssig = Utils.to_hex(self.uid() + sig)
files_to = [node.abspath() for node in self.outputs]
err = cache_command(ssig, [], files_to)
if err.startswith(OK):
if WAFCACHE_VERBOSITY:
Logs.pprint('CYAN', ' Fetched %r from cache' % files_to)
else:
Logs.debug('wafcache: fetched %r from cache', files_to)
else:
if WAFCACHE_VERBOSITY:
Logs.pprint('YELLOW', ' No cache entry %s' % files_to)
else:
Logs.debug('wafcache: No cache entry %s: %s', files_to, err)
return False
self.cached = True
return True
def put_files_cache(self):
"""
New method for waf Task classes
"""
if WAFCACHE_NO_PUSH or getattr(self, 'cached', None) or not self.outputs:
return
bld = self.generator.bld
sig = self.signature()
ssig = Utils.to_hex(self.uid() + sig)
files_from = [node.abspath() for node in self.outputs]
err = cache_command(ssig, files_from, [])
if err.startswith(OK):
if WAFCACHE_VERBOSITY:
Logs.pprint('CYAN', ' Successfully uploaded %s to cache' % files_from)
else:
Logs.debug('wafcache: Successfully uploaded %r to cache', files_from)
else:
if WAFCACHE_VERBOSITY:
Logs.pprint('RED', ' Error caching step results %s: %s' % (files_from, err))
else:
Logs.debug('wafcache: Error caching results %s: %s', files_from, err)
bld.task_sigs[self.uid()] = self.cache_sig
def hash_env_vars(self, env, vars_lst):
"""
Reimplement BuildContext.hash_env_vars so that the resulting hash does not depend on local paths
"""
if not env.table:
env = env.parent
if not env:
return Utils.SIG_NIL
idx = str(id(env)) + str(vars_lst)
try:
cache = self.cache_env
except AttributeError:
cache = self.cache_env = {}
else:
try:
return self.cache_env[idx]
except KeyError:
pass
v = str([env[a] for a in vars_lst])
v = v.replace(self.srcnode.abspath().__repr__()[:-1], '')
m = Utils.md5()
m.update(v.encode())
ret = m.digest()
Logs.debug('envhash: %r %r', ret, v)
cache[idx] = ret
return ret
def uid(self):
"""
Reimplement Task.uid() so that the signature does not depend on local paths
"""
try:
return self.uid_
except AttributeError:
m = Utils.md5()
src = self.generator.bld.srcnode
up = m.update
up(self.__class__.__name__.encode())
for x in self.inputs + self.outputs:
up(x.path_from(src).encode())
self.uid_ = m.digest()
return self.uid_
def make_cached(cls):
"""
Enable the waf cache for a given task class
"""
if getattr(cls, 'nocache', None) or getattr(cls, 'has_cache', False):
return
m1 = getattr(cls, 'run', None)
def run(self):
if getattr(self, 'nocache', False):
return m1(self)
if self.can_retrieve_cache():
return 0
return m1(self)
cls.run = run
m2 = getattr(cls, 'post_run', None)
def post_run(self):
if getattr(self, 'nocache', False):
return m2(self)
ret = m2(self)
self.put_files_cache()
if hasattr(self, 'chmod'):
for node in self.outputs:
os.chmod(node.abspath(), self.chmod)
return ret
cls.post_run = post_run
cls.has_cache = True
process_pool = []
def get_process():
"""
Returns a worker process that can process waf cache commands
The worker process is assumed to be returned to the process pool when unused
"""
try:
return process_pool.pop()
except IndexError:
filepath = os.path.dirname(os.path.abspath(__file__)) + os.sep + 'wafcache.py'
cmd = [sys.executable, '-c', Utils.readf(filepath)]
return subprocess.Popen(cmd, stdout=subprocess.PIPE, stdin=subprocess.PIPE, bufsize=0)
def atexit_pool():
for k in process_pool:
try:
os.kill(k.pid, 9)
except OSError:
pass
else:
k.wait()
atexit.register(atexit_pool)
def build(bld):
"""
Called during the build process to enable file caching
"""
if process_pool:
# already called once
return
for x in range(bld.jobs):
process_pool.append(get_process())
Task.Task.can_retrieve_cache = can_retrieve_cache
Task.Task.put_files_cache = put_files_cache
Task.Task.uid = uid
Build.BuildContext.hash_env_vars = hash_env_vars
for x in reversed(list(Task.classes.values())):
make_cached(x)
def cache_command(sig, files_from, files_to):
"""
Create a command for cache worker processes, returns a pickled
base64-encoded tuple containing the task signature, a list of files to
cache and a list of files to get from the cache (one of the lists
is assumed to be empty)
"""
proc = get_process()
obj = base64.b64encode(cPickle.dumps([sig, files_from, files_to]))
proc.stdin.write(obj)
proc.stdin.write('\n'.encode())
proc.stdin.flush()
obj = proc.stdout.readline()
if not obj:
raise OSError('Preforked sub-process %r died' % proc.pid)
process_pool.append(proc)
return cPickle.loads(base64.b64decode(obj))
try:
copyfun = os.link
except NameError:
copyfun = shutil.copy2
def atomic_copy(orig, dest):
"""
Copy files to the cache, the operation is atomic for a given file
"""
global copyfun
tmp = dest + '.tmp'
up = os.path.dirname(dest)
try:
os.makedirs(up)
except OSError:
pass
try:
copyfun(orig, tmp)
except OSError as e:
if e.errno == errno.EXDEV:
copyfun = shutil.copy2
copyfun(orig, tmp)
else:
raise
os.rename(tmp, dest)
def lru_trim():
"""
the cache folders take the form:
`CACHE_DIR/0b/0b180f82246d726ece37c8ccd0fb1cde2650d7bfcf122ec1f169079a3bfc0ab9`
they are listed in order of last access, and then removed
until the amount of folders is within TRIM_MAX_FOLDERS and the total space
taken by files is less than EVICT_MAX_BYTES
"""
lst = []
for up in os.listdir(CACHE_DIR):
if len(up) == 2:
sub = os.path.join(CACHE_DIR, up)
for hval in os.listdir(sub):
path = os.path.join(sub, hval)
size = 0
for fname in os.listdir(path):
size += os.lstat(os.path.join(path, fname)).st_size
lst.append((os.stat(path).st_mtime, size, path))
lst.sort(key=lambda x: x[0])
lst.reverse()
tot = sum(x[1] for x in lst)
while tot > EVICT_MAX_BYTES or len(lst) > TRIM_MAX_FOLDERS:
_, tmp_size, path = lst.pop()
tot -= tmp_size
tmp = path + '.tmp'
try:
shutil.rmtree(tmp)
except OSError:
pass
try:
os.rename(path, tmp)
except OSError:
sys.stderr.write('Could not rename %r to %r' % (path, tmp))
else:
try:
shutil.rmtree(tmp)
except OSError:
sys.stderr.write('Could not remove %r' % tmp)
sys.stderr.write("Cache trimmed: %r bytes in %r folders left\n" % (tot, len(lst)))
def lru_evict():
"""
Reduce the cache size
"""
lockfile = os.path.join(CACHE_DIR, 'all.lock')
try:
st = os.stat(lockfile)
except EnvironmentError as e:
if e.errno == errno.ENOENT:
with open(lockfile, 'w') as f:
f.write('')
return
else:
raise
if st.st_mtime < time.time() - EVICT_INTERVAL_MINUTES * 60:
# check every EVICT_INTERVAL_MINUTES minutes if the cache is too big
# O_CLOEXEC is unnecessary because no processes are spawned
fd = os.open(lockfile, os.O_RDWR | os.O_CREAT, 0o755)
try:
try:
fcntl.flock(fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
except EnvironmentError:
sys.stderr.write('another process is running!\n')
pass
else:
# now do the actual cleanup
lru_trim()
os.utime(lockfile, None)
finally:
os.close(fd)
class netcache(object):
def __init__(self):
self.http = urllib3.PoolManager()
def url_of(self, sig, i):
return "%s/%s/%s" % (CACHE_DIR, sig, i)
def upload(self, file_path, sig, i):
url = self.url_of(sig, i)
with open(file_path, 'rb') as f:
file_data = f.read()
r = self.http.request('POST', url, timeout=60,
fields={ 'file': ('%s/%s' % (sig, i), file_data), })
if r.status >= 400:
raise OSError("Invalid status %r %r" % (url, r.status))
def download(self, file_path, sig, i):
url = self.url_of(sig, i)
with self.http.request('GET', url, preload_content=False, timeout=60) as inf:
if inf.status >= 400:
raise OSError("Invalid status %r %r" % (url, inf.status))
with open(file_path, 'wb') as out:
shutil.copyfileobj(inf, out)
def copy_to_cache(self, sig, files_from, files_to):
try:
for i, x in enumerate(files_from):
if not os.path.islink(x):
self.upload(x, sig, i)
except Exception:
return traceback.format_exc()
return OK
def copy_from_cache(self, sig, files_from, files_to):
try:
for i, x in enumerate(files_to):
self.download(x, sig, i)
except Exception:
return traceback.format_exc()
return OK
class fcache(object):
def __init__(self):
if not os.path.exists(CACHE_DIR):
os.makedirs(CACHE_DIR)
if not os.path.exists(CACHE_DIR):
raise ValueError('Could not initialize the cache directory')
def copy_to_cache(self, sig, files_from, files_to):
"""
Copy files to the cache, existing files are overwritten,
and the copy is atomic only for a given file, not for all files
that belong to a given task object
"""
try:
for i, x in enumerate(files_from):
dest = os.path.join(CACHE_DIR, sig[:2], sig, str(i))
atomic_copy(x, dest)
except Exception:
return traceback.format_exc()
else:
# attempt trimming if caching was successful:
# we may have things to trim!
lru_evict()
return OK
def copy_from_cache(self, sig, files_from, files_to):
"""
Copy files from the cache
"""
try:
for i, x in enumerate(files_to):
orig = os.path.join(CACHE_DIR, sig[:2], sig, str(i))
atomic_copy(orig, x)
# success! update the cache time
os.utime(os.path.join(CACHE_DIR, sig[:2], sig), None)
except Exception:
return traceback.format_exc()
return OK
class bucket_cache(object):
def bucket_copy(self, source, target):
if CACHE_DIR.startswith('s3://'):
cmd = ['aws', 's3', 'cp', source, target]
else:
cmd = ['gsutil', 'cp', source, target]
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
out, err = proc.communicate()
if proc.returncode:
raise OSError('Error copy %r to %r using: %r (exit %r):\n out:%s\n err:%s' % (
source, target, cmd, proc.returncode, out.decode(), err.decode()))
def copy_to_cache(self, sig, files_from, files_to):
try:
for i, x in enumerate(files_from):
dest = os.path.join(CACHE_DIR, sig[:2], sig, str(i))
self.bucket_copy(x, dest)
except Exception:
return traceback.format_exc()
return OK
def copy_from_cache(self, sig, files_from, files_to):
try:
for i, x in enumerate(files_to):
orig = os.path.join(CACHE_DIR, sig[:2], sig, str(i))
self.bucket_copy(orig, x)
except EnvironmentError:
return traceback.format_exc()
return OK
def loop(service):
"""
This function is run when this file is run as a standalone python script,
it assumes a parent process that will communicate the commands to it
as pickled-encoded tuples (one line per command)
The commands are to copy files to the cache or copy files from the
cache to a target destination
"""
# one operation is performed at a single time by a single process
# therefore stdin never has more than one line
txt = sys.stdin.readline().strip()
if not txt:
# parent process probably ended
sys.exit(1)
ret = OK
[sig, files_from, files_to] = cPickle.loads(base64.b64decode(txt))
if files_from:
# TODO return early when pushing files upstream
ret = service.copy_to_cache(sig, files_from, files_to)
elif files_to:
# the build process waits for workers to (possibly) obtain files from the cache
ret = service.copy_from_cache(sig, files_from, files_to)
else:
ret = "Invalid command"
obj = base64.b64encode(cPickle.dumps(ret))
sys.stdout.write(obj.decode())
sys.stdout.write('\n')
sys.stdout.flush()
if __name__ == '__main__':
if CACHE_DIR.startswith('s3://') or CACHE_DIR.startswith('gs://'):
service = bucket_cache()
elif CACHE_DIR.startswith('http'):
service = netcache()
else:
service = fcache()
while 1:
try:
loop(service)
except KeyboardInterrupt:
break
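The parent/worker protocol used by cache_command() and loop() above is one base64-encoded pickled triple per line; a standalone round-trip of that framing, detached from any real worker process:

import base64, pickle

msg = ['a1b2c3', ['/bld/foo.o'], []]   # sig, files_from, files_to
wire = base64.b64encode(pickle.dumps(msg)) + b'\n'
sig, files_from, files_to = pickle.loads(base64.b64decode(wire.strip()))
assert files_from == ['/bld/foo.o']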