1
0
mirror of https://github.com/samba-team/samba.git synced 2025-01-08 21:18:16 +03:00

third_party: Update waf to version 2.0.25

Signed-off-by: Andreas Schneider <asn@samba.org>
Reviewed-by: Stefan Metzmacher <metze@samba.org>
This commit is contained in:
Andreas Schneider 2023-01-04 09:39:45 +01:00 committed by Stefan Metzmacher
parent dd86376294
commit c29c487c5a
14 changed files with 249 additions and 96 deletions

2
buildtools/bin/waf vendored
View File

@ -32,7 +32,7 @@ POSSIBILITY OF SUCH DAMAGE.
import os, sys, inspect
VERSION="2.0.24"
VERSION="2.0.25"
REVISION="x"
GIT="x"
INSTALL="x"

View File

@ -38,7 +38,7 @@ LIB_PATH="shared"
os.environ['PYTHONUNBUFFERED'] = '1'
if Context.HEXVERSION not in (0x2001800,):
if Context.HEXVERSION not in (0x2001900,):
Logs.error('''
Please use the version of waf that comes with Samba, not
a system installed version. See http://wiki.samba.org/index.php/Waf

View File

@ -439,7 +439,7 @@ def find_program(self, filename, **kw):
var = kw.get('var', '')
if not var:
var = re.sub(r'[-.]', '_', filename[0].upper())
var = re.sub(r'\W', '_', filename[0].upper())
path_list = kw.get('path_list', '')
if path_list:

View File

@ -18,13 +18,13 @@ else:
import imp
# the following 3 constants are updated on each new release (do not touch)
HEXVERSION=0x2001800
HEXVERSION=0x2001900
"""Constant updated on new releases"""
WAFVERSION="2.0.24"
WAFVERSION="2.0.25"
"""Constant updated on new releases"""
WAFREVISION="1af97c71f5a6756abf36d0f78ed8fd551596d7cb"
WAFREVISION="2db0b41b2805cd5db3b55476c06b23c1e46d319f"
"""Git revision when the waf version is updated"""
WAFNAME="waf"

View File

@ -400,7 +400,7 @@ def feature(*k):
Decorator that registers a task generator method that will be executed when the
object attribute ``feature`` contains the corresponding key(s)::
from waflib.Task import feature
from waflib.TaskGen import feature
@feature('myfeature')
def myfunction(self):
print('that is my feature!')

View File

@ -111,7 +111,7 @@ def options(opt):
class MSVCVersion(object):
def __init__(self, ver):
m = re.search('^(.*)\s+(\d+[.]\d+)', ver)
m = re.search(r'^(.*)\s+(\d+[.]\d+)', ver)
if m:
self.name = m.group(1)
self.number = float(m.group(2))

View File

@ -53,7 +53,17 @@ py_compile.compile(sys.argv[1], sys.argv[2], sys.argv[3], True)
Piece of Python code used in :py:class:`waflib.Tools.python.pyo` and :py:class:`waflib.Tools.python.pyc` for byte-compiling python files
"""
DISTUTILS_IMP = ['from distutils.sysconfig import get_config_var, get_python_lib']
DISTUTILS_IMP = """
try:
from distutils.sysconfig import get_config_var, get_python_lib
except ImportError:
from sysconfig import get_config_var, get_path
def get_python_lib(*k, **kw):
keyword='platlib' if kw.get('plat_specific') else 'purelib'
if 'prefix' in kw:
return get_path(keyword, vars={'installed_base': kw['prefix'], 'platbase': kw['prefix']})
return get_path(keyword)
""".splitlines()
@before_method('process_source')
@feature('py')
@ -219,7 +229,7 @@ def get_python_variables(self, variables, imports=None):
try:
out = self.cmd_and_log(self.env.PYTHON + ['-c', '\n'.join(program)], env=os_env)
except Errors.WafError:
self.fatal('The distutils module is unusable: install "python-devel"?')
self.fatal('Could not run %r' % self.env.PYTHON)
self.to_log(out)
return_values = []
for s in out.splitlines():
@ -291,7 +301,8 @@ def python_cross_compile(self, features='pyembed pyext'):
@conf
def check_python_headers(conf, features='pyembed pyext'):
"""
Check for headers and libraries necessary to extend or embed python by using the module *distutils*.
Check for headers and libraries necessary to extend or embed python.
It may use the module *distutils* or sysconfig in newer Python versions.
On success the environment variables xxx_PYEXT and xxx_PYEMBED are added:
* PYEXT: for compiling python extensions
@ -439,7 +450,7 @@ def check_python_headers(conf, features='pyembed pyext'):
env.LIBPATH_PYEXT = env.LIBPATH_PYEMBED
env.LIB_PYEXT = env.LIB_PYEMBED
conf.to_log("Include path for Python extensions (found via distutils module): %r\n" % (dct['INCLUDEPY'],))
conf.to_log("Found an include path for Python extensions: %r\n" % (dct['INCLUDEPY'],))
env.INCLUDES_PYEXT = [dct['INCLUDEPY']]
env.INCLUDES_PYEMBED = [dct['INCLUDEPY']]
@ -452,15 +463,21 @@ def check_python_headers(conf, features='pyembed pyext'):
env.append_unique('CXXFLAGS_PYEXT', ['-fno-strict-aliasing'])
if env.CC_NAME == "msvc":
from distutils.msvccompiler import MSVCCompiler
dist_compiler = MSVCCompiler()
dist_compiler.initialize()
env.append_value('CFLAGS_PYEXT', dist_compiler.compile_options)
env.append_value('CXXFLAGS_PYEXT', dist_compiler.compile_options)
env.append_value('LINKFLAGS_PYEXT', dist_compiler.ldflags_shared)
try:
from distutils.msvccompiler import MSVCCompiler
except ImportError:
# From https://github.com/python/cpython/blob/main/Lib/distutils/msvccompiler.py
env.append_value('CFLAGS_PYEXT', [ '/nologo', '/Ox', '/MD', '/W3', '/GX', '/DNDEBUG'])
env.append_value('CXXFLAGS_PYEXT', [ '/nologo', '/Ox', '/MD', '/W3', '/GX', '/DNDEBUG'])
env.append_value('LINKFLAGS_PYEXT', ['/DLL', '/nologo', '/INCREMENTAL:NO'])
else:
dist_compiler = MSVCCompiler()
dist_compiler.initialize()
env.append_value('CFLAGS_PYEXT', dist_compiler.compile_options)
env.append_value('CXXFLAGS_PYEXT', dist_compiler.compile_options)
env.append_value('LINKFLAGS_PYEXT', dist_compiler.ldflags_shared)
# See if it compiles
conf.check(header_name='Python.h', define_name='HAVE_PYTHON_H', uselib='PYEMBED', fragment=FRAG, errmsg='Distutils not installed? Broken python installation? Get python-config now!')
conf.check(header_name='Python.h', define_name='HAVE_PYTHON_H', uselib='PYEMBED', fragment=FRAG, errmsg='Could not build a Python embedded interpreter')
@conf
def check_python_version(conf, minver=None):
@ -506,17 +523,9 @@ def check_python_version(conf, minver=None):
else:
# Finally, try to guess
if Utils.is_win32:
(python_LIBDEST, pydir) = conf.get_python_variables(
["get_config_var('LIBDEST') or ''",
"get_python_lib(standard_lib=0) or ''"])
(pydir,) = conf.get_python_variables(["get_python_lib(standard_lib=0) or ''"])
else:
python_LIBDEST = None
(pydir,) = conf.get_python_variables( ["get_python_lib(standard_lib=0, prefix=%r) or ''" % conf.env.PREFIX])
if python_LIBDEST is None:
if conf.env.LIBDIR:
python_LIBDEST = os.path.join(conf.env.LIBDIR, 'python' + pyver)
else:
python_LIBDEST = os.path.join(conf.env.PREFIX, 'lib', 'python' + pyver)
(pydir,) = conf.get_python_variables(["get_python_lib(standard_lib=0, prefix=%r) or ''" % conf.env.PREFIX])
if 'PYTHONARCHDIR' in conf.env:
# Check if --pythonarchdir was specified
@ -526,7 +535,7 @@ def check_python_version(conf, minver=None):
pyarchdir = conf.environ['PYTHONARCHDIR']
else:
# Finally, try to guess
(pyarchdir, ) = conf.get_python_variables( ["get_python_lib(plat_specific=1, standard_lib=0, prefix=%r) or ''" % conf.env.PREFIX])
(pyarchdir, ) = conf.get_python_variables(["get_python_lib(plat_specific=1, standard_lib=0, prefix=%r) or ''" % conf.env.PREFIX])
if not pyarchdir:
pyarchdir = pydir
@ -585,13 +594,12 @@ def check_python_module(conf, module_name, condition=''):
if ret == 'unknown version':
conf.fatal('Could not check the %s version' % module_name)
from distutils.version import LooseVersion
def num(*k):
if isinstance(k[0], int):
return LooseVersion('.'.join([str(x) for x in k]))
return Utils.loose_version('.'.join([str(x) for x in k]))
else:
return LooseVersion(k[0])
d = {'num': num, 'ver': LooseVersion(ret)}
return Utils.loose_version(k[0])
d = {'num': num, 'ver': Utils.loose_version(ret)}
ev = eval(condition, {}, d)
if not ev:
conf.fatal('The %s version does not satisfy the requirements' % module_name)

View File

@ -452,6 +452,8 @@ def console_encoding():
pass
else:
if codepage:
if 65001 == codepage and sys.version_info < (3, 3):
return 'utf-8'
return 'cp%d' % codepage
return sys.stdout.encoding or ('cp1252' if is_win32 else 'latin-1')
@ -868,6 +870,19 @@ def lib64():
return '64'
return ''
def loose_version(ver_str):
    """Split *ver_str* into a comparable list of ints and strings.

    Private replacement for distutils ``LooseVersion`` (see waf issue #2402):
    numeric runs become ints, letters stay strings, dots are dropped.
    """
    # NOTE(review): the raw string literally contains '\\d+' (a
    # backslash-d alternative), so splitting effectively happens on
    # dots and single letters while digit runs stay whole tokens.
    parts = []
    for token in re.split(r'([.]|\\d+|[a-zA-Z])', ver_str):
        try:
            parts.append(int(token))
        except ValueError:
            if token != '.':
                parts.append(token)
    return parts
def sane_path(p):
# private function for the time being!
return os.path.abspath(os.path.expanduser(p))

View File

@ -169,7 +169,7 @@ class cpplint(Task.Task):
global critical_errors
with cpplint_wrapper(get_cpplint_logger(self.env.CPPLINT_OUTPUT), self.env.CPPLINT_BREAK, self.env.CPPLINT_OUTPUT):
params = {key: str(self.env[key]) for key in self.env if 'CPPLINT_' in key}
if params['CPPLINT_OUTPUT'] is 'waf':
if params['CPPLINT_OUTPUT'] == 'waf':
params['CPPLINT_OUTPUT'] = 'emacs'
params['CPPLINT'] = self.env.get_flat('CPPLINT')
cmd = Utils.subst_vars(CPPLINT_STR, params)

View File

@ -0,0 +1,52 @@
#! /usr/bin/env python
# encoding: utf-8
# Detection of the Fujitsu Fortran compiler for ARM64FX
import re
from waflib.Tools import fc,fc_config,fc_scan
from waflib.Configure import conf
from waflib.Tools.compiler_fc import fc_compiler
fc_compiler['linux'].append('fc_fujitsu')
@conf
def find_fujitsu(conf):
    """Locate the Fujitsu Fortran compiler ``frtpx`` and record its traits."""
    compiler = conf.find_program(['frtpx'], var='FC')
    conf.get_fujitsu_version(compiler)
    conf.env.FC_NAME = 'FUJITSU'
    # Fujitsu FRT writes module files in lower case
    conf.env.FC_MOD_CAPITALIZATION = 'lower'
@conf
def fujitsu_flags(conf):
    """Clear Fortran flag variables that do not apply to ``frtpx``."""
    env = conf.env
    # no dedicated module-output / debug / shared-lib flags for this compiler
    for key in ('_FCMODOUTFLAGS', 'FCFLAGS_DEBUG', 'FCFLAGS_fcshlib', 'LINKFLAGS_fcshlib'):
        env[key] = []
    for key in ('FCSTLIB_MARKER', 'FCSHLIB_MARKER'):
        env[key] = ''
@conf
def get_fujitsu_version(conf, fc):
    """Detect the Fujitsu FRT compiler version and store it in ``conf.env``.

    Runs ``fc --version`` and parses ``frtpx (FRT) <major>.<minor>.`` from
    stdout, falling back to stderr.  On success sets
    ``conf.env['FC_VERSION']`` to a ``(major, minor)`` string tuple;
    returns False when the version cannot be determined.
    """
    version_re = re.compile(r"frtpx\s*\(FRT\)\s*(?P<major>\d+)\.(?P<minor>\d+)\.", re.I).search
    cmd = fc + ['--version']
    out, err = fc_config.getoutput(conf, cmd, stdin=False)
    if out:
        match = version_re(out)
    else:
        match = version_re(err)
    if not match:
        # The original also had an unreachable conf.fatal() call after this
        # return; the dead statement was removed, keeping the silent-failure
        # behavior that find_fujitsu relies on.
        return False
    k = match.groupdict()
    conf.env['FC_VERSION'] = (k['major'], k['minor'])
def configure(conf):
    """Configure the build to use the Fujitsu Fortran compiler."""
    conf.find_fujitsu()
    conf.find_program('ar', var='AR')
    conf.add_os_flags('ARFLAGS')
    # default archive flags when the environment provides none
    conf.env.ARFLAGS = conf.env.ARFLAGS or ['rcs']
    conf.fc_flags()
    conf.fc_add_flags()
    conf.fujitsu_flags()

View File

@ -17,7 +17,7 @@ Usage::
import os, re, threading
from waflib import Task, Logs, Utils, Errors
from waflib.Tools import c_preproc
from waflib.Tools import asm, c, c_preproc, cxx
from waflib.TaskGen import before_method, feature
lock = threading.Lock()

View File

@ -1,6 +1,6 @@
#!/usr/bin/env python
# encoding: utf-8
# Federico Pellegrin, 2016-2019 (fedepell) adapted for Python
# Federico Pellegrin, 2016-2022 (fedepell) adapted for Python
"""
This tool helps with finding Python Qt5 tools and libraries,
@ -137,7 +137,7 @@ class pyrcc(Task.Task):
Processes ``.qrc`` files
"""
color = 'BLUE'
run_str = '${QT_PYRCC} ${SRC} -o ${TGT}'
run_str = '${QT_PYRCC} ${QT_PYRCC_FLAGS} ${SRC} -o ${TGT}'
ext_out = ['.py']
def rcname(self):
@ -175,7 +175,7 @@ class ui5py(Task.Task):
Processes ``.ui`` files for python
"""
color = 'BLUE'
run_str = '${QT_PYUIC} ${SRC} -o ${TGT}'
run_str = '${QT_PYUIC} ${QT_PYUIC_FLAGS} ${SRC} -o ${TGT}'
ext_out = ['.py']
class ts2qm(Task.Task):
@ -216,17 +216,17 @@ def find_pyqt5_binaries(self):
self.find_program(['pyrcc5'], var='QT_PYRCC')
self.find_program(['pylupdate5'], var='QT_PYLUPDATE')
elif getattr(Options.options, 'want_pyside2', True):
self.find_program(['pyside2-uic'], var='QT_PYUIC')
self.find_program(['pyside2-rcc'], var='QT_PYRCC')
self.find_program(['pyside2-lupdate'], var='QT_PYLUPDATE')
self.find_program(['pyside2-uic','uic-qt5'], var='QT_PYUIC')
self.find_program(['pyside2-rcc','rcc-qt5'], var='QT_PYRCC')
self.find_program(['pyside2-lupdate','lupdate-qt5'], var='QT_PYLUPDATE')
elif getattr(Options.options, 'want_pyqt4', True):
self.find_program(['pyuic4'], var='QT_PYUIC')
self.find_program(['pyrcc4'], var='QT_PYRCC')
self.find_program(['pylupdate4'], var='QT_PYLUPDATE')
else:
self.find_program(['pyuic5','pyside2-uic','pyuic4'], var='QT_PYUIC')
self.find_program(['pyrcc5','pyside2-rcc','pyrcc4'], var='QT_PYRCC')
self.find_program(['pylupdate5', 'pyside2-lupdate','pylupdate4'], var='QT_PYLUPDATE')
self.find_program(['pyuic5','pyside2-uic','pyuic4','uic-qt5'], var='QT_PYUIC')
self.find_program(['pyrcc5','pyside2-rcc','pyrcc4','rcc-qt5'], var='QT_PYRCC')
self.find_program(['pylupdate5', 'pyside2-lupdate','pylupdate4','lupdate-qt5'], var='QT_PYLUPDATE')
if not env.QT_PYUIC:
self.fatal('cannot find the uic compiler for python for qt5')

View File

@ -1,7 +1,15 @@
"""Support for Sphinx documentation
This is a wrapper for sphinx-build program. Please note that sphinx-build supports only one output format which can
passed to build via sphinx_output_format attribute. The default output format is html.
This is a wrapper for sphinx-build program. Please note that sphinx-build supports only
one output format at a time, but the tool can create multiple tasks to handle more.
The output formats can be passed via the sphinx_output_format, which is an array of
strings. For backwards compatibility if only one output is needed, it can be passed
as a single string.
The default output format is html.
Specific formats can be installed in different directories by specifying the
install_path_<FORMAT> attribute. If not defined, the standard install_path
will be used instead.
Example wscript:
@ -13,7 +21,8 @@ def build(bld):
features='sphinx',
sphinx_source='sources', # path to source directory
sphinx_options='-a -v', # sphinx-build program additional options
sphinx_output_format='man' # output format of sphinx documentation
sphinx_output_format=['html', 'man'], # output format of sphinx documentation
install_path_man='${DOCDIR}/man' # put man pages in a specific directory
)
"""
@ -43,30 +52,36 @@ def build_sphinx(self):
if not self.sphinx_source:
self.bld.fatal('Can\'t find sphinx_source: %r' % self.sphinx_source)
# In the taskgen we have the complete list of formats
Utils.def_attrs(self, sphinx_output_format='html')
self.env.SPHINX_OUTPUT_FORMAT = self.sphinx_output_format
self.sphinx_output_format = Utils.to_list(self.sphinx_output_format)
self.env.SPHINX_OPTIONS = getattr(self, 'sphinx_options', [])
for source_file in self.sphinx_source.ant_glob('**/*'):
self.bld.add_manual_dependency(self.sphinx_source, source_file)
sphinx_build_task = self.create_task('SphinxBuildingTask')
sphinx_build_task.set_inputs(self.sphinx_source)
sphinx_build_task.set_outputs(self.path.get_bld())
for cfmt in self.sphinx_output_format:
sphinx_build_task = self.create_task('SphinxBuildingTask')
sphinx_build_task.set_inputs(self.sphinx_source)
# In task we keep the specific format this task is generating
sphinx_build_task.env.SPHINX_OUTPUT_FORMAT = cfmt
# the sphinx-build results are in <build + output_format> directory
self.sphinx_output_directory = self.path.get_bld().make_node(self.env.SPHINX_OUTPUT_FORMAT)
self.sphinx_output_directory.mkdir()
Utils.def_attrs(self, install_path=get_install_path(self))
# the sphinx-build results are in <build + output_format> directory
sphinx_build_task.sphinx_output_directory = self.path.get_bld().make_node(cfmt)
sphinx_build_task.set_outputs(sphinx_build_task.sphinx_output_directory)
sphinx_build_task.sphinx_output_directory.mkdir()
Utils.def_attrs(sphinx_build_task, install_path=getattr(self, 'install_path_' + cfmt, getattr(self, 'install_path', get_install_path(sphinx_build_task))))
def get_install_path(tg):
if tg.env.SPHINX_OUTPUT_FORMAT == 'man':
return tg.env.MANDIR
elif tg.env.SPHINX_OUTPUT_FORMAT == 'info':
return tg.env.INFODIR
def get_install_path(obj):
    """Return the default install directory for *obj*'s sphinx output format.

    ``man`` pages go to MANDIR, ``info`` documents to INFODIR, and every
    other format to DOCDIR.  The parameter was renamed from ``object``
    (which shadowed the builtin); all callers pass it positionally.
    """
    fmt = obj.env.SPHINX_OUTPUT_FORMAT
    if fmt == 'man':
        return obj.env.MANDIR
    if fmt == 'info':
        return obj.env.INFODIR
    return obj.env.DOCDIR
class SphinxBuildingTask(Task.Task):
@ -96,10 +111,10 @@ class SphinxBuildingTask(Task.Task):
def add_install(self):
nodes = self.generator.sphinx_output_directory.ant_glob('**/*', quiet=True)
nodes = self.sphinx_output_directory.ant_glob('**/*', quiet=True)
self.outputs += nodes
self.generator.add_install_files(install_to=self.generator.install_path,
self.generator.add_install_files(install_to=self.install_path,
install_from=nodes,
postpone=False,
cwd=self.generator.sphinx_output_directory,
cwd=self.sphinx_output_directory.make_node(self.env.SPHINX_OUTPUT_FORMAT),
relative_trick=True)

View File

@ -39,7 +39,14 @@ File cache specific options:
* WAFCACHE_TRIM_MAX_FOLDER: maximum amount of tasks to cache (1M)
* WAFCACHE_EVICT_MAX_BYTES: maximum amount of cache size in bytes (10GB)
* WAFCACHE_EVICT_INTERVAL_MINUTES: minimum time interval to try
and trim the cache (3 minutess)
and trim the cache (3 minutes)
Upload specific options:
* WAFCACHE_ASYNC_WORKERS: define a number of workers to upload results asynchronously
this may improve build performance with many/long file uploads
the default is unset (synchronous uploads)
* WAFCACHE_ASYNC_NOWAIT: do not wait for uploads to complete (default: False)
this requires asynchronous uploads to have an effect
Usage::
@ -49,10 +56,10 @@ Usage::
To troubleshoot::
waf clean build --zones=wafcache
waf clean build --zone=wafcache
"""
import atexit, base64, errno, fcntl, getpass, os, re, shutil, sys, time, traceback, urllib3, shlex
import atexit, base64, errno, fcntl, getpass, os, re, shutil, sys, time, threading, traceback, urllib3, shlex
try:
import subprocess32 as subprocess
except ImportError:
@ -71,6 +78,8 @@ EVICT_MAX_BYTES = int(os.environ.get('WAFCACHE_EVICT_MAX_BYTES', 10**10))
WAFCACHE_NO_PUSH = 1 if os.environ.get('WAFCACHE_NO_PUSH') else 0
WAFCACHE_VERBOSITY = 1 if os.environ.get('WAFCACHE_VERBOSITY') else 0
WAFCACHE_STATS = 1 if os.environ.get('WAFCACHE_STATS') else 0
WAFCACHE_ASYNC_WORKERS = os.environ.get('WAFCACHE_ASYNC_WORKERS')
WAFCACHE_ASYNC_NOWAIT = os.environ.get('WAFCACHE_ASYNC_NOWAIT')
OK = "ok"
re_waf_cmd = re.compile('(?P<src>%{SRC})|(?P<tgt>%{TGT})')
@ -99,7 +108,9 @@ def can_retrieve_cache(self):
self.generator.bld.cache_reqs += 1
files_to = [node.abspath() for node in self.outputs]
err = cache_command(ssig, [], files_to)
proc = get_process()
err = cache_command(proc, ssig, [], files_to)
process_pool.append(proc)
if err.startswith(OK):
if WAFCACHE_VERBOSITY:
Logs.pprint('CYAN', ' Fetched %r from cache' % files_to)
@ -132,23 +143,50 @@ def put_files_cache(self):
files_from.append(path)
bld = self.generator.bld
old_sig = self.signature()
for node in self.inputs:
try:
del node.ctx.cache_sig[node]
except KeyError:
pass
delattr(self, 'cache_sig')
sig = self.signature()
ssig = Utils.to_hex(self.uid() + sig)
err = cache_command(ssig, files_from, [])
def _async_put_files_cache(bld, ssig, files_from):
proc = get_process()
if WAFCACHE_ASYNC_WORKERS:
with bld.wafcache_lock:
if bld.wafcache_stop:
process_pool.append(proc)
return
bld.wafcache_procs.add(proc)
if err.startswith(OK):
if WAFCACHE_VERBOSITY:
Logs.pprint('CYAN', ' Successfully uploaded %s to cache' % files_from)
err = cache_command(proc, ssig, files_from, [])
process_pool.append(proc)
if err.startswith(OK):
if WAFCACHE_VERBOSITY:
Logs.pprint('CYAN', ' Successfully uploaded %s to cache' % files_from)
else:
Logs.debug('wafcache: Successfully uploaded %r to cache', files_from)
if WAFCACHE_STATS:
bld.cache_puts += 1
else:
Logs.debug('wafcache: Successfully uploaded %r to cache', files_from)
if WAFCACHE_STATS:
self.generator.bld.cache_puts += 1
if WAFCACHE_VERBOSITY:
Logs.pprint('RED', ' Error caching step results %s: %s' % (files_from, err))
else:
Logs.debug('wafcache: Error caching results %s: %s', files_from, err)
if old_sig == sig:
ssig = Utils.to_hex(self.uid() + sig)
if WAFCACHE_ASYNC_WORKERS:
fut = bld.wafcache_executor.submit(_async_put_files_cache, bld, ssig, files_from)
bld.wafcache_uploads.append(fut)
else:
_async_put_files_cache(bld, ssig, files_from)
else:
if WAFCACHE_VERBOSITY:
Logs.pprint('RED', ' Error caching step results %s: %s' % (files_from, err))
else:
Logs.debug('wafcache: Error caching results %s: %s', files_from, err)
Logs.debug('wafcache: skipped %r upload due to late input modifications %r', self.outputs, self.inputs)
bld.task_sigs[self.uid()] = self.cache_sig
@ -245,19 +283,45 @@ def get_process():
return subprocess.Popen(cmd, stdout=subprocess.PIPE, stdin=subprocess.PIPE, bufsize=0)
def atexit_pool():
for k in process_pool:
try:
os.kill(k.pid, 9)
except OSError:
pass
else:
k.wait()
for proc in process_pool:
proc.kill()
atexit.register(atexit_pool)
def build(bld):
"""
Called during the build process to enable file caching
"""
if WAFCACHE_ASYNC_WORKERS:
try:
num_workers = int(WAFCACHE_ASYNC_WORKERS)
except ValueError:
Logs.warn('Invalid WAFCACHE_ASYNC_WORKERS specified: %r' % WAFCACHE_ASYNC_WORKERS)
else:
from concurrent.futures import ThreadPoolExecutor
bld.wafcache_executor = ThreadPoolExecutor(max_workers=num_workers)
bld.wafcache_uploads = []
bld.wafcache_procs = set([])
bld.wafcache_stop = False
bld.wafcache_lock = threading.Lock()
def finalize_upload_async(bld):
if WAFCACHE_ASYNC_NOWAIT:
with bld.wafcache_lock:
bld.wafcache_stop = True
for fut in reversed(bld.wafcache_uploads):
fut.cancel()
for proc in bld.wafcache_procs:
proc.kill()
bld.wafcache_procs.clear()
else:
Logs.pprint('CYAN', '... waiting for wafcache uploads to complete (%s uploads)' % len(bld.wafcache_uploads))
bld.wafcache_executor.shutdown(wait=True)
bld.add_post_fun(finalize_upload_async)
if WAFCACHE_STATS:
# Init counter for statistics and hook to print results at the end
bld.cache_reqs = bld.cache_hits = bld.cache_puts = 0
@ -266,9 +330,8 @@ def build(bld):
hit_ratio = 0
if bld.cache_reqs > 0:
hit_ratio = (bld.cache_hits / bld.cache_reqs) * 100
Logs.pprint('CYAN', ' wafcache stats: requests: %s, hits, %s, ratio: %.2f%%, writes %s' %
Logs.pprint('CYAN', ' wafcache stats: %s requests, %s hits (ratio: %.2f%%), %s writes' %
(bld.cache_reqs, bld.cache_hits, hit_ratio, bld.cache_puts) )
bld.add_post_fun(printstats)
if process_pool:
@ -286,15 +349,13 @@ def build(bld):
for x in reversed(list(Task.classes.values())):
make_cached(x)
def cache_command(sig, files_from, files_to):
def cache_command(proc, sig, files_from, files_to):
"""
Create a command for cache worker processes, returns a pickled
base64-encoded tuple containing the task signature, a list of files to
cache and a list of files to get from cache (one of the lists
is assumed to be empty)
"""
proc = get_process()
obj = base64.b64encode(cPickle.dumps([sig, files_from, files_to]))
proc.stdin.write(obj)
proc.stdin.write('\n'.encode())
@ -302,7 +363,6 @@ def cache_command(sig, files_from, files_to):
obj = proc.stdout.readline()
if not obj:
raise OSError('Preforked sub-process %r died' % proc.pid)
process_pool.append(proc)
return cPickle.loads(base64.b64decode(obj))
try:
@ -456,7 +516,10 @@ class netcache(object):
class fcache(object):
def __init__(self):
if not os.path.exists(CACHE_DIR):
os.makedirs(CACHE_DIR)
try:
os.makedirs(CACHE_DIR)
except OSError:
pass
if not os.path.exists(CACHE_DIR):
raise ValueError('Could not initialize the cache directory')