mirror of
https://github.com/samba-team/samba.git
synced 2025-03-27 22:50:26 +03:00
thirdparty:waf: New files for waf 1.9.10
Signed-off-by: Thomas Nagy <tnagy@waf.io> Reviewed-by: Alexander Bokovoy <ab@samba.org> Reviewed-by: Andrew Bartlett <abartlet@samba.org>
This commit is contained in:
parent
48cf9ccd2b
commit
cbc6534682
297
third_party/waf/wafadmin/3rdparty/ParallelDebug.py
vendored
297
third_party/waf/wafadmin/3rdparty/ParallelDebug.py
vendored
@ -1,297 +0,0 @@
|
||||
#! /usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# Thomas Nagy, 2007-2010 (ita)
|
||||
|
||||
"""
|
||||
debugging helpers for parallel compilation, outputs
|
||||
a svg file in the build directory
|
||||
"""
|
||||
|
||||
import os, time, sys, threading
|
||||
try: from Queue import Queue
|
||||
except: from queue import Queue
|
||||
import Runner, Options, Utils, Task, Logs
|
||||
from Constants import *
|
||||
|
||||
#import random
|
||||
#random.seed(100)
|
||||
|
||||
def set_options(opt):
|
||||
opt.add_option('--dtitle', action='store', default='Parallel build representation for %r' % ' '.join(sys.argv),
|
||||
help='title for the svg diagram', dest='dtitle')
|
||||
opt.add_option('--dwidth', action='store', type='int', help='diagram width', default=1000, dest='dwidth')
|
||||
opt.add_option('--dtime', action='store', type='float', help='recording interval in seconds', default=0.009, dest='dtime')
|
||||
opt.add_option('--dband', action='store', type='int', help='band width', default=22, dest='dband')
|
||||
opt.add_option('--dmaxtime', action='store', type='float', help='maximum time, for drawing fair comparisons', default=0, dest='dmaxtime')
|
||||
|
||||
# red #ff4d4d
|
||||
# green #4da74d
|
||||
# lila #a751ff
|
||||
|
||||
color2code = {
|
||||
'GREEN' : '#4da74d',
|
||||
'YELLOW' : '#fefe44',
|
||||
'PINK' : '#a751ff',
|
||||
'RED' : '#cc1d1d',
|
||||
'BLUE' : '#6687bb',
|
||||
'CYAN' : '#34e2e2',
|
||||
|
||||
}
|
||||
|
||||
mp = {}
|
||||
info = [] # list of (text,color)
|
||||
|
||||
def map_to_color(name):
|
||||
if name in mp:
|
||||
return mp[name]
|
||||
try:
|
||||
cls = Task.TaskBase.classes[name]
|
||||
except KeyError:
|
||||
return color2code['RED']
|
||||
if cls.color in mp:
|
||||
return mp[cls.color]
|
||||
if cls.color in color2code:
|
||||
return color2code[cls.color]
|
||||
return color2code['RED']
|
||||
|
||||
def loop(self):
|
||||
while 1:
|
||||
tsk=Runner.TaskConsumer.ready.get()
|
||||
tsk.master.set_running(1, id(threading.currentThread()), tsk)
|
||||
Runner.process_task(tsk)
|
||||
tsk.master.set_running(-1, id(threading.currentThread()), tsk)
|
||||
Runner.TaskConsumer.loop = loop
|
||||
|
||||
|
||||
old_start = Runner.Parallel.start
|
||||
def do_start(self):
|
||||
print Options.options
|
||||
try:
|
||||
Options.options.dband
|
||||
except AttributeError:
|
||||
raise ValueError('use def options(opt): opt.load("parallel_debug")!')
|
||||
|
||||
self.taskinfo = Queue()
|
||||
old_start(self)
|
||||
process_colors(self)
|
||||
Runner.Parallel.start = do_start
|
||||
|
||||
def set_running(self, by, i, tsk):
|
||||
self.taskinfo.put( (i, id(tsk), time.time(), tsk.__class__.__name__, self.processed, self.count, by) )
|
||||
Runner.Parallel.set_running = set_running
|
||||
|
||||
def name2class(name):
|
||||
return name.replace(' ', '_').replace('.', '_')
|
||||
|
||||
def process_colors(producer):
|
||||
# first, cast the parameters
|
||||
tmp = []
|
||||
try:
|
||||
while True:
|
||||
tup = producer.taskinfo.get(False)
|
||||
tmp.append(list(tup))
|
||||
except:
|
||||
pass
|
||||
|
||||
try:
|
||||
ini = float(tmp[0][2])
|
||||
except:
|
||||
return
|
||||
|
||||
if not info:
|
||||
seen = []
|
||||
for x in tmp:
|
||||
name = x[3]
|
||||
if not name in seen:
|
||||
seen.append(name)
|
||||
else:
|
||||
continue
|
||||
|
||||
info.append((name, map_to_color(name)))
|
||||
info.sort(key=lambda x: x[0])
|
||||
|
||||
thread_count = 0
|
||||
acc = []
|
||||
for x in tmp:
|
||||
thread_count += x[6]
|
||||
acc.append("%d %d %f %r %d %d %d" % (x[0], x[1], x[2] - ini, x[3], x[4], x[5], thread_count))
|
||||
f = open('pdebug.dat', 'w')
|
||||
#Utils.write('\n'.join(acc))
|
||||
f.write('\n'.join(acc))
|
||||
|
||||
tmp = [lst[:2] + [float(lst[2]) - ini] + lst[3:] for lst in tmp]
|
||||
|
||||
st = {}
|
||||
for l in tmp:
|
||||
if not l[0] in st:
|
||||
st[l[0]] = len(st.keys())
|
||||
tmp = [ [st[lst[0]]] + lst[1:] for lst in tmp ]
|
||||
THREAD_AMOUNT = len(st.keys())
|
||||
|
||||
st = {}
|
||||
for l in tmp:
|
||||
if not l[1] in st:
|
||||
st[l[1]] = len(st.keys())
|
||||
tmp = [ [lst[0]] + [st[lst[1]]] + lst[2:] for lst in tmp ]
|
||||
|
||||
|
||||
BAND = Options.options.dband
|
||||
|
||||
seen = {}
|
||||
acc = []
|
||||
for x in range(len(tmp)):
|
||||
line = tmp[x]
|
||||
id = line[1]
|
||||
|
||||
if id in seen:
|
||||
continue
|
||||
seen[id] = True
|
||||
|
||||
begin = line[2]
|
||||
thread_id = line[0]
|
||||
for y in range(x + 1, len(tmp)):
|
||||
line = tmp[y]
|
||||
if line[1] == id:
|
||||
end = line[2]
|
||||
#print id, thread_id, begin, end
|
||||
#acc.append( ( 10*thread_id, 10*(thread_id+1), 10*begin, 10*end ) )
|
||||
acc.append( (BAND * begin, BAND*thread_id, BAND*end - BAND*begin, BAND, line[3]) )
|
||||
break
|
||||
|
||||
if Options.options.dmaxtime < 0.1:
|
||||
gwidth = 1
|
||||
for x in tmp:
|
||||
m = BAND * x[2]
|
||||
if m > gwidth:
|
||||
gwidth = m
|
||||
else:
|
||||
gwidth = BAND * Options.options.dmaxtime
|
||||
|
||||
ratio = float(Options.options.dwidth) / gwidth
|
||||
gwidth = Options.options.dwidth
|
||||
|
||||
gheight = BAND * (THREAD_AMOUNT + len(info) + 1.5)
|
||||
|
||||
out = []
|
||||
|
||||
out.append("""<?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"no\"?>
|
||||
<!DOCTYPE svg PUBLIC \"-//W3C//DTD SVG 1.0//EN\"
|
||||
\"http://www.w3.org/TR/2001/REC-SVG-20010904/DTD/svg10.dtd\">
|
||||
<svg xmlns=\"http://www.w3.org/2000/svg\" xmlns:xlink=\"http://www.w3.org/1999/xlink\" version=\"1.0\"
|
||||
x=\"%r\" y=\"%r\" width=\"%r\" height=\"%r\"
|
||||
id=\"svg602\" xml:space=\"preserve\">
|
||||
|
||||
<style type='text/css' media='screen'>
|
||||
g.over rect { stroke:#FF0000; fill-opacity:0.4 }
|
||||
</style>
|
||||
|
||||
<script type='text/javascript'><![CDATA[
|
||||
var svg = document.getElementsByTagName('svg')[0];
|
||||
var svgNS = svg.getAttribute('xmlns');
|
||||
svg.addEventListener('mouseover',function(e){
|
||||
var g = e.target.parentNode;
|
||||
var x = document.getElementById('r_'+g.id);
|
||||
if (x) {
|
||||
g.setAttribute('class', g.getAttribute('class')+' over');
|
||||
x.setAttribute('class', x.getAttribute('class')+' over');
|
||||
showInfo(e, g.id);
|
||||
}
|
||||
},false);
|
||||
svg.addEventListener('mouseout',function(e){
|
||||
var g = e.target.parentNode;
|
||||
var x = document.getElementById('r_'+g.id);
|
||||
if (x) {
|
||||
g.setAttribute('class',g.getAttribute('class').replace(' over',''));
|
||||
x.setAttribute('class',x.getAttribute('class').replace(' over',''));
|
||||
hideInfo(e);
|
||||
}
|
||||
},false);
|
||||
|
||||
function showInfo(evt, txt) {
|
||||
tooltip = document.getElementById('tooltip');
|
||||
|
||||
var t = document.getElementById('tooltiptext');
|
||||
t.firstChild.data = txt;
|
||||
|
||||
var x = evt.clientX+10;
|
||||
if (x > 200) { x -= t.getComputedTextLength() + 16; }
|
||||
var y = evt.clientY+30;
|
||||
tooltip.setAttribute("transform", "translate(" + x + "," + y + ")");
|
||||
tooltip.setAttributeNS(null,"visibility","visible");
|
||||
|
||||
var r = document.getElementById('tooltiprect');
|
||||
r.setAttribute('width', t.getComputedTextLength()+6)
|
||||
}
|
||||
|
||||
|
||||
function hideInfo(evt) {
|
||||
tooltip = document.getElementById('tooltip');
|
||||
tooltip.setAttributeNS(null,"visibility","hidden");
|
||||
}
|
||||
|
||||
]]></script>
|
||||
|
||||
<!-- inkscape requires a big rectangle or it will not export the pictures properly -->
|
||||
<rect
|
||||
x='%r' y='%r'
|
||||
width='%r' height='%r' z-index='10'
|
||||
style=\"font-size:10;fill:#ffffff;fill-opacity:0.01;fill-rule:evenodd;stroke:#ffffff;\"
|
||||
/>\n
|
||||
|
||||
""" % (0, 0, gwidth + 4, gheight + 4, 0, 0, gwidth + 4, gheight + 4))
|
||||
|
||||
# main title
|
||||
if Options.options.dtitle:
|
||||
out.append("""<text x="%d" y="%d" style="font-size:15px; text-anchor:middle; font-style:normal;font-weight:normal;fill:#000000;fill-opacity:1;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1;font-family:Bitstream Vera Sans">%s</text>
|
||||
""" % (gwidth/2, gheight - 5, Options.options.dtitle))
|
||||
|
||||
# the rectangles
|
||||
groups = {}
|
||||
for (x, y, w, h, clsname) in acc:
|
||||
try:
|
||||
groups[clsname].append((x, y, w, h))
|
||||
except:
|
||||
groups[clsname] = [(x, y, w, h)]
|
||||
|
||||
for cls in groups:
|
||||
|
||||
out.append("<g id='%s'>\n" % name2class(cls))
|
||||
|
||||
for (x, y, w, h) in groups[cls]:
|
||||
out.append(""" <rect
|
||||
x='%r' y='%r'
|
||||
width='%r' height='%r' z-index='11'
|
||||
style=\"font-size:10;fill:%s;fill-rule:evenodd;stroke:#000000;stroke-width:0.2px;\"
|
||||
/>\n""" % (2 + x*ratio, 2 + y, w*ratio, h, map_to_color(cls)))
|
||||
|
||||
out.append("</g>\n")
|
||||
|
||||
# output the caption
|
||||
cnt = THREAD_AMOUNT
|
||||
|
||||
for (text, color) in info:
|
||||
# caption box
|
||||
b = BAND/2
|
||||
out.append("""<g id='r_%s'><rect
|
||||
x='%r' y='%r'
|
||||
width='%r' height='%r'
|
||||
style=\"font-size:10;fill:%s;fill-rule:evenodd;stroke:#000000;stroke-width:0.2px;\"
|
||||
/>\n""" % (name2class(text), 2 + BAND, 5 + (cnt + 0.5) * BAND, b, b, color))
|
||||
|
||||
# caption text
|
||||
out.append("""<text
|
||||
style="font-size:12px;font-style:normal;font-weight:normal;fill:#000000;fill-opacity:1;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1;font-family:Bitstream Vera Sans"
|
||||
x="%r" y="%d">%s</text></g>\n""" % (2 + 2 * BAND, 5 + (cnt + 0.5) * BAND + 10, text))
|
||||
cnt += 1
|
||||
|
||||
out.append("""
|
||||
<g transform="translate(0,0)" visibility="hidden" id="tooltip">
|
||||
<rect id="tooltiprect" y="-15" x="-3" width="1" height="20" style="stroke:black;fill:#edefc2;stroke-width:1"/>
|
||||
<text id="tooltiptext" style="font-family:Arial; font-size:12;fill:black;"> </text>
|
||||
</g>""")
|
||||
|
||||
out.append("\n</svg>")
|
||||
|
||||
#node = producer.bld.path.make_node('pdebug.svg')
|
||||
f = open('pdebug.svg', 'w')
|
||||
f.write("".join(out))
|
182
third_party/waf/wafadmin/3rdparty/batched_cc.py
vendored
182
third_party/waf/wafadmin/3rdparty/batched_cc.py
vendored
@ -1,182 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# Thomas Nagy, 2006 (ita)
|
||||
|
||||
"""
|
||||
Batched builds - compile faster
|
||||
instead of compiling object files one by one, c/c++ compilers are often able to compile at once:
|
||||
cc -c ../file1.c ../file2.c ../file3.c
|
||||
|
||||
Files are output on the directory where the compiler is called, and dependencies are more difficult
|
||||
to track (do not run the command on all source files if only one file changes)
|
||||
|
||||
As such, we do as if the files were compiled one by one, but no command is actually run:
|
||||
replace each cc/cpp Task by a TaskSlave
|
||||
A new task called TaskMaster collects the signatures from each slave and finds out the command-line
|
||||
to run.
|
||||
|
||||
To set this up, the method ccroot::create_task is replaced by a new version, to enable batched builds
|
||||
it is only necessary to import this module in the configuration (no other change required)
|
||||
"""
|
||||
|
||||
MAX_BATCH = 50
|
||||
MAXPARALLEL = False
|
||||
|
||||
EXT_C = ['.c', '.cc', '.cpp', '.cxx']
|
||||
|
||||
import os, threading
|
||||
import TaskGen, Task, ccroot, Build, Logs
|
||||
from TaskGen import extension, feature, before
|
||||
from Constants import *
|
||||
|
||||
cc_str = '${CC} ${CCFLAGS} ${CPPFLAGS} ${_CCINCFLAGS} ${_CCDEFFLAGS} -c ${SRCLST}'
|
||||
cc_fun = Task.compile_fun_noshell('batched_cc', cc_str)[0]
|
||||
|
||||
cxx_str = '${CXX} ${CXXFLAGS} ${CPPFLAGS} ${_CXXINCFLAGS} ${_CXXDEFFLAGS} -c ${SRCLST}'
|
||||
cxx_fun = Task.compile_fun_noshell('batched_cxx', cxx_str)[0]
|
||||
|
||||
count = 70000
|
||||
class batch_task(Task.Task):
|
||||
color = 'RED'
|
||||
|
||||
after = 'cc cxx'
|
||||
before = 'cc_link cxx_link static_link'
|
||||
|
||||
def __str__(self):
|
||||
return '(batch compilation for %d slaves)\n' % len(self.slaves)
|
||||
|
||||
def __init__(self, *k, **kw):
|
||||
Task.Task.__init__(self, *k, **kw)
|
||||
self.slaves = []
|
||||
self.inputs = []
|
||||
self.hasrun = 0
|
||||
|
||||
global count
|
||||
count += 1
|
||||
self.idx = count
|
||||
|
||||
def add_slave(self, slave):
|
||||
self.slaves.append(slave)
|
||||
self.set_run_after(slave)
|
||||
|
||||
def runnable_status(self):
|
||||
for t in self.run_after:
|
||||
if not t.hasrun:
|
||||
return ASK_LATER
|
||||
|
||||
for t in self.slaves:
|
||||
#if t.executed:
|
||||
if t.hasrun != SKIPPED:
|
||||
return RUN_ME
|
||||
|
||||
return SKIP_ME
|
||||
|
||||
def run(self):
|
||||
outputs = []
|
||||
self.outputs = []
|
||||
|
||||
srclst = []
|
||||
slaves = []
|
||||
for t in self.slaves:
|
||||
if t.hasrun != SKIPPED:
|
||||
slaves.append(t)
|
||||
srclst.append(t.inputs[0].abspath(self.env))
|
||||
|
||||
self.env.SRCLST = srclst
|
||||
self.cwd = slaves[0].inputs[0].parent.abspath(self.env)
|
||||
|
||||
env = self.env
|
||||
app = env.append_unique
|
||||
cpppath_st = env['CPPPATH_ST']
|
||||
env._CCINCFLAGS = env.CXXINCFLAGS = []
|
||||
|
||||
# local flags come first
|
||||
# set the user-defined includes paths
|
||||
for i in env['INC_PATHS']:
|
||||
app('_CCINCFLAGS', cpppath_st % i.abspath())
|
||||
app('_CXXINCFLAGS', cpppath_st % i.abspath())
|
||||
app('_CCINCFLAGS', cpppath_st % i.abspath(env))
|
||||
app('_CXXINCFLAGS', cpppath_st % i.abspath(env))
|
||||
|
||||
# set the library include paths
|
||||
for i in env['CPPPATH']:
|
||||
app('_CCINCFLAGS', cpppath_st % i)
|
||||
app('_CXXINCFLAGS', cpppath_st % i)
|
||||
|
||||
if self.slaves[0].__class__.__name__ == 'cc':
|
||||
ret = cc_fun(self)
|
||||
else:
|
||||
ret = cxx_fun(self)
|
||||
|
||||
if ret:
|
||||
return ret
|
||||
|
||||
for t in slaves:
|
||||
t.old_post_run()
|
||||
|
||||
from TaskGen import extension, feature, after
|
||||
|
||||
import cc, cxx
|
||||
def wrap(fun):
|
||||
def foo(self, node):
|
||||
# we cannot control the extension, this sucks
|
||||
self.obj_ext = '.o'
|
||||
|
||||
task = fun(self, node)
|
||||
if not getattr(self, 'masters', None):
|
||||
self.masters = {}
|
||||
self.allmasters = []
|
||||
|
||||
if not node.parent.id in self.masters:
|
||||
m = self.masters[node.parent.id] = self.master = self.create_task('batch')
|
||||
self.allmasters.append(m)
|
||||
else:
|
||||
m = self.masters[node.parent.id]
|
||||
if len(m.slaves) > MAX_BATCH:
|
||||
m = self.masters[node.parent.id] = self.master = self.create_task('batch')
|
||||
self.allmasters.append(m)
|
||||
|
||||
m.add_slave(task)
|
||||
return task
|
||||
return foo
|
||||
|
||||
c_hook = wrap(cc.c_hook)
|
||||
extension(cc.EXT_CC)(c_hook)
|
||||
|
||||
cxx_hook = wrap(cxx.cxx_hook)
|
||||
extension(cxx.EXT_CXX)(cxx_hook)
|
||||
|
||||
|
||||
@feature('cprogram', 'cshlib', 'cstaticlib')
|
||||
@after('apply_link')
|
||||
def link_after_masters(self):
|
||||
if getattr(self, 'allmasters', None):
|
||||
for m in self.allmasters:
|
||||
self.link_task.set_run_after(m)
|
||||
|
||||
for c in ['cc', 'cxx']:
|
||||
t = Task.TaskBase.classes[c]
|
||||
def run(self):
|
||||
pass
|
||||
|
||||
def post_run(self):
|
||||
#self.executed=1
|
||||
pass
|
||||
|
||||
def can_retrieve_cache(self):
|
||||
if self.old_can_retrieve_cache():
|
||||
for m in self.generator.allmasters:
|
||||
try:
|
||||
m.slaves.remove(self)
|
||||
except ValueError:
|
||||
pass #this task wasn't included in that master
|
||||
return 1
|
||||
else:
|
||||
return None
|
||||
|
||||
setattr(t, 'oldrun', t.__dict__['run'])
|
||||
setattr(t, 'run', run)
|
||||
setattr(t, 'old_post_run', t.post_run)
|
||||
setattr(t, 'post_run', post_run)
|
||||
setattr(t, 'old_can_retrieve_cache', t.can_retrieve_cache)
|
||||
setattr(t, 'can_retrieve_cache', can_retrieve_cache)
|
342
third_party/waf/wafadmin/3rdparty/boost.py
vendored
342
third_party/waf/wafadmin/3rdparty/boost.py
vendored
@ -1,342 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
#
|
||||
# partially based on boost.py written by Gernot Vormayr
|
||||
# written by Ruediger Sonderfeld <ruediger@c-plusplus.de>, 2008
|
||||
# modified by Bjoern Michaelsen, 2008
|
||||
# modified by Luca Fossati, 2008
|
||||
# rewritten for waf 1.5.1, Thomas Nagy, 2008
|
||||
#
|
||||
#def set_options(opt):
|
||||
# opt.tool_options('boost')
|
||||
# # ...
|
||||
#
|
||||
#def configure(conf):
|
||||
# # ... (e.g. conf.check_tool('g++'))
|
||||
# conf.check_tool('boost')
|
||||
# conf.check_boost(lib='signals filesystem', static='onlystatic', score_version=(-1000, 1000), tag_minscore=1000)
|
||||
#
|
||||
#def build(bld):
|
||||
# bld(source='main.c', target='bar', uselib="BOOST BOOST_SYSTEM")
|
||||
#
|
||||
#ISSUES:
|
||||
# * find_includes should be called only once!
|
||||
# * support mandatory
|
||||
|
||||
######## boost update ###########
|
||||
## ITA: * the method get_boost_version_number does work
|
||||
## * the rest of the code has not really been tried
|
||||
# * make certain a demo is provided (in demos/adv for example)
|
||||
|
||||
# TODO: bad and underdocumented code -> boost.py will be removed in waf 1.6 to be rewritten later
|
||||
|
||||
import os.path, glob, types, re, sys
|
||||
import Configure, config_c, Options, Utils, Logs
|
||||
from Logs import warn, debug
|
||||
from Configure import conf
|
||||
|
||||
boost_code = '''
|
||||
#include <iostream>
|
||||
#include <boost/version.hpp>
|
||||
int main() { std::cout << BOOST_VERSION << std::endl; }
|
||||
'''
|
||||
|
||||
boost_libpath = ['/usr/lib', '/usr/local/lib', '/opt/local/lib', '/sw/lib', '/lib']
|
||||
boost_cpppath = ['/usr/include', '/usr/local/include', '/opt/local/include', '/sw/include']
|
||||
|
||||
STATIC_NOSTATIC = 'nostatic'
|
||||
STATIC_BOTH = 'both'
|
||||
STATIC_ONLYSTATIC = 'onlystatic'
|
||||
|
||||
is_versiontag = re.compile('^\d+_\d+_?\d*$')
|
||||
is_threadingtag = re.compile('^mt$')
|
||||
is_abitag = re.compile('^[sgydpn]+$')
|
||||
is_toolsettag = re.compile('^(acc|borland|como|cw|dmc|darwin|gcc|hp_cxx|intel|kylix|vc|mgw|qcc|sun|vacpp)\d*$')
|
||||
is_pythontag=re.compile('^py[0-9]{2}$')
|
||||
|
||||
def set_options(opt):
|
||||
opt.add_option('--boost-includes', type='string', default='', dest='boostincludes', help='path to the boost directory where the includes are e.g. /usr/local/include/boost-1_35')
|
||||
opt.add_option('--boost-libs', type='string', default='', dest='boostlibs', help='path to the directory where the boost libs are e.g. /usr/local/lib')
|
||||
|
||||
def string_to_version(s):
|
||||
version = s.split('.')
|
||||
if len(version) < 3: return 0
|
||||
return int(version[0])*100000 + int(version[1])*100 + int(version[2])
|
||||
|
||||
def version_string(version):
|
||||
major = version / 100000
|
||||
minor = version / 100 % 1000
|
||||
minor_minor = version % 100
|
||||
if minor_minor == 0:
|
||||
return "%d_%d" % (major, minor)
|
||||
else:
|
||||
return "%d_%d_%d" % (major, minor, minor_minor)
|
||||
|
||||
def libfiles(lib, pattern, lib_paths):
|
||||
result = []
|
||||
for lib_path in lib_paths:
|
||||
libname = pattern % ('boost_%s[!_]*' % lib)
|
||||
result += glob.glob(os.path.join(lib_path, libname))
|
||||
return result
|
||||
|
||||
@conf
|
||||
def get_boost_version_number(self, dir):
|
||||
"""silently retrieve the boost version number"""
|
||||
try:
|
||||
return self.run_c_code(compiler='cxx', code=boost_code, includes=dir, execute=1, env=self.env.copy(), type='cprogram', compile_mode='cxx', compile_filename='test.cpp')
|
||||
except Configure.ConfigurationError, e:
|
||||
return -1
|
||||
|
||||
def set_default(kw, var, val):
|
||||
if not var in kw:
|
||||
kw[var] = val
|
||||
|
||||
def tags_score(tags, kw):
|
||||
"""
|
||||
checks library tags
|
||||
|
||||
see http://www.boost.org/doc/libs/1_35_0/more/getting_started/unix-variants.html 6.1
|
||||
"""
|
||||
score = 0
|
||||
needed_tags = {
|
||||
'threading': kw['tag_threading'],
|
||||
'abi': kw['tag_abi'],
|
||||
'toolset': kw['tag_toolset'],
|
||||
'version': kw['tag_version'],
|
||||
'python': kw['tag_python']
|
||||
}
|
||||
|
||||
if kw['tag_toolset'] is None:
|
||||
v = kw['env']
|
||||
toolset = v['CXX_NAME']
|
||||
if v['CXX_VERSION']:
|
||||
version_no = v['CXX_VERSION'].split('.')
|
||||
toolset += version_no[0]
|
||||
if len(version_no) > 1:
|
||||
toolset += version_no[1]
|
||||
needed_tags['toolset'] = toolset
|
||||
|
||||
found_tags = {}
|
||||
for tag in tags:
|
||||
if is_versiontag.match(tag): found_tags['version'] = tag
|
||||
if is_threadingtag.match(tag): found_tags['threading'] = tag
|
||||
if is_abitag.match(tag): found_tags['abi'] = tag
|
||||
if is_toolsettag.match(tag): found_tags['toolset'] = tag
|
||||
if is_pythontag.match(tag): found_tags['python'] = tag
|
||||
|
||||
for tagname in needed_tags.iterkeys():
|
||||
if needed_tags[tagname] is not None and tagname in found_tags:
|
||||
if re.compile(needed_tags[tagname]).match(found_tags[tagname]):
|
||||
score += kw['score_' + tagname][0]
|
||||
else:
|
||||
score += kw['score_' + tagname][1]
|
||||
return score
|
||||
|
||||
@conf
|
||||
def validate_boost(self, kw):
|
||||
ver = kw.get('version', '')
|
||||
|
||||
for x in 'min_version max_version version'.split():
|
||||
set_default(kw, x, ver)
|
||||
|
||||
set_default(kw, 'lib', '')
|
||||
kw['lib'] = Utils.to_list(kw['lib'])
|
||||
|
||||
set_default(kw, 'env', self.env)
|
||||
|
||||
set_default(kw, 'libpath', boost_libpath)
|
||||
set_default(kw, 'cpppath', boost_cpppath)
|
||||
|
||||
for x in 'tag_threading tag_version tag_toolset'.split():
|
||||
set_default(kw, x, None)
|
||||
set_default(kw, 'tag_abi', '^[^d]*$')
|
||||
|
||||
set_default(kw, 'python', str(sys.version_info[0]) + str(sys.version_info[1]) )
|
||||
set_default(kw, 'tag_python', '^py' + kw['python'] + '$')
|
||||
|
||||
set_default(kw, 'score_threading', (10, -10))
|
||||
set_default(kw, 'score_abi', (10, -10))
|
||||
set_default(kw, 'score_python', (10,-10))
|
||||
set_default(kw, 'score_toolset', (1, -1))
|
||||
set_default(kw, 'score_version', (100, -100))
|
||||
|
||||
set_default(kw, 'score_min', 0)
|
||||
set_default(kw, 'static', STATIC_NOSTATIC)
|
||||
set_default(kw, 'found_includes', False)
|
||||
set_default(kw, 'min_score', 0)
|
||||
|
||||
set_default(kw, 'errmsg', 'not found')
|
||||
set_default(kw, 'okmsg', 'ok')
|
||||
|
||||
@conf
|
||||
def find_boost_includes(self, kw):
|
||||
"""
|
||||
check every path in kw['cpppath'] for subdir
|
||||
that either starts with boost- or is named boost.
|
||||
|
||||
Then the version is checked and selected accordingly to
|
||||
min_version/max_version. The highest possible version number is
|
||||
selected!
|
||||
|
||||
If no versiontag is set the versiontag is set accordingly to the
|
||||
selected library and CPPPATH_BOOST is set.
|
||||
"""
|
||||
boostPath = getattr(Options.options, 'boostincludes', '')
|
||||
if boostPath:
|
||||
boostPath = [os.path.normpath(os.path.expandvars(os.path.expanduser(boostPath)))]
|
||||
else:
|
||||
boostPath = Utils.to_list(kw['cpppath'])
|
||||
|
||||
min_version = string_to_version(kw.get('min_version', ''))
|
||||
max_version = string_to_version(kw.get('max_version', '')) or (sys.maxint - 1)
|
||||
|
||||
version = 0
|
||||
for include_path in boostPath:
|
||||
boost_paths = [p for p in glob.glob(os.path.join(include_path, 'boost*')) if os.path.isdir(p)]
|
||||
debug('BOOST Paths: %r' % boost_paths)
|
||||
for path in boost_paths:
|
||||
pathname = os.path.split(path)[-1]
|
||||
ret = -1
|
||||
if pathname == 'boost':
|
||||
path = include_path
|
||||
ret = self.get_boost_version_number(path)
|
||||
elif pathname.startswith('boost-'):
|
||||
ret = self.get_boost_version_number(path)
|
||||
ret = int(ret)
|
||||
|
||||
if ret != -1 and ret >= min_version and ret <= max_version and ret > version:
|
||||
boost_path = path
|
||||
version = ret
|
||||
if not version:
|
||||
self.fatal('boost headers not found! (required version min: %s max: %s)'
|
||||
% (kw['min_version'], kw['max_version']))
|
||||
return False
|
||||
|
||||
found_version = version_string(version)
|
||||
versiontag = '^' + found_version + '$'
|
||||
if kw['tag_version'] is None:
|
||||
kw['tag_version'] = versiontag
|
||||
elif kw['tag_version'] != versiontag:
|
||||
warn('boost header version %r and tag_version %r do not match!' % (versiontag, kw['tag_version']))
|
||||
env = self.env
|
||||
env['CPPPATH_BOOST'] = boost_path
|
||||
env['BOOST_VERSION'] = found_version
|
||||
self.found_includes = 1
|
||||
ret = 'Version %s (%s)' % (found_version, boost_path)
|
||||
return ret
|
||||
|
||||
@conf
|
||||
def find_boost_library(self, lib, kw):
|
||||
|
||||
def find_library_from_list(lib, files):
|
||||
lib_pattern = re.compile('.*boost_(.*?)\..*')
|
||||
result = (None, None)
|
||||
resultscore = kw['min_score'] - 1
|
||||
for file in files:
|
||||
m = lib_pattern.search(file, 1)
|
||||
if m:
|
||||
libname = m.group(1)
|
||||
libtags = libname.split('-')[1:]
|
||||
currentscore = tags_score(libtags, kw)
|
||||
if currentscore > resultscore:
|
||||
result = (libname, file)
|
||||
resultscore = currentscore
|
||||
return result
|
||||
|
||||
lib_paths = getattr(Options.options, 'boostlibs', '')
|
||||
if lib_paths:
|
||||
lib_paths = [os.path.normpath(os.path.expandvars(os.path.expanduser(lib_paths)))]
|
||||
else:
|
||||
lib_paths = Utils.to_list(kw['libpath'])
|
||||
|
||||
v = kw.get('env', self.env)
|
||||
|
||||
(libname, file) = (None, None)
|
||||
if kw['static'] in [STATIC_NOSTATIC, STATIC_BOTH]:
|
||||
st_env_prefix = 'LIB'
|
||||
files = libfiles(lib, v['shlib_PATTERN'], lib_paths)
|
||||
(libname, file) = find_library_from_list(lib, files)
|
||||
if libname is None and kw['static'] in [STATIC_ONLYSTATIC, STATIC_BOTH]:
|
||||
st_env_prefix = 'STATICLIB'
|
||||
staticLibPattern = v['staticlib_PATTERN']
|
||||
if self.env['CC_NAME'] == 'msvc':
|
||||
staticLibPattern = 'lib' + staticLibPattern
|
||||
files = libfiles(lib, staticLibPattern, lib_paths)
|
||||
(libname, file) = find_library_from_list(lib, files)
|
||||
if libname is not None:
|
||||
v['LIBPATH_BOOST_' + lib.upper()] = [os.path.split(file)[0]]
|
||||
if self.env['CC_NAME'] == 'msvc' and os.path.splitext(file)[1] == '.lib':
|
||||
v[st_env_prefix + '_BOOST_' + lib.upper()] = ['libboost_'+libname]
|
||||
else:
|
||||
v[st_env_prefix + '_BOOST_' + lib.upper()] = ['boost_'+libname]
|
||||
return
|
||||
self.fatal('lib boost_' + lib + ' not found!')
|
||||
|
||||
@conf
|
||||
def check_boost(self, *k, **kw):
|
||||
"""
|
||||
This should be the main entry point
|
||||
|
||||
- min_version
|
||||
- max_version
|
||||
- version
|
||||
- include_path
|
||||
- lib_path
|
||||
- lib
|
||||
- toolsettag - None or a regexp
|
||||
- threadingtag - None or a regexp
|
||||
- abitag - None or a regexp
|
||||
- versiontag - WARNING: you should rather use version or min_version/max_version
|
||||
- static - look for static libs (values:
|
||||
'nostatic' or STATIC_NOSTATIC - ignore static libs (default)
|
||||
'both' or STATIC_BOTH - find static libs, too
|
||||
'onlystatic' or STATIC_ONLYSTATIC - find only static libs
|
||||
- score_version
|
||||
- score_abi
|
||||
- scores_threading
|
||||
- score_toolset
|
||||
* the scores are tuples (match_score, nomatch_score)
|
||||
match_score is the added to the score if the tag is matched
|
||||
nomatch_score is added when a tag is found and does not match
|
||||
- min_score
|
||||
"""
|
||||
|
||||
if not self.env['CXX']:
|
||||
self.fatal('load a c++ compiler tool first, for example conf.check_tool("g++")')
|
||||
self.validate_boost(kw)
|
||||
ret = None
|
||||
try:
|
||||
if not kw.get('found_includes', None):
|
||||
self.check_message_1(kw.get('msg_includes', 'boost headers'))
|
||||
ret = self.find_boost_includes(kw)
|
||||
|
||||
except Configure.ConfigurationError, e:
|
||||
if 'errmsg' in kw:
|
||||
self.check_message_2(kw['errmsg'], 'YELLOW')
|
||||
if 'mandatory' in kw:
|
||||
if Logs.verbose > 1:
|
||||
raise
|
||||
else:
|
||||
self.fatal('the configuration failed (see %r)' % self.log.name)
|
||||
else:
|
||||
if 'okmsg' in kw:
|
||||
self.check_message_2(kw.get('okmsg_includes', ret))
|
||||
|
||||
for lib in kw['lib']:
|
||||
self.check_message_1('library boost_'+lib)
|
||||
try:
|
||||
self.find_boost_library(lib, kw)
|
||||
except Configure.ConfigurationError, e:
|
||||
ret = False
|
||||
if 'errmsg' in kw:
|
||||
self.check_message_2(kw['errmsg'], 'YELLOW')
|
||||
if 'mandatory' in kw:
|
||||
if Logs.verbose > 1:
|
||||
raise
|
||||
else:
|
||||
self.fatal('the configuration failed (see %r)' % self.log.name)
|
||||
else:
|
||||
if 'okmsg' in kw:
|
||||
self.check_message_2(kw['okmsg'])
|
||||
|
||||
return ret
|
@ -1,53 +0,0 @@
|
||||
#! /usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# Thomas Nagy, 2015
|
||||
|
||||
"""
|
||||
Force tasks to use file timestamps to force partial rebuilds when touch-ing build files
|
||||
|
||||
touch out/libfoo.a
|
||||
... rebuild what depends on libfoo.a
|
||||
|
||||
to use::
|
||||
def options(opt):
|
||||
opt.tool_options('build_file_tracker')
|
||||
"""
|
||||
|
||||
import os
|
||||
import Task, Utils
|
||||
|
||||
def signature(self):
|
||||
try: return self.cache_sig[0]
|
||||
except AttributeError: pass
|
||||
|
||||
self.m = Utils.md5()
|
||||
|
||||
# explicit deps
|
||||
exp_sig = self.sig_explicit_deps()
|
||||
|
||||
# env vars
|
||||
var_sig = self.sig_vars()
|
||||
|
||||
# implicit deps
|
||||
imp_sig = Task.SIG_NIL
|
||||
if self.scan:
|
||||
try:
|
||||
imp_sig = self.sig_implicit_deps()
|
||||
except ValueError:
|
||||
return self.signature()
|
||||
|
||||
# timestamp dependency on build files only (source files are hashed)
|
||||
buf = []
|
||||
for k in self.inputs + getattr(self, 'dep_nodes', []) + self.generator.bld.node_deps.get(self.unique_id(), []):
|
||||
if k.id & 3 == 3:
|
||||
t = os.stat(k.abspath(self.env)).st_mtime
|
||||
buf.append(t)
|
||||
self.m.update(str(buf))
|
||||
|
||||
# we now have the signature (first element) and the details (for debugging)
|
||||
ret = self.m.digest()
|
||||
self.cache_sig = (ret, exp_sig, imp_sig, var_sig)
|
||||
return ret
|
||||
|
||||
Task.Task.signature_bak = Task.Task.signature # unused, kept just in case
|
||||
Task.Task.signature = signature # overridden
|
26
third_party/waf/wafadmin/3rdparty/fluid.py
vendored
26
third_party/waf/wafadmin/3rdparty/fluid.py
vendored
@ -1,26 +0,0 @@
|
||||
#!/usr/bin/python
|
||||
# encoding: utf-8
|
||||
# Grygoriy Fuchedzhy 2009
|
||||
|
||||
"""
|
||||
Compile fluid files (fltk graphic library). Use the 'fluid' feature in conjuction with the 'cxx' feature.
|
||||
"""
|
||||
|
||||
import Task
|
||||
from TaskGen import extension
|
||||
|
||||
Task.simple_task_type('fluid', '${FLUID} -c -o ${TGT[0].abspath(env)} -h ${TGT[1].abspath(env)} ${SRC}', 'BLUE', shell=False, ext_out='.cxx')
|
||||
|
||||
@extension('.fl')
|
||||
def fluid(self, node):
|
||||
"""add the .fl to the source list; the cxx file generated will be compiled when possible"""
|
||||
cpp = node.change_ext('.cpp')
|
||||
hpp = node.change_ext('.hpp')
|
||||
self.create_task('fluid', node, [cpp, hpp])
|
||||
|
||||
if 'cxx' in self.features:
|
||||
self.allnodes.append(cpp)
|
||||
|
||||
def detect(conf):
|
||||
fluid = conf.find_program('fluid', var='FLUID', mandatory=True)
|
||||
conf.check_cfg(path='fltk-config', package='', args='--cxxflags --ldflags', uselib_store='FLTK', mandatory=True)
|
127
third_party/waf/wafadmin/3rdparty/gccdeps.py
vendored
127
third_party/waf/wafadmin/3rdparty/gccdeps.py
vendored
@ -1,127 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# Thomas Nagy, 2008-2010 (ita)
|
||||
|
||||
"""
|
||||
Execute the tasks with gcc -MD, read the dependencies from the .d file
|
||||
and prepare the dependency calculation for the next run
|
||||
"""
|
||||
|
||||
import os, re, threading
|
||||
import Task, Logs, Utils, preproc
|
||||
from TaskGen import before, after, feature
|
||||
|
||||
lock = threading.Lock()
|
||||
|
||||
preprocessor_flag = '-MD'
|
||||
|
||||
@feature('cc', 'c')
|
||||
@before('apply_core')
|
||||
def add_mmd_cc(self):
|
||||
if self.env.get_flat('CCFLAGS').find(preprocessor_flag) < 0:
|
||||
self.env.append_value('CCFLAGS', preprocessor_flag)
|
||||
|
||||
@feature('cxx')
|
||||
@before('apply_core')
|
||||
def add_mmd_cxx(self):
|
||||
if self.env.get_flat('CXXFLAGS').find(preprocessor_flag) < 0:
|
||||
self.env.append_value('CXXFLAGS', preprocessor_flag)
|
||||
|
||||
def scan(self):
|
||||
"the scanner does not do anything initially"
|
||||
nodes = self.generator.bld.node_deps.get(self.unique_id(), [])
|
||||
names = []
|
||||
return (nodes, names)
|
||||
|
||||
re_o = re.compile("\.o$")
|
||||
re_src = re.compile("^(\.\.)[\\/](.*)$")
|
||||
|
||||
def post_run(self):
|
||||
# The following code is executed by threads, it is not safe, so a lock is needed...
|
||||
|
||||
if getattr(self, 'cached', None):
|
||||
return Task.Task.post_run(self)
|
||||
|
||||
name = self.outputs[0].abspath(self.env)
|
||||
name = re_o.sub('.d', name)
|
||||
txt = Utils.readf(name)
|
||||
#os.unlink(name)
|
||||
|
||||
txt = txt.replace('\\\n', '')
|
||||
|
||||
lst = txt.strip().split(':')
|
||||
val = ":".join(lst[1:])
|
||||
val = val.split()
|
||||
|
||||
nodes = []
|
||||
bld = self.generator.bld
|
||||
|
||||
f = re.compile("^("+self.env.variant()+"|\.\.)[\\/](.*)$")
|
||||
for x in val:
|
||||
if os.path.isabs(x):
|
||||
|
||||
if not preproc.go_absolute:
|
||||
continue
|
||||
|
||||
lock.acquire()
|
||||
try:
|
||||
node = bld.root.find_resource(x)
|
||||
finally:
|
||||
lock.release()
|
||||
else:
|
||||
g = re.search(re_src, x)
|
||||
if g:
|
||||
x = g.group(2)
|
||||
lock.acquire()
|
||||
try:
|
||||
node = bld.bldnode.parent.find_resource(x)
|
||||
finally:
|
||||
lock.release()
|
||||
else:
|
||||
g = re.search(f, x)
|
||||
if g:
|
||||
x = g.group(2)
|
||||
lock.acquire()
|
||||
try:
|
||||
node = bld.srcnode.find_resource(x)
|
||||
finally:
|
||||
lock.release()
|
||||
|
||||
if id(node) == id(self.inputs[0]):
|
||||
# ignore the source file, it is already in the dependencies
|
||||
# this way, successful config tests may be retrieved from the cache
|
||||
continue
|
||||
|
||||
if not node:
|
||||
raise ValueError('could not find %r for %r' % (x, self))
|
||||
else:
|
||||
nodes.append(node)
|
||||
|
||||
Logs.debug('deps: real scanner for %s returned %s' % (str(self), str(nodes)))
|
||||
|
||||
bld.node_deps[self.unique_id()] = nodes
|
||||
bld.raw_deps[self.unique_id()] = []
|
||||
|
||||
try:
|
||||
del self.cache_sig
|
||||
except:
|
||||
pass
|
||||
|
||||
Task.Task.post_run(self)
|
||||
|
||||
import Constants, Utils
|
||||
def sig_implicit_deps(self):
|
||||
try:
|
||||
return Task.Task.sig_implicit_deps(self)
|
||||
except Utils.WafError:
|
||||
return Constants.SIG_NIL
|
||||
|
||||
for name in 'cc cxx'.split():
|
||||
try:
|
||||
cls = Task.TaskBase.classes[name]
|
||||
except KeyError:
|
||||
pass
|
||||
else:
|
||||
cls.post_run = post_run
|
||||
cls.scan = scan
|
||||
cls.sig_implicit_deps = sig_implicit_deps
|
110
third_party/waf/wafadmin/3rdparty/go.py
vendored
110
third_party/waf/wafadmin/3rdparty/go.py
vendored
@ -1,110 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# go.py - Waf tool for the Go programming language
|
||||
# By: Tom Wambold <tom5760@gmail.com>
|
||||
|
||||
import platform, os
|
||||
|
||||
import Task
|
||||
import Utils
|
||||
from TaskGen import feature, extension, after
|
||||
|
||||
Task.simple_task_type('gocompile', '${GOC} ${GOCFLAGS} -o ${TGT} ${SRC}', shell=False)
|
||||
Task.simple_task_type('gopack', '${GOP} grc ${TGT} ${SRC}', shell=False)
|
||||
Task.simple_task_type('golink', '${GOL} ${GOLFLAGS} -o ${TGT} ${SRC}', shell=False)
|
||||
|
||||
def detect(conf):
|
||||
|
||||
def set_def(var, val):
|
||||
if not conf.env[var]:
|
||||
conf.env[var] = val
|
||||
|
||||
goarch = os.getenv("GOARCH")
|
||||
|
||||
if goarch == '386':
|
||||
set_def('GO_PLATFORM', 'i386')
|
||||
elif goarch == 'amd64':
|
||||
set_def('GO_PLATFORM', 'x86_64')
|
||||
elif goarch == 'arm':
|
||||
set_def('GO_PLATFORM', 'arm')
|
||||
else:
|
||||
set_def('GO_PLATFORM', platform.machine())
|
||||
|
||||
if conf.env.GO_PLATFORM == 'x86_64':
|
||||
set_def('GO_COMPILER', '6g')
|
||||
set_def('GO_LINKER', '6l')
|
||||
set_def('GO_EXTENSION', '.6')
|
||||
elif conf.env.GO_PLATFORM in ['i386', 'i486', 'i586', 'i686']:
|
||||
set_def('GO_COMPILER', '8g')
|
||||
set_def('GO_LINKER', '8l')
|
||||
set_def('GO_EXTENSION', '.8')
|
||||
elif conf.env.GO_PLATFORM == 'arm':
|
||||
set_def('GO_COMPILER', '5g')
|
||||
set_def('GO_LINKER', '5l')
|
||||
set_def('GO_EXTENSION', '.5')
|
||||
|
||||
if not (conf.env.GO_COMPILER or conf.env.GO_LINKER or conf.env.GO_EXTENSION):
|
||||
raise conf.fatal('Unsupported platform ' + platform.machine())
|
||||
|
||||
set_def('GO_PACK', 'gopack')
|
||||
set_def('GO_PACK_EXTENSION', '.a')
|
||||
|
||||
conf.find_program(conf.env.GO_COMPILER, var='GOC', mandatory=True)
|
||||
conf.find_program(conf.env.GO_LINKER, var='GOL', mandatory=True)
|
||||
conf.find_program(conf.env.GO_PACK, var='GOP', mandatory=True)
|
||||
conf.find_program('cgo', var='CGO', mandatory=True)
|
||||
|
||||
@extension('.go')
|
||||
def compile_go(self, node):
|
||||
try:
|
||||
self.go_nodes.append(node)
|
||||
except AttributeError:
|
||||
self.go_nodes = [node]
|
||||
|
||||
@feature('go')
|
||||
@after('apply_core')
|
||||
def apply_compile_go(self):
|
||||
try:
|
||||
nodes = self.go_nodes
|
||||
except AttributeError:
|
||||
self.go_compile_task = None
|
||||
else:
|
||||
self.go_compile_task = self.create_task('gocompile',
|
||||
nodes,
|
||||
[self.path.find_or_declare(self.target + self.env.GO_EXTENSION)])
|
||||
|
||||
@feature('gopackage', 'goprogram')
|
||||
@after('apply_compile_go')
|
||||
def apply_goinc(self):
|
||||
if not getattr(self, 'go_compile_task', None):
|
||||
return
|
||||
|
||||
names = self.to_list(getattr(self, 'uselib_local', []))
|
||||
for name in names:
|
||||
obj = self.name_to_obj(name)
|
||||
if not obj:
|
||||
raise Utils.WafError('object %r was not found in uselib_local '
|
||||
'(required by %r)' % (lib_name, self.name))
|
||||
obj.post()
|
||||
self.go_compile_task.set_run_after(obj.go_package_task)
|
||||
self.go_compile_task.dep_nodes.extend(obj.go_package_task.outputs)
|
||||
self.env.append_unique('GOCFLAGS', '-I' + obj.path.abspath(obj.env))
|
||||
self.env.append_unique('GOLFLAGS', '-L' + obj.path.abspath(obj.env))
|
||||
|
||||
@feature('gopackage')
|
||||
@after('apply_goinc')
|
||||
def apply_gopackage(self):
|
||||
self.go_package_task = self.create_task('gopack',
|
||||
self.go_compile_task.outputs[0],
|
||||
self.path.find_or_declare(self.target + self.env.GO_PACK_EXTENSION))
|
||||
self.go_package_task.set_run_after(self.go_compile_task)
|
||||
self.go_package_task.dep_nodes.extend(self.go_compile_task.outputs)
|
||||
|
||||
@feature('goprogram')
|
||||
@after('apply_goinc')
|
||||
def apply_golink(self):
|
||||
self.go_link_task = self.create_task('golink',
|
||||
self.go_compile_task.outputs[0],
|
||||
self.path.find_or_declare(self.target))
|
||||
self.go_link_task.set_run_after(self.go_compile_task)
|
||||
self.go_link_task.dep_nodes.extend(self.go_compile_task.outputs)
|
96
third_party/waf/wafadmin/3rdparty/lru_cache.py
vendored
96
third_party/waf/wafadmin/3rdparty/lru_cache.py
vendored
@ -1,96 +0,0 @@
|
||||
#! /usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# Thomas Nagy 2011
|
||||
|
||||
import os, shutil, re
|
||||
import Options, Build, Logs
|
||||
|
||||
"""
|
||||
Apply a least recently used policy to the Waf cache.
|
||||
|
||||
For performance reasons, it is called after the build is complete.
|
||||
|
||||
We assume that the the folders are written atomically
|
||||
|
||||
Do export WAFCACHE=/tmp/foo-xyz where xyz represents the cache size in megabytes
|
||||
If missing, the default cache size will be set to 10GB
|
||||
"""
|
||||
|
||||
re_num = re.compile('[a-zA-Z_]+(\d+)')
|
||||
|
||||
CACHESIZE = 10*1024*1024*1024 # in bytes
|
||||
CLEANRATIO = 0.8
|
||||
DIRSIZE = 4096
|
||||
|
||||
def compile(self):
|
||||
if Options.cache_global and not Options.options.nocache:
|
||||
try:
|
||||
os.makedirs(Options.cache_global)
|
||||
except:
|
||||
pass
|
||||
|
||||
try:
|
||||
self.raw_compile()
|
||||
finally:
|
||||
if Options.cache_global and not Options.options.nocache:
|
||||
self.sweep()
|
||||
|
||||
def sweep(self):
|
||||
global CACHESIZE
|
||||
CACHEDIR = Options.cache_global
|
||||
|
||||
# get the cache max size from the WAFCACHE filename
|
||||
re_num = re.compile('[a-zA-Z_]+(\d+)')
|
||||
val = re_num.sub('\\1', os.path.basename(Options.cache_global))
|
||||
try:
|
||||
CACHESIZE = int(val)
|
||||
except:
|
||||
pass
|
||||
|
||||
# map folder names to timestamps
|
||||
flist = {}
|
||||
for x in os.listdir(CACHEDIR):
|
||||
j = os.path.join(CACHEDIR, x)
|
||||
if os.path.isdir(j) and len(x) == 32: # dir names are md5 hexdigests
|
||||
flist[x] = [os.stat(j).st_mtime, 0]
|
||||
|
||||
for (x, v) in flist.items():
|
||||
cnt = DIRSIZE # each entry takes 4kB
|
||||
d = os.path.join(CACHEDIR, x)
|
||||
for k in os.listdir(d):
|
||||
cnt += os.stat(os.path.join(d, k)).st_size
|
||||
flist[x][1] = cnt
|
||||
|
||||
total = sum([x[1] for x in flist.values()])
|
||||
Logs.debug('lru: Cache size is %r' % total)
|
||||
|
||||
if total >= CACHESIZE:
|
||||
Logs.debug('lru: Trimming the cache since %r > %r' % (total, CACHESIZE))
|
||||
|
||||
# make a list to sort the folders by timestamp
|
||||
lst = [(p, v[0], v[1]) for (p, v) in flist.items()]
|
||||
lst.sort(key=lambda x: x[1]) # sort by timestamp
|
||||
lst.reverse()
|
||||
|
||||
while total >= CACHESIZE * CLEANRATIO:
|
||||
(k, t, s) = lst.pop()
|
||||
p = os.path.join(CACHEDIR, k)
|
||||
v = p + '.del'
|
||||
try:
|
||||
os.rename(p, v)
|
||||
except:
|
||||
# someone already did it
|
||||
pass
|
||||
else:
|
||||
try:
|
||||
shutil.rmtree(v)
|
||||
except:
|
||||
# this should not happen, but who knows?
|
||||
Logs.warn('If you ever see this message, report it (%r)' % v)
|
||||
total -= s
|
||||
del flist[k]
|
||||
Logs.debug('lru: Total at the end %r' % total)
|
||||
|
||||
Build.BuildContext.raw_compile = Build.BuildContext.compile
|
||||
Build.BuildContext.compile = compile
|
||||
Build.BuildContext.sweep = sweep
|
34
third_party/waf/wafadmin/3rdparty/paranoid.py
vendored
34
third_party/waf/wafadmin/3rdparty/paranoid.py
vendored
@ -1,34 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# ita 2010
|
||||
|
||||
import Logs, Utils, Build, Task
|
||||
|
||||
def say(txt):
|
||||
Logs.warn("^o^: %s" % txt)
|
||||
|
||||
try:
|
||||
ret = Utils.cmd_output('which cowsay 2> /dev/null').strip()
|
||||
except Exception, e:
|
||||
pass
|
||||
else:
|
||||
def say(txt):
|
||||
f = Utils.cmd_output([ret, txt])
|
||||
Utils.pprint('PINK', f)
|
||||
|
||||
say('you make the errors, we detect them')
|
||||
|
||||
def check_task_classes(self):
|
||||
for x in Task.TaskBase.classes:
|
||||
if isinstance(x, Task.Task):
|
||||
if not getattr(x, 'ext_in', None) or getattr(x, 'before', None):
|
||||
say('class %s has no precedence constraints (ext_in/before)')
|
||||
if not getattr(x, 'ext_out', None) or getattr(x, 'after', None):
|
||||
say('class %s has no precedence constraints (ext_out/after)')
|
||||
|
||||
comp = Build.BuildContext.compile
|
||||
def compile(self):
|
||||
if not getattr(self, 'magic', None):
|
||||
check_task_classes(self)
|
||||
return comp(self)
|
||||
Build.BuildContext.compile = compile
|
275
third_party/waf/wafadmin/3rdparty/prefork.py
vendored
275
third_party/waf/wafadmin/3rdparty/prefork.py
vendored
@ -1,275 +0,0 @@
|
||||
#! /usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# Thomas Nagy, 2015 (ita)
|
||||
#
|
||||
# prefer the waf 1.8 version
|
||||
|
||||
"""
|
||||
The full samba build can be faster by ~10%, but there are a few limitations:
|
||||
* only one build process should be run at a time as the servers would use the same ports
|
||||
* only one build command is going to be called ("waf build configure build" would not work)
|
||||
|
||||
def build(bld):
|
||||
|
||||
mod = Utils.load_tool('prefork')
|
||||
mod.build(bld)
|
||||
...
|
||||
(build declarations after)
|
||||
"""
|
||||
|
||||
import os, re, socket, threading, sys, subprocess, time, atexit, traceback
|
||||
try:
|
||||
import SocketServer
|
||||
except ImportError:
|
||||
import socketserver as SocketServer
|
||||
try:
|
||||
from queue import Queue
|
||||
except ImportError:
|
||||
from Queue import Queue
|
||||
try:
|
||||
import cPickle
|
||||
except ImportError:
|
||||
import pickle as cPickle
|
||||
|
||||
DEFAULT_PORT = 51200
|
||||
|
||||
HEADER_SIZE = 128
|
||||
|
||||
REQ = 'REQ'
|
||||
RES = 'RES'
|
||||
BYE = 'BYE'
|
||||
|
||||
def make_header(params):
|
||||
header = ','.join(params)
|
||||
if sys.hexversion > 0x3000000:
|
||||
header = header.encode('iso8859-1')
|
||||
header = header.ljust(HEADER_SIZE)
|
||||
assert(len(header) == HEADER_SIZE)
|
||||
return header
|
||||
|
||||
|
||||
re_valid_query = re.compile('^[a-zA-Z0-9_, ]+$')
|
||||
class req(SocketServer.StreamRequestHandler):
|
||||
def handle(self):
|
||||
while 1:
|
||||
try:
|
||||
self.process_command()
|
||||
except Exception as e:
|
||||
print(e)
|
||||
break
|
||||
|
||||
def process_command(self):
|
||||
query = self.rfile.read(HEADER_SIZE)
|
||||
if not query:
|
||||
return
|
||||
#print(len(query))
|
||||
assert(len(query) == HEADER_SIZE)
|
||||
if sys.hexversion > 0x3000000:
|
||||
query = query.decode('iso8859-1')
|
||||
#print "%r" % query
|
||||
if not re_valid_query.match(query):
|
||||
raise ValueError('Invalid query %r' % query)
|
||||
|
||||
query = query.strip().split(',')
|
||||
|
||||
if query[0] == REQ:
|
||||
self.run_command(query[1:])
|
||||
elif query[0] == BYE:
|
||||
raise ValueError('Exit')
|
||||
else:
|
||||
raise ValueError('Invalid query %r' % query)
|
||||
|
||||
def run_command(self, query):
|
||||
|
||||
size = int(query[0])
|
||||
data = self.rfile.read(size)
|
||||
assert(len(data) == size)
|
||||
kw = cPickle.loads(data)
|
||||
|
||||
# run command
|
||||
ret = out = err = exc = None
|
||||
cmd = kw['cmd']
|
||||
del kw['cmd']
|
||||
#print(cmd)
|
||||
|
||||
try:
|
||||
if kw['stdout'] or kw['stderr']:
|
||||
p = subprocess.Popen(cmd, **kw)
|
||||
(out, err) = p.communicate()
|
||||
ret = p.returncode
|
||||
else:
|
||||
ret = subprocess.Popen(cmd, **kw).wait()
|
||||
except Exception as e:
|
||||
ret = -1
|
||||
exc = str(e) + traceback.format_exc()
|
||||
|
||||
# write the results
|
||||
if out or err or exc:
|
||||
data = (out, err, exc)
|
||||
data = cPickle.dumps(data, -1)
|
||||
else:
|
||||
data = ''
|
||||
|
||||
params = [RES, str(ret), str(len(data))]
|
||||
|
||||
self.wfile.write(make_header(params))
|
||||
|
||||
if data:
|
||||
self.wfile.write(data)
|
||||
|
||||
def create_server(conn, cls):
|
||||
#SocketServer.ThreadingTCPServer.allow_reuse_address = True
|
||||
#server = SocketServer.ThreadingTCPServer(conn, req)
|
||||
|
||||
SocketServer.TCPServer.allow_reuse_address = True
|
||||
server = SocketServer.TCPServer(conn, req)
|
||||
#server.timeout = 6000 # seconds
|
||||
server.serve_forever(poll_interval=0.001)
|
||||
|
||||
if __name__ == '__main__':
|
||||
if len(sys.argv) > 1:
|
||||
port = int(sys.argv[1])
|
||||
else:
|
||||
port = DEFAULT_PORT
|
||||
#conn = (socket.gethostname(), port)
|
||||
conn = ("127.0.0.1", port)
|
||||
#print("listening - %r %r\n" % conn)
|
||||
create_server(conn, req)
|
||||
else:
|
||||
|
||||
import Runner, Utils
|
||||
|
||||
def init_task_pool(self):
|
||||
# lazy creation, and set a common pool for all task consumers
|
||||
pool = self.pool = []
|
||||
for i in range(self.numjobs):
|
||||
consumer = Runner.get_pool()
|
||||
pool.append(consumer)
|
||||
consumer.idx = i
|
||||
self.ready = Queue(0)
|
||||
def setq(consumer):
|
||||
consumer.ready = self.ready
|
||||
try:
|
||||
threading.current_thread().idx = consumer.idx
|
||||
except Exception as e:
|
||||
print(e)
|
||||
for x in pool:
|
||||
x.ready.put(setq)
|
||||
return pool
|
||||
Runner.Parallel.init_task_pool = init_task_pool
|
||||
|
||||
PORT = 51200
|
||||
|
||||
def make_server(idx):
|
||||
port = PORT + idx
|
||||
cmd = [sys.executable, os.path.abspath(__file__), str(port)]
|
||||
proc = subprocess.Popen(cmd)
|
||||
proc.port = port
|
||||
return proc
|
||||
|
||||
def make_conn(srv):
|
||||
#port = PORT + idx
|
||||
port = srv.port
|
||||
conn = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
|
||||
conn.connect(('127.0.0.1', port))
|
||||
return conn
|
||||
|
||||
SERVERS = []
|
||||
CONNS = []
|
||||
def close_all():
|
||||
while CONNS:
|
||||
conn = CONNS.pop()
|
||||
try:
|
||||
conn.close()
|
||||
except:
|
||||
pass
|
||||
while SERVERS:
|
||||
srv = SERVERS.pop()
|
||||
try:
|
||||
srv.kill()
|
||||
except:
|
||||
pass
|
||||
atexit.register(close_all)
|
||||
|
||||
def put_data(conn, data):
|
||||
conn.send(data)
|
||||
|
||||
def read_data(conn, siz):
|
||||
ret = conn.recv(siz)
|
||||
if not ret:
|
||||
print("closed connection?")
|
||||
|
||||
assert(len(ret) == siz)
|
||||
return ret
|
||||
|
||||
def exec_command(cmd, **kw):
|
||||
if 'log' in kw:
|
||||
log = kw['log']
|
||||
kw['stdout'] = kw['stderr'] = subprocess.PIPE
|
||||
del(kw['log'])
|
||||
else:
|
||||
kw['stdout'] = kw['stderr'] = None
|
||||
kw['shell'] = isinstance(cmd, str)
|
||||
|
||||
idx = threading.current_thread().idx
|
||||
kw['cmd'] = cmd
|
||||
|
||||
data = cPickle.dumps(kw, -1)
|
||||
params = [REQ, str(len(data))]
|
||||
header = make_header(params)
|
||||
|
||||
conn = CONNS[idx]
|
||||
|
||||
put_data(conn, header)
|
||||
put_data(conn, data)
|
||||
|
||||
data = read_data(conn, HEADER_SIZE)
|
||||
if sys.hexversion > 0x3000000:
|
||||
data = data.decode('iso8859-1')
|
||||
|
||||
lst = data.split(',')
|
||||
ret = int(lst[1])
|
||||
dlen = int(lst[2])
|
||||
|
||||
out = err = None
|
||||
if dlen:
|
||||
data = read_data(conn, dlen)
|
||||
(out, err, exc) = cPickle.loads(data)
|
||||
if exc:
|
||||
raise Utils.WafError('Execution failure: %s' % exc)
|
||||
|
||||
if out:
|
||||
log.write(out)
|
||||
if err:
|
||||
log.write(err)
|
||||
|
||||
return ret
|
||||
|
||||
def __init__(self):
|
||||
threading.Thread.__init__(self)
|
||||
|
||||
# identifier of the current thread
|
||||
self.idx = len(SERVERS)
|
||||
|
||||
# create a server and wait for the connection
|
||||
srv = make_server(self.idx)
|
||||
SERVERS.append(srv)
|
||||
|
||||
conn = None
|
||||
for x in range(30):
|
||||
try:
|
||||
conn = make_conn(srv)
|
||||
break
|
||||
except socket.error:
|
||||
time.sleep(0.01)
|
||||
if not conn:
|
||||
raise ValueError('Could not start the server!')
|
||||
CONNS.append(conn)
|
||||
|
||||
self.setDaemon(1)
|
||||
self.start()
|
||||
Runner.TaskConsumer.__init__ = __init__
|
||||
|
||||
def build(bld):
|
||||
# dangerous, there is no other command hopefully
|
||||
Utils.exec_command = exec_command
|
@ -1,25 +0,0 @@
|
||||
#! /usr/bin/env python
|
||||
|
||||
"""
|
||||
In this case, print the commands being executed as strings
|
||||
(the commands are usually lists, so this can be misleading)
|
||||
"""
|
||||
|
||||
import Build, Utils, Logs
|
||||
|
||||
def exec_command(self, cmd, **kw):
|
||||
txt = cmd
|
||||
if isinstance(cmd, list):
|
||||
txt = ' '.join(cmd)
|
||||
Logs.debug('runner: %s' % txt)
|
||||
if self.log:
|
||||
self.log.write('%s\n' % cmd)
|
||||
kw['log'] = self.log
|
||||
try:
|
||||
if not kw.get('cwd', None):
|
||||
kw['cwd'] = self.cwd
|
||||
except AttributeError:
|
||||
self.cwd = kw['cwd'] = self.bldnode.abspath()
|
||||
return Utils.exec_command(cmd, **kw)
|
||||
Build.BuildContext.exec_command = exec_command
|
||||
|
189
third_party/waf/wafadmin/3rdparty/swig.py
vendored
189
third_party/waf/wafadmin/3rdparty/swig.py
vendored
@ -1,189 +0,0 @@
|
||||
#! /usr/bin/env python
|
||||
# encoding: UTF-8
|
||||
# Petar Forai
|
||||
# Thomas Nagy 2008
|
||||
|
||||
import re
|
||||
import Task, Utils, Logs
|
||||
from TaskGen import extension
|
||||
from Configure import conf
|
||||
import preproc
|
||||
|
||||
"""
|
||||
Welcome in the hell of adding tasks dynamically
|
||||
|
||||
swig interface files may be created at runtime, the module name may be unknown in advance
|
||||
|
||||
rev 5859 is much more simple
|
||||
"""
|
||||
|
||||
SWIG_EXTS = ['.swig', '.i']
|
||||
|
||||
swig_str = '${SWIG} ${SWIGFLAGS} ${_CCINCFLAGS} ${_CXXINCFLAGS} ${_CCDEFFLAGS} ${_CXXDEFFLAGS} ${SRC}'
|
||||
cls = Task.simple_task_type('swig', swig_str, color='BLUE', ext_in='.i .h', ext_out='.o .c .cxx', shell=False)
|
||||
|
||||
def runnable_status(self):
|
||||
for t in self.run_after:
|
||||
if not t.hasrun:
|
||||
return ASK_LATER
|
||||
|
||||
if not getattr(self, 'init_outputs', None):
|
||||
self.init_outputs = True
|
||||
if not getattr(self, 'module', None):
|
||||
# search the module name
|
||||
txt = self.inputs[0].read(self.env)
|
||||
m = re_module.search(txt)
|
||||
if not m:
|
||||
raise ValueError("could not find the swig module name")
|
||||
self.module = m.group(1)
|
||||
|
||||
swig_c(self)
|
||||
|
||||
# add the language-specific output files as nodes
|
||||
# call funs in the dict swig_langs
|
||||
for x in self.env['SWIGFLAGS']:
|
||||
# obtain the language
|
||||
x = x[1:]
|
||||
try:
|
||||
fun = swig_langs[x]
|
||||
except KeyError:
|
||||
pass
|
||||
else:
|
||||
fun(self)
|
||||
|
||||
return Task.Task.runnable_status(self)
|
||||
setattr(cls, 'runnable_status', runnable_status)
|
||||
|
||||
re_module = re.compile('%module(?:\s*\(.*\))?\s+(.+)', re.M)
|
||||
|
||||
re_1 = re.compile(r'^%module.*?\s+([\w]+)\s*?$', re.M)
|
||||
re_2 = re.compile('%include "(.*)"', re.M)
|
||||
re_3 = re.compile('#include "(.*)"', re.M)
|
||||
|
||||
def scan(self):
|
||||
"scan for swig dependencies, climb the .i files"
|
||||
env = self.env
|
||||
|
||||
lst_src = []
|
||||
|
||||
seen = []
|
||||
to_see = [self.inputs[0]]
|
||||
|
||||
while to_see:
|
||||
node = to_see.pop(0)
|
||||
if node.id in seen:
|
||||
continue
|
||||
seen.append(node.id)
|
||||
lst_src.append(node)
|
||||
|
||||
# read the file
|
||||
code = node.read(env)
|
||||
code = preproc.re_nl.sub('', code)
|
||||
code = preproc.re_cpp.sub(preproc.repl, code)
|
||||
|
||||
# find .i files and project headers
|
||||
names = re_2.findall(code) + re_3.findall(code)
|
||||
for n in names:
|
||||
for d in self.generator.env.INC_PATHS + [node.parent]:
|
||||
u = d.find_resource(n)
|
||||
if u:
|
||||
to_see.append(u)
|
||||
break
|
||||
else:
|
||||
Logs.warn('could not find %r' % n)
|
||||
|
||||
# list of nodes this one depends on, and module name if present
|
||||
if Logs.verbose:
|
||||
Logs.debug('deps: deps for %s: %s' % (str(self), str(lst_src)))
|
||||
return (lst_src, [])
|
||||
cls.scan = scan
|
||||
|
||||
# provide additional language processing
|
||||
swig_langs = {}
|
||||
def swig(fun):
|
||||
swig_langs[fun.__name__.replace('swig_', '')] = fun
|
||||
|
||||
def swig_c(self):
|
||||
ext = '.swigwrap_%d.c' % self.generator.idx
|
||||
flags = self.env['SWIGFLAGS']
|
||||
if '-c++' in flags:
|
||||
ext += 'xx'
|
||||
out_node = self.inputs[0].parent.find_or_declare(self.module + ext)
|
||||
|
||||
try:
|
||||
if '-c++' in flags:
|
||||
fun = self.generator.cxx_hook
|
||||
else:
|
||||
fun = self.generator.c_hook
|
||||
except AttributeError:
|
||||
raise Utils.WafError('No c%s compiler was found to process swig files' % ('-c++' in flags and '++' or ''))
|
||||
|
||||
task = fun(out_node)
|
||||
task.set_run_after(self)
|
||||
|
||||
ge = self.generator.bld.generator
|
||||
ge.outstanding.insert(0, task)
|
||||
ge.total += 1
|
||||
|
||||
try:
|
||||
ltask = self.generator.link_task
|
||||
except AttributeError:
|
||||
pass
|
||||
else:
|
||||
ltask.inputs.append(task.outputs[0])
|
||||
|
||||
self.outputs.append(out_node)
|
||||
|
||||
if not '-o' in self.env['SWIGFLAGS']:
|
||||
self.env.append_value('SWIGFLAGS', '-o')
|
||||
self.env.append_value('SWIGFLAGS', self.outputs[0].abspath(self.env))
|
||||
|
||||
@swig
|
||||
def swig_python(tsk):
|
||||
tsk.set_outputs(tsk.inputs[0].parent.find_or_declare(tsk.module + '.py'))
|
||||
|
||||
@swig
|
||||
def swig_ocaml(tsk):
|
||||
tsk.set_outputs(tsk.inputs[0].parent.find_or_declare(tsk.module + '.ml'))
|
||||
tsk.set_outputs(tsk.inputs[0].parent.find_or_declare(tsk.module + '.mli'))
|
||||
|
||||
@extension(SWIG_EXTS)
|
||||
def i_file(self, node):
|
||||
# the task instance
|
||||
tsk = self.create_task('swig')
|
||||
tsk.set_inputs(node)
|
||||
tsk.module = getattr(self, 'swig_module', None)
|
||||
|
||||
flags = self.to_list(getattr(self, 'swig_flags', []))
|
||||
self.env.append_value('SWIGFLAGS', flags)
|
||||
|
||||
if not '-outdir' in flags:
|
||||
flags.append('-outdir')
|
||||
flags.append(node.parent.abspath(self.env))
|
||||
|
||||
@conf
|
||||
def check_swig_version(conf, minver=None):
|
||||
"""Check for a minimum swig version like conf.check_swig_version('1.3.28')
|
||||
or conf.check_swig_version((1,3,28)) """
|
||||
reg_swig = re.compile(r'SWIG Version\s(.*)', re.M)
|
||||
|
||||
swig_out = Utils.cmd_output('%s -version' % conf.env['SWIG'])
|
||||
|
||||
swigver = [int(s) for s in reg_swig.findall(swig_out)[0].split('.')]
|
||||
if isinstance(minver, basestring):
|
||||
minver = [int(s) for s in minver.split(".")]
|
||||
if isinstance(minver, tuple):
|
||||
minver = [int(s) for s in minver]
|
||||
result = (minver is None) or (minver[:3] <= swigver[:3])
|
||||
swigver_full = '.'.join(map(str, swigver))
|
||||
if result:
|
||||
conf.env['SWIG_VERSION'] = swigver_full
|
||||
minver_str = '.'.join(map(str, minver))
|
||||
if minver is None:
|
||||
conf.check_message_custom('swig version', '', swigver_full)
|
||||
else:
|
||||
conf.check_message('swig version', '>= %s' % (minver_str,), result, option=swigver_full)
|
||||
return result
|
||||
|
||||
def detect(conf):
|
||||
swig = conf.find_program('swig', var='SWIG', mandatory=True)
|
112
third_party/waf/wafadmin/3rdparty/valadoc.py
vendored
112
third_party/waf/wafadmin/3rdparty/valadoc.py
vendored
@ -1,112 +0,0 @@
|
||||
#! /usr/bin/env python
|
||||
# encoding: UTF-8
|
||||
# Nicolas Joseph 2009
|
||||
|
||||
from fnmatch import fnmatchcase
|
||||
import os, os.path, re, stat
|
||||
import Task, Utils, Node, Constants
|
||||
from TaskGen import feature, extension, after
|
||||
from Logs import debug, warn, error
|
||||
|
||||
VALADOC_STR = '${VALADOC}'
|
||||
|
||||
class valadoc_task(Task.Task):
|
||||
|
||||
vars = ['VALADOC', 'VALADOCFLAGS']
|
||||
color = 'BLUE'
|
||||
after = 'cxx_link cc_link'
|
||||
quiet = True
|
||||
|
||||
output_dir = ''
|
||||
doclet = ''
|
||||
package_name = ''
|
||||
package_version = ''
|
||||
files = []
|
||||
protected = True
|
||||
private = False
|
||||
inherit = False
|
||||
deps = False
|
||||
enable_non_null_experimental = False
|
||||
force = False
|
||||
|
||||
def runnable_status(self):
|
||||
return True
|
||||
|
||||
def run(self):
|
||||
if self.env['VALADOC']:
|
||||
if not self.env['VALADOCFLAGS']:
|
||||
self.env['VALADOCFLAGS'] = ''
|
||||
cmd = [Utils.subst_vars(VALADOC_STR, self.env)]
|
||||
cmd.append ('-o %s' % self.output_dir)
|
||||
if getattr(self, 'doclet', None):
|
||||
cmd.append ('--doclet %s' % self.doclet)
|
||||
cmd.append ('--package-name %s' % self.package_name)
|
||||
if getattr(self, 'version', None):
|
||||
cmd.append ('--package-version %s' % self.package_version)
|
||||
if getattr(self, 'packages', None):
|
||||
for package in self.packages:
|
||||
cmd.append ('--pkg %s' % package)
|
||||
if getattr(self, 'vapi_dirs', None):
|
||||
for vapi_dir in self.vapi_dirs:
|
||||
cmd.append ('--vapidir %s' % vapi_dir)
|
||||
if not getattr(self, 'protected', None):
|
||||
cmd.append ('--no-protected')
|
||||
if getattr(self, 'private', None):
|
||||
cmd.append ('--private')
|
||||
if getattr(self, 'inherit', None):
|
||||
cmd.append ('--inherit')
|
||||
if getattr(self, 'deps', None):
|
||||
cmd.append ('--deps')
|
||||
if getattr(self, 'enable_non_null_experimental', None):
|
||||
cmd.append ('--enable-non-null-experimental')
|
||||
if getattr(self, 'force', None):
|
||||
cmd.append ('--force')
|
||||
cmd.append (' '.join ([x.relpath_gen (self.generator.bld.bldnode) for x in self.files]))
|
||||
return self.generator.bld.exec_command(' '.join(cmd))
|
||||
else:
|
||||
error ('You must install valadoc <http://live.gnome.org/Valadoc> for generate the API documentation')
|
||||
return -1
|
||||
|
||||
@feature('valadoc')
|
||||
def process_valadoc(self):
|
||||
task = getattr(self, 'task', None)
|
||||
if not task:
|
||||
task = self.create_task('valadoc')
|
||||
self.task = task
|
||||
if getattr(self, 'output_dir', None):
|
||||
task.output_dir = self.output_dir
|
||||
else:
|
||||
Utils.WafError('no output directory')
|
||||
if getattr(self, 'doclet', None):
|
||||
task.doclet = self.doclet
|
||||
else:
|
||||
Utils.WafError('no doclet directory')
|
||||
if getattr(self, 'package_name', None):
|
||||
task.package_name = self.package_name
|
||||
else:
|
||||
Utils.WafError('no package name')
|
||||
if getattr(self, 'package_version', None):
|
||||
task.package_version = self.package_version
|
||||
if getattr(self, 'packages', None):
|
||||
task.packages = Utils.to_list(self.packages)
|
||||
if getattr(self, 'vapi_dirs', None):
|
||||
task.vapi_dirs = Utils.to_list(self.vapi_dirs)
|
||||
if getattr(self, 'files', None):
|
||||
task.files = self.files
|
||||
else:
|
||||
Utils.WafError('no input file')
|
||||
if getattr(self, 'protected', None):
|
||||
task.protected = self.protected
|
||||
if getattr(self, 'private', None):
|
||||
task.private = self.private
|
||||
if getattr(self, 'inherit', None):
|
||||
task.inherit = self.inherit
|
||||
if getattr(self, 'deps', None):
|
||||
task.deps = self.deps
|
||||
if getattr(self, 'enable_non_null_experimental', None):
|
||||
task.enable_non_null_experimental = self.enable_non_null_experimental
|
||||
if getattr(self, 'force', None):
|
||||
task.force = self.force
|
||||
|
||||
def detect(conf):
|
||||
conf.find_program('valadoc', var='VALADOC', mandatory=False)
|
1036
third_party/waf/wafadmin/Build.py
vendored
1036
third_party/waf/wafadmin/Build.py
vendored
File diff suppressed because it is too large
Load Diff
442
third_party/waf/wafadmin/Configure.py
vendored
442
third_party/waf/wafadmin/Configure.py
vendored
@ -1,442 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# Thomas Nagy, 2005-2008 (ita)
|
||||
|
||||
"""
|
||||
Configuration system
|
||||
|
||||
A configuration instance is created when "waf configure" is called, it is used to:
|
||||
* create data dictionaries (Environment instances)
|
||||
* store the list of modules to import
|
||||
|
||||
The old model (copied from Scons) was to store logic (mapping file extensions to functions)
|
||||
along with the data. In Waf a way was found to separate that logic by adding an indirection
|
||||
layer (storing the names in the Environment instances)
|
||||
|
||||
In the new model, the logic is more object-oriented, and the user scripts provide the
|
||||
logic. The data files (Environments) must contain configuration data only (flags, ..).
|
||||
|
||||
Note: the c/c++ related code is in the module config_c
|
||||
"""
|
||||
|
||||
import os, shlex, sys, time
|
||||
try: import cPickle
|
||||
except ImportError: import pickle as cPickle
|
||||
import Environment, Utils, Options, Logs
|
||||
from Logs import warn
|
||||
from Constants import *
|
||||
|
||||
try:
|
||||
from urllib import request
|
||||
except:
|
||||
from urllib import urlopen
|
||||
else:
|
||||
urlopen = request.urlopen
|
||||
|
||||
conf_template = '''# project %(app)s configured on %(now)s by
|
||||
# waf %(wafver)s (abi %(abi)s, python %(pyver)x on %(systype)s)
|
||||
# using %(args)s
|
||||
#
|
||||
'''
|
||||
|
||||
class ConfigurationError(Utils.WscriptError):
|
||||
pass
|
||||
|
||||
autoconfig = False
|
||||
"reconfigure the project automatically"
|
||||
|
||||
def find_file(filename, path_list):
|
||||
"""find a file in a list of paths
|
||||
@param filename: name of the file to search for
|
||||
@param path_list: list of directories to search
|
||||
@return: the first occurrence filename or '' if filename could not be found
|
||||
"""
|
||||
for directory in Utils.to_list(path_list):
|
||||
if os.path.exists(os.path.join(directory, filename)):
|
||||
return directory
|
||||
return ''
|
||||
|
||||
def find_program_impl(env, filename, path_list=[], var=None, environ=None):
|
||||
"""find a program in folders path_lst, and sets env[var]
|
||||
@param env: environment
|
||||
@param filename: name of the program to search for
|
||||
@param path_list: list of directories to search for filename
|
||||
@param var: environment value to be checked for in env or os.environ
|
||||
@return: either the value that is referenced with [var] in env or os.environ
|
||||
or the first occurrence filename or '' if filename could not be found
|
||||
"""
|
||||
|
||||
if not environ:
|
||||
environ = os.environ
|
||||
|
||||
try: path_list = path_list.split()
|
||||
except AttributeError: pass
|
||||
|
||||
if var:
|
||||
if env[var]: return env[var]
|
||||
if var in environ: env[var] = environ[var]
|
||||
|
||||
if not path_list: path_list = environ.get('PATH', '').split(os.pathsep)
|
||||
|
||||
ext = (Options.platform == 'win32') and '.exe,.com,.bat,.cmd' or ''
|
||||
for y in [filename+x for x in ext.split(',')]:
|
||||
for directory in path_list:
|
||||
x = os.path.join(directory, y)
|
||||
if os.path.isfile(x):
|
||||
if var: env[var] = x
|
||||
return x
|
||||
return ''
|
||||
|
||||
class ConfigurationContext(Utils.Context):
|
||||
tests = {}
|
||||
error_handlers = []
|
||||
def __init__(self, env=None, blddir='', srcdir=''):
|
||||
self.env = None
|
||||
self.envname = ''
|
||||
|
||||
self.environ = dict(os.environ)
|
||||
|
||||
self.line_just = 40
|
||||
|
||||
self.blddir = blddir
|
||||
self.srcdir = srcdir
|
||||
self.all_envs = {}
|
||||
|
||||
# curdir: necessary for recursion
|
||||
self.cwd = self.curdir = os.getcwd()
|
||||
|
||||
self.tools = [] # tools loaded in the configuration, and that will be loaded when building
|
||||
|
||||
self.setenv(DEFAULT)
|
||||
|
||||
self.lastprog = ''
|
||||
|
||||
self.hash = 0
|
||||
self.files = []
|
||||
|
||||
self.tool_cache = []
|
||||
|
||||
if self.blddir:
|
||||
self.post_init()
|
||||
|
||||
def post_init(self):
|
||||
|
||||
self.cachedir = os.path.join(self.blddir, CACHE_DIR)
|
||||
|
||||
path = os.path.join(self.blddir, WAF_CONFIG_LOG)
|
||||
try: os.unlink(path)
|
||||
except (OSError, IOError): pass
|
||||
|
||||
try:
|
||||
self.log = open(path, 'w')
|
||||
except (OSError, IOError):
|
||||
self.fatal('could not open %r for writing' % path)
|
||||
|
||||
app = Utils.g_module.APPNAME
|
||||
if app:
|
||||
ver = getattr(Utils.g_module, 'VERSION', '')
|
||||
if ver:
|
||||
app = "%s (%s)" % (app, ver)
|
||||
|
||||
now = time.ctime()
|
||||
pyver = sys.hexversion
|
||||
systype = sys.platform
|
||||
args = " ".join(sys.argv)
|
||||
wafver = WAFVERSION
|
||||
abi = ABI
|
||||
self.log.write(conf_template % vars())
|
||||
|
||||
def __del__(self):
|
||||
"""cleanup function: close config.log"""
|
||||
|
||||
# may be ran by the gc, not always after initialization
|
||||
if hasattr(self, 'log') and self.log:
|
||||
self.log.close()
|
||||
|
||||
def fatal(self, msg):
|
||||
raise ConfigurationError(msg)
|
||||
|
||||
def check_tool(self, input, tooldir=None, funs=None):
|
||||
"load a waf tool"
|
||||
|
||||
tools = Utils.to_list(input)
|
||||
if tooldir: tooldir = Utils.to_list(tooldir)
|
||||
for tool in tools:
|
||||
tool = tool.replace('++', 'xx')
|
||||
if tool == 'java': tool = 'javaw'
|
||||
if tool.lower() == 'unittest': tool = 'unittestw'
|
||||
# avoid loading the same tool more than once with the same functions
|
||||
# used by composite projects
|
||||
|
||||
mag = (tool, id(self.env), funs)
|
||||
if mag in self.tool_cache:
|
||||
continue
|
||||
self.tool_cache.append(mag)
|
||||
|
||||
module = None
|
||||
try:
|
||||
module = Utils.load_tool(tool, tooldir)
|
||||
except Exception, e:
|
||||
ex = e
|
||||
if Options.options.download:
|
||||
_3rdparty = os.path.normpath(Options.tooldir[0] + os.sep + '..' + os.sep + '3rdparty')
|
||||
|
||||
# try to download the tool from the repository then
|
||||
# the default is set to false
|
||||
for x in Utils.to_list(Options.remote_repo):
|
||||
for sub in ['branches/waf-%s/wafadmin/3rdparty' % WAFVERSION, 'trunk/wafadmin/3rdparty']:
|
||||
url = '/'.join((x, sub, tool + '.py'))
|
||||
try:
|
||||
web = urlopen(url)
|
||||
if web.getcode() != 200:
|
||||
continue
|
||||
except Exception, e:
|
||||
# on python3 urlopen throws an exception
|
||||
continue
|
||||
else:
|
||||
loc = None
|
||||
try:
|
||||
loc = open(_3rdparty + os.sep + tool + '.py', 'wb')
|
||||
loc.write(web.read())
|
||||
web.close()
|
||||
finally:
|
||||
if loc:
|
||||
loc.close()
|
||||
Logs.warn('downloaded %s from %s' % (tool, url))
|
||||
try:
|
||||
module = Utils.load_tool(tool, tooldir)
|
||||
except:
|
||||
Logs.warn('module %s from %s is unusable' % (tool, url))
|
||||
try:
|
||||
os.unlink(_3rdparty + os.sep + tool + '.py')
|
||||
except:
|
||||
pass
|
||||
continue
|
||||
else:
|
||||
break
|
||||
|
||||
if not module:
|
||||
Logs.error('Could not load the tool %r or download a suitable replacement from the repository (sys.path %r)\n%s' % (tool, sys.path, e))
|
||||
raise ex
|
||||
else:
|
||||
Logs.error('Could not load the tool %r in %r (try the --download option?):\n%s' % (tool, sys.path, e))
|
||||
raise ex
|
||||
|
||||
if funs is not None:
|
||||
self.eval_rules(funs)
|
||||
else:
|
||||
func = getattr(module, 'detect', None)
|
||||
if func:
|
||||
if type(func) is type(find_file): func(self)
|
||||
else: self.eval_rules(func)
|
||||
|
||||
self.tools.append({'tool':tool, 'tooldir':tooldir, 'funs':funs})
|
||||
|
||||
def sub_config(self, k):
|
||||
"executes the configure function of a wscript module"
|
||||
self.recurse(k, name='configure')
|
||||
|
||||
def pre_recurse(self, name_or_mod, path, nexdir):
|
||||
return {'conf': self, 'ctx': self}
|
||||
|
||||
def post_recurse(self, name_or_mod, path, nexdir):
|
||||
if not autoconfig:
|
||||
return
|
||||
self.hash = hash((self.hash, getattr(name_or_mod, 'waf_hash_val', name_or_mod)))
|
||||
self.files.append(path)
|
||||
|
||||
def store(self, file=''):
|
||||
"save the config results into the cache file"
|
||||
if not os.path.isdir(self.cachedir):
|
||||
os.makedirs(self.cachedir)
|
||||
|
||||
if not file:
|
||||
file = open(os.path.join(self.cachedir, 'build.config.py'), 'w')
|
||||
file.write('version = 0x%x\n' % HEXVERSION)
|
||||
file.write('tools = %r\n' % self.tools)
|
||||
file.close()
|
||||
|
||||
if not self.all_envs:
|
||||
self.fatal('nothing to store in the configuration context!')
|
||||
for key in self.all_envs:
|
||||
tmpenv = self.all_envs[key]
|
||||
tmpenv.store(os.path.join(self.cachedir, key + CACHE_SUFFIX))
|
||||
|
||||
def set_env_name(self, name, env):
|
||||
"add a new environment called name"
|
||||
self.all_envs[name] = env
|
||||
return env
|
||||
|
||||
def retrieve(self, name, fromenv=None):
|
||||
"retrieve an environment called name"
|
||||
try:
|
||||
env = self.all_envs[name]
|
||||
except KeyError:
|
||||
env = Environment.Environment()
|
||||
env['PREFIX'] = os.path.abspath(os.path.expanduser(Options.options.prefix))
|
||||
self.all_envs[name] = env
|
||||
else:
|
||||
if fromenv: warn("The environment %s may have been configured already" % name)
|
||||
return env
|
||||
|
||||
def setenv(self, name):
|
||||
"enable the environment called name"
|
||||
self.env = self.retrieve(name)
|
||||
self.envname = name
|
||||
|
||||
def add_os_flags(self, var, dest=None):
|
||||
# do not use 'get' to make certain the variable is not defined
|
||||
try: self.env.append_value(dest or var, Utils.to_list(self.environ[var]))
|
||||
except KeyError: pass
|
||||
|
||||
def check_message_1(self, sr):
|
||||
self.line_just = max(self.line_just, len(sr))
|
||||
for x in ('\n', self.line_just * '-', '\n', sr, '\n'):
|
||||
self.log.write(x)
|
||||
Utils.pprint('NORMAL', "%s :" % sr.ljust(self.line_just), sep='')
|
||||
|
||||
def check_message_2(self, sr, color='GREEN'):
|
||||
self.log.write(sr)
|
||||
self.log.write('\n')
|
||||
Utils.pprint(color, sr)
|
||||
|
||||
def check_message(self, th, msg, state, option=''):
|
||||
sr = 'Checking for %s %s' % (th, msg)
|
||||
self.check_message_1(sr)
|
||||
p = self.check_message_2
|
||||
if state: p('ok ' + str(option))
|
||||
else: p('not found', 'YELLOW')
|
||||
|
||||
# FIXME remove in waf 1.6
|
||||
# the parameter 'option' is not used (kept for compatibility)
|
||||
def check_message_custom(self, th, msg, custom, option='', color='PINK'):
|
||||
sr = 'Checking for %s %s' % (th, msg)
|
||||
self.check_message_1(sr)
|
||||
self.check_message_2(custom, color)
|
||||
|
||||
def msg(self, msg, result, color=None):
|
||||
"""Prints a configuration message 'Checking for xxx: ok'"""
|
||||
self.start_msg('Checking for ' + msg)
|
||||
|
||||
if not isinstance(color, str):
|
||||
color = result and 'GREEN' or 'YELLOW'
|
||||
|
||||
self.end_msg(result, color)
|
||||
|
||||
def start_msg(self, msg):
|
||||
try:
|
||||
if self.in_msg:
|
||||
return
|
||||
except:
|
||||
self.in_msg = 0
|
||||
self.in_msg += 1
|
||||
|
||||
self.line_just = max(self.line_just, len(msg))
|
||||
for x in ('\n', self.line_just * '-', '\n', msg, '\n'):
|
||||
self.log.write(x)
|
||||
Utils.pprint('NORMAL', "%s :" % msg.ljust(self.line_just), sep='')
|
||||
|
||||
def end_msg(self, result, color):
|
||||
self.in_msg -= 1
|
||||
if self.in_msg:
|
||||
return
|
||||
|
||||
if not color:
|
||||
color = 'GREEN'
|
||||
if result == True:
|
||||
msg = 'ok'
|
||||
elif result == False:
|
||||
msg = 'not found'
|
||||
color = 'YELLOW'
|
||||
else:
|
||||
msg = str(result)
|
||||
|
||||
self.log.write(msg)
|
||||
self.log.write('\n')
|
||||
Utils.pprint(color, msg)
|
||||
|
||||
def find_program(self, filename, path_list=[], var=None, mandatory=False):
|
||||
"wrapper that adds a configuration message"
|
||||
|
||||
ret = None
|
||||
if var:
|
||||
if self.env[var]:
|
||||
ret = self.env[var]
|
||||
elif var in os.environ:
|
||||
ret = os.environ[var]
|
||||
|
||||
if not isinstance(filename, list): filename = [filename]
|
||||
if not ret:
|
||||
for x in filename:
|
||||
ret = find_program_impl(self.env, x, path_list, var, environ=self.environ)
|
||||
if ret: break
|
||||
|
||||
self.check_message_1('Checking for program %s' % ' or '.join(filename))
|
||||
self.log.write(' find program=%r paths=%r var=%r\n -> %r\n' % (filename, path_list, var, ret))
|
||||
if ret:
|
||||
Utils.pprint('GREEN', str(ret))
|
||||
else:
|
||||
Utils.pprint('YELLOW', 'not found')
|
||||
if mandatory:
|
||||
self.fatal('The program %r is required' % filename)
|
||||
|
||||
if var:
|
||||
self.env[var] = ret
|
||||
return ret
|
||||
|
||||
def cmd_to_list(self, cmd):
|
||||
"commands may be written in pseudo shell like 'ccache g++'"
|
||||
if isinstance(cmd, str) and cmd.find(' '):
|
||||
try:
|
||||
os.stat(cmd)
|
||||
except OSError:
|
||||
return shlex.split(cmd)
|
||||
else:
|
||||
return [cmd]
|
||||
return cmd
|
||||
|
||||
def __getattr__(self, name):
|
||||
r = self.__class__.__dict__.get(name, None)
|
||||
if r: return r
|
||||
if name and name.startswith('require_'):
|
||||
|
||||
for k in ['check_', 'find_']:
|
||||
n = name.replace('require_', k)
|
||||
ret = self.__class__.__dict__.get(n, None)
|
||||
if ret:
|
||||
def run(*k, **kw):
|
||||
r = ret(self, *k, **kw)
|
||||
if not r:
|
||||
self.fatal('requirement failure')
|
||||
return r
|
||||
return run
|
||||
self.fatal('No such method %r' % name)
|
||||
|
||||
def eval_rules(self, rules):
|
||||
self.rules = Utils.to_list(rules)
|
||||
for x in self.rules:
|
||||
f = getattr(self, x)
|
||||
if not f: self.fatal("No such method '%s'." % x)
|
||||
try:
|
||||
f()
|
||||
except Exception, e:
|
||||
ret = self.err_handler(x, e)
|
||||
if ret == BREAK:
|
||||
break
|
||||
elif ret == CONTINUE:
|
||||
continue
|
||||
else:
|
||||
self.fatal(e)
|
||||
|
||||
def err_handler(self, fun, error):
|
||||
pass
|
||||
|
||||
def conf(f):
|
||||
"decorator: attach new configuration functions"
|
||||
setattr(ConfigurationContext, f.__name__, f)
|
||||
return f
|
||||
|
||||
def conftest(f):
|
||||
"decorator: attach new configuration tests (registered as strings)"
|
||||
ConfigurationContext.tests[f.__name__] = f
|
||||
return conf(f)
|
75
third_party/waf/wafadmin/Constants.py
vendored
75
third_party/waf/wafadmin/Constants.py
vendored
@ -1,75 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# Yinon dot me gmail 2008
|
||||
|
||||
"""
|
||||
these constants are somewhat public, try not to mess them
|
||||
|
||||
maintainer: the version number is updated from the top-level wscript file
|
||||
"""
|
||||
|
||||
# do not touch these three lines, they are updated automatically
|
||||
HEXVERSION=0x105019
|
||||
WAFVERSION="1.5.19"
|
||||
WAFREVISION = "9709M"
|
||||
ABI = 7
|
||||
|
||||
# permissions
|
||||
O644 = 420
|
||||
O755 = 493
|
||||
|
||||
MAXJOBS = 99999999
|
||||
|
||||
CACHE_DIR = 'c4che'
|
||||
CACHE_SUFFIX = '.cache.py'
|
||||
DBFILE = '.wafpickle-%d' % ABI
|
||||
WSCRIPT_FILE = 'wscript'
|
||||
WSCRIPT_BUILD_FILE = 'wscript_build'
|
||||
WAF_CONFIG_LOG = 'config.log'
|
||||
WAF_CONFIG_H = 'config.h'
|
||||
|
||||
SIG_NIL = 'iluvcuteoverload'
|
||||
|
||||
VARIANT = '_VARIANT_'
|
||||
DEFAULT = 'default'
|
||||
|
||||
SRCDIR = 'srcdir'
|
||||
BLDDIR = 'blddir'
|
||||
APPNAME = 'APPNAME'
|
||||
VERSION = 'VERSION'
|
||||
|
||||
DEFINES = 'defines'
|
||||
UNDEFINED = ()
|
||||
|
||||
BREAK = "break"
|
||||
CONTINUE = "continue"
|
||||
|
||||
# task scheduler options
|
||||
JOBCONTROL = "JOBCONTROL"
|
||||
MAXPARALLEL = "MAXPARALLEL"
|
||||
NORMAL = "NORMAL"
|
||||
|
||||
# task state
|
||||
NOT_RUN = 0
|
||||
MISSING = 1
|
||||
CRASHED = 2
|
||||
EXCEPTION = 3
|
||||
SKIPPED = 8
|
||||
SUCCESS = 9
|
||||
|
||||
ASK_LATER = -1
|
||||
SKIP_ME = -2
|
||||
RUN_ME = -3
|
||||
|
||||
|
||||
LOG_FORMAT = "%(asctime)s %(c1)s%(zone)s%(c2)s %(message)s"
|
||||
HOUR_FORMAT = "%H:%M:%S"
|
||||
|
||||
TEST_OK = True
|
||||
|
||||
CFG_FILES = 'cfg_files'
|
||||
|
||||
# positive '->' install
|
||||
# negative '<-' uninstall
|
||||
INSTALL = 1337
|
||||
UNINSTALL = -1337
|
209
third_party/waf/wafadmin/Environment.py
vendored
209
third_party/waf/wafadmin/Environment.py
vendored
@ -1,209 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# Thomas Nagy, 2005 (ita)
|
||||
|
||||
"""Environment representation
|
||||
|
||||
There is one gotcha: getitem returns [] if the contents evals to False
|
||||
This means env['foo'] = {}; print env['foo'] will print [] not {}
|
||||
"""
|
||||
|
||||
import os, copy, re
|
||||
import Logs, Options, Utils
|
||||
from Constants import *
|
||||
re_imp = re.compile('^(#)*?([^#=]*?)\ =\ (.*?)$', re.M)
|
||||
|
||||
class Environment(object):
|
||||
"""A safe-to-use dictionary, but do not attach functions to it please (break cPickle)
|
||||
An environment instance can be stored into a file and loaded easily
|
||||
"""
|
||||
__slots__ = ("table", "parent")
|
||||
def __init__(self, filename=None):
|
||||
self.table = {}
|
||||
#self.parent = None
|
||||
|
||||
if filename:
|
||||
self.load(filename)
|
||||
|
||||
def __contains__(self, key):
|
||||
if key in self.table: return True
|
||||
try: return self.parent.__contains__(key)
|
||||
except AttributeError: return False # parent may not exist
|
||||
|
||||
def __str__(self):
|
||||
keys = set()
|
||||
cur = self
|
||||
while cur:
|
||||
keys.update(cur.table.keys())
|
||||
cur = getattr(cur, 'parent', None)
|
||||
keys = list(keys)
|
||||
keys.sort()
|
||||
return "\n".join(["%r %r" % (x, self.__getitem__(x)) for x in keys])
|
||||
|
||||
def __getitem__(self, key):
|
||||
try:
|
||||
while 1:
|
||||
x = self.table.get(key, None)
|
||||
if not x is None:
|
||||
return x
|
||||
self = self.parent
|
||||
except AttributeError:
|
||||
return []
|
||||
|
||||
def __setitem__(self, key, value):
|
||||
self.table[key] = value
|
||||
|
||||
def __delitem__(self, key):
|
||||
del self.table[key]
|
||||
|
||||
def pop(self, key, *args):
|
||||
if len(args):
|
||||
return self.table.pop(key, *args)
|
||||
return self.table.pop(key)
|
||||
|
||||
def set_variant(self, name):
|
||||
self.table[VARIANT] = name
|
||||
|
||||
def variant(self):
|
||||
try:
|
||||
while 1:
|
||||
x = self.table.get(VARIANT, None)
|
||||
if not x is None:
|
||||
return x
|
||||
self = self.parent
|
||||
except AttributeError:
|
||||
return DEFAULT
|
||||
|
||||
def copy(self):
|
||||
# TODO waf 1.6 rename this method derive, #368
|
||||
newenv = Environment()
|
||||
newenv.parent = self
|
||||
return newenv
|
||||
|
||||
def detach(self):
|
||||
"""TODO try it
|
||||
modifying the original env will not change the copy"""
|
||||
tbl = self.get_merged_dict()
|
||||
try:
|
||||
delattr(self, 'parent')
|
||||
except AttributeError:
|
||||
pass
|
||||
else:
|
||||
keys = tbl.keys()
|
||||
for x in keys:
|
||||
tbl[x] = copy.deepcopy(tbl[x])
|
||||
self.table = tbl
|
||||
|
||||
def get_flat(self, key):
|
||||
s = self[key]
|
||||
if isinstance(s, str): return s
|
||||
return ' '.join(s)
|
||||
|
||||
def _get_list_value_for_modification(self, key):
|
||||
"""Gets a value that must be a list for further modification. The
|
||||
list may be modified inplace and there is no need to
|
||||
"self.table[var] = value" afterwards.
|
||||
"""
|
||||
try:
|
||||
value = self.table[key]
|
||||
except KeyError:
|
||||
try: value = self.parent[key]
|
||||
except AttributeError: value = []
|
||||
if isinstance(value, list):
|
||||
value = value[:]
|
||||
else:
|
||||
value = [value]
|
||||
else:
|
||||
if not isinstance(value, list):
|
||||
value = [value]
|
||||
self.table[key] = value
|
||||
return value
|
||||
|
||||
def append_value(self, var, value):
|
||||
current_value = self._get_list_value_for_modification(var)
|
||||
|
||||
if isinstance(value, list):
|
||||
current_value.extend(value)
|
||||
else:
|
||||
current_value.append(value)
|
||||
|
||||
def prepend_value(self, var, value):
|
||||
current_value = self._get_list_value_for_modification(var)
|
||||
|
||||
if isinstance(value, list):
|
||||
current_value = value + current_value
|
||||
# a new list: update the dictionary entry
|
||||
self.table[var] = current_value
|
||||
else:
|
||||
current_value.insert(0, value)
|
||||
|
||||
# prepend unique would be ambiguous
|
||||
def append_unique(self, var, value):
|
||||
current_value = self._get_list_value_for_modification(var)
|
||||
|
||||
if isinstance(value, list):
|
||||
for value_item in value:
|
||||
if value_item not in current_value:
|
||||
current_value.append(value_item)
|
||||
else:
|
||||
if value not in current_value:
|
||||
current_value.append(value)
|
||||
|
||||
def get_merged_dict(self):
|
||||
"""compute a merged table"""
|
||||
table_list = []
|
||||
env = self
|
||||
while 1:
|
||||
table_list.insert(0, env.table)
|
||||
try: env = env.parent
|
||||
except AttributeError: break
|
||||
merged_table = {}
|
||||
for table in table_list:
|
||||
merged_table.update(table)
|
||||
return merged_table
|
||||
|
||||
def store(self, filename):
|
||||
"Write the variables into a file"
|
||||
file = open(filename, 'w')
|
||||
merged_table = self.get_merged_dict()
|
||||
keys = list(merged_table.keys())
|
||||
keys.sort()
|
||||
for k in keys: file.write('%s = %r\n' % (k, merged_table[k]))
|
||||
file.close()
|
||||
|
||||
def load(self, filename):
|
||||
"Retrieve the variables from a file"
|
||||
tbl = self.table
|
||||
code = Utils.readf(filename)
|
||||
for m in re_imp.finditer(code):
|
||||
g = m.group
|
||||
tbl[g(2)] = eval(g(3))
|
||||
Logs.debug('env: %s', self.table)
|
||||
|
||||
def get_destdir(self):
|
||||
"return the destdir, useful for installing"
|
||||
if self.__getitem__('NOINSTALL'): return ''
|
||||
return Options.options.destdir
|
||||
|
||||
def update(self, d):
|
||||
for k, v in d.iteritems():
|
||||
self[k] = v
|
||||
|
||||
|
||||
def __getattr__(self, name):
|
||||
if name in self.__slots__:
|
||||
return object.__getattr__(self, name)
|
||||
else:
|
||||
return self[name]
|
||||
|
||||
def __setattr__(self, name, value):
|
||||
if name in self.__slots__:
|
||||
object.__setattr__(self, name, value)
|
||||
else:
|
||||
self[name] = value
|
||||
|
||||
def __delattr__(self, name):
|
||||
if name in self.__slots__:
|
||||
object.__delattr__(self, name)
|
||||
else:
|
||||
del self[name]
|
133
third_party/waf/wafadmin/Logs.py
vendored
133
third_party/waf/wafadmin/Logs.py
vendored
@ -1,133 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# Thomas Nagy, 2005 (ita)
|
||||
|
||||
import ansiterm
|
||||
import os, re, logging, traceback, sys
|
||||
from Constants import *
|
||||
|
||||
zones = ''
|
||||
verbose = 0
|
||||
|
||||
colors_lst = {
|
||||
'USE' : True,
|
||||
'BOLD' :'\x1b[01;1m',
|
||||
'RED' :'\x1b[01;31m',
|
||||
'GREEN' :'\x1b[32m',
|
||||
'YELLOW':'\x1b[33m',
|
||||
'PINK' :'\x1b[35m',
|
||||
'BLUE' :'\x1b[01;34m',
|
||||
'CYAN' :'\x1b[36m',
|
||||
'NORMAL':'\x1b[0m',
|
||||
'cursor_on' :'\x1b[?25h',
|
||||
'cursor_off' :'\x1b[?25l',
|
||||
}
|
||||
|
||||
got_tty = False
|
||||
term = os.environ.get('TERM', 'dumb')
|
||||
if not term in ['dumb', 'emacs']:
|
||||
try:
|
||||
got_tty = sys.stderr.isatty() or (sys.platform == 'win32' and term in ['xterm', 'msys'])
|
||||
except AttributeError:
|
||||
pass
|
||||
|
||||
import Utils
|
||||
|
||||
if not got_tty or 'NOCOLOR' in os.environ:
|
||||
colors_lst['USE'] = False
|
||||
|
||||
# test
|
||||
#if sys.platform == 'win32':
|
||||
# colors_lst['USE'] = True
|
||||
|
||||
def get_color(cl):
|
||||
if not colors_lst['USE']: return ''
|
||||
return colors_lst.get(cl, '')
|
||||
|
||||
class foo(object):
|
||||
def __getattr__(self, a):
|
||||
return get_color(a)
|
||||
def __call__(self, a):
|
||||
return get_color(a)
|
||||
|
||||
colors = foo()
|
||||
|
||||
re_log = re.compile(r'(\w+): (.*)', re.M)
|
||||
class log_filter(logging.Filter):
|
||||
def __init__(self, name=None):
|
||||
pass
|
||||
|
||||
def filter(self, rec):
|
||||
rec.c1 = colors.PINK
|
||||
rec.c2 = colors.NORMAL
|
||||
rec.zone = rec.module
|
||||
if rec.levelno >= logging.INFO:
|
||||
if rec.levelno >= logging.ERROR:
|
||||
rec.c1 = colors.RED
|
||||
elif rec.levelno >= logging.WARNING:
|
||||
rec.c1 = colors.YELLOW
|
||||
else:
|
||||
rec.c1 = colors.GREEN
|
||||
return True
|
||||
|
||||
zone = ''
|
||||
m = re_log.match(rec.msg)
|
||||
if m:
|
||||
zone = rec.zone = m.group(1)
|
||||
rec.msg = m.group(2)
|
||||
|
||||
if zones:
|
||||
return getattr(rec, 'zone', '') in zones or '*' in zones
|
||||
elif not verbose > 2:
|
||||
return False
|
||||
return True
|
||||
|
||||
class formatter(logging.Formatter):
|
||||
def __init__(self):
|
||||
logging.Formatter.__init__(self, LOG_FORMAT, HOUR_FORMAT)
|
||||
|
||||
def format(self, rec):
|
||||
if rec.levelno >= logging.WARNING or rec.levelno == logging.INFO:
|
||||
try:
|
||||
return '%s%s%s' % (rec.c1, rec.msg.decode('utf-8'), rec.c2)
|
||||
except:
|
||||
return rec.c1+rec.msg+rec.c2
|
||||
return logging.Formatter.format(self, rec)
|
||||
|
||||
def debug(*k, **kw):
|
||||
if verbose:
|
||||
k = list(k)
|
||||
k[0] = k[0].replace('\n', ' ')
|
||||
logging.debug(*k, **kw)
|
||||
|
||||
def error(*k, **kw):
|
||||
logging.error(*k, **kw)
|
||||
if verbose > 1:
|
||||
if isinstance(k[0], Utils.WafError):
|
||||
st = k[0].stack
|
||||
else:
|
||||
st = traceback.extract_stack()
|
||||
if st:
|
||||
st = st[:-1]
|
||||
buf = []
|
||||
for filename, lineno, name, line in st:
|
||||
buf.append(' File "%s", line %d, in %s' % (filename, lineno, name))
|
||||
if line:
|
||||
buf.append(' %s' % line.strip())
|
||||
if buf: logging.error("\n".join(buf))
|
||||
|
||||
warn = logging.warn
|
||||
info = logging.info
|
||||
|
||||
def init_log():
|
||||
log = logging.getLogger()
|
||||
log.handlers = []
|
||||
log.filters = []
|
||||
hdlr = logging.StreamHandler()
|
||||
hdlr.setFormatter(formatter())
|
||||
log.addHandler(hdlr)
|
||||
log.addFilter(log_filter())
|
||||
log.setLevel(logging.DEBUG)
|
||||
|
||||
# may be initialized more than once
|
||||
init_log()
|
701
third_party/waf/wafadmin/Node.py
vendored
701
third_party/waf/wafadmin/Node.py
vendored
@ -1,701 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# Thomas Nagy, 2005 (ita)
|
||||
|
||||
"""
|
||||
Node: filesystem structure, contains lists of nodes
|
||||
|
||||
IMPORTANT:
|
||||
1. Each file/folder is represented by exactly one node.
|
||||
|
||||
2. Most would-be class properties are stored in Build: nodes to depend on, signature, flags, ..
|
||||
unused class members increase the .wafpickle file size sensibly with lots of objects.
|
||||
|
||||
3. The build is launched from the top of the build dir (for example, in _build_/).
|
||||
|
||||
4. Node should not be instantiated directly.
|
||||
Each instance of Build.BuildContext has a Node subclass.
|
||||
(aka: 'Nodu', see BuildContext initializer)
|
||||
The BuildContext is referenced here as self.__class__.bld
|
||||
Its Node class is referenced here as self.__class__
|
||||
|
||||
The public and advertised apis are the following:
|
||||
${TGT} -> dir/to/file.ext
|
||||
${TGT[0].base()} -> dir/to/file
|
||||
${TGT[0].dir(env)} -> dir/to
|
||||
${TGT[0].file()} -> file.ext
|
||||
${TGT[0].file_base()} -> file
|
||||
${TGT[0].suffix()} -> .ext
|
||||
${TGT[0].abspath(env)} -> /path/to/dir/to/file.ext
|
||||
|
||||
"""
|
||||
|
||||
import os, sys, fnmatch, re, stat
|
||||
import Utils, Constants
|
||||
|
||||
UNDEFINED = 0
|
||||
DIR = 1
|
||||
FILE = 2
|
||||
BUILD = 3
|
||||
|
||||
type_to_string = {UNDEFINED: "unk", DIR: "dir", FILE: "src", BUILD: "bld"}
|
||||
|
||||
# These fnmatch expressions are used by default to prune the directory tree
|
||||
# while doing the recursive traversal in the find_iter method of the Node class.
|
||||
prune_pats = '.git .bzr .hg .svn _MTN _darcs CVS SCCS'.split()
|
||||
|
||||
# These fnmatch expressions are used by default to exclude files and dirs
|
||||
# while doing the recursive traversal in the find_iter method of the Node class.
|
||||
exclude_pats = prune_pats + '*~ #*# .#* %*% ._* .gitignore .cvsignore vssver.scc .DS_Store'.split()
|
||||
|
||||
# These Utils.jar_regexp expressions are used by default to exclude files and dirs and also prune the directory tree
|
||||
# while doing the recursive traversal in the ant_glob method of the Node class.
|
||||
exclude_regs = '''
|
||||
**/*~
|
||||
**/#*#
|
||||
**/.#*
|
||||
**/%*%
|
||||
**/._*
|
||||
**/CVS
|
||||
**/CVS/**
|
||||
**/.cvsignore
|
||||
**/SCCS
|
||||
**/SCCS/**
|
||||
**/vssver.scc
|
||||
**/.svn
|
||||
**/.svn/**
|
||||
**/.git
|
||||
**/.git/**
|
||||
**/.gitignore
|
||||
**/.bzr
|
||||
**/.bzr/**
|
||||
**/.hg
|
||||
**/.hg/**
|
||||
**/_MTN
|
||||
**/_MTN/**
|
||||
**/_darcs
|
||||
**/_darcs/**
|
||||
**/.DS_Store'''
|
||||
|
||||
class Node(object):
|
||||
__slots__ = ("name", "parent", "id", "childs")
|
||||
def __init__(self, name, parent, node_type = UNDEFINED):
|
||||
self.name = name
|
||||
self.parent = parent
|
||||
|
||||
# assumption: one build object at a time
|
||||
self.__class__.bld.id_nodes += 4
|
||||
self.id = self.__class__.bld.id_nodes + node_type
|
||||
|
||||
if node_type == DIR: self.childs = {}
|
||||
|
||||
# We do not want to add another type attribute (memory)
|
||||
# use the id to find out: type = id & 3
|
||||
# for setting: new type = type + x - type & 3
|
||||
|
||||
if parent and name in parent.childs:
|
||||
raise Utils.WafError('node %s exists in the parent files %r already' % (name, parent))
|
||||
|
||||
if parent: parent.childs[name] = self
|
||||
|
||||
def __setstate__(self, data):
|
||||
if len(data) == 4:
|
||||
(self.parent, self.name, self.id, self.childs) = data
|
||||
else:
|
||||
(self.parent, self.name, self.id) = data
|
||||
|
||||
def __getstate__(self):
|
||||
if getattr(self, 'childs', None) is None:
|
||||
return (self.parent, self.name, self.id)
|
||||
else:
|
||||
return (self.parent, self.name, self.id, self.childs)
|
||||
|
||||
def __str__(self):
|
||||
if not self.parent: return ''
|
||||
return "%s://%s" % (type_to_string[self.id & 3], self.abspath())
|
||||
|
||||
def __repr__(self):
|
||||
return self.__str__()
|
||||
|
||||
def __hash__(self):
|
||||
"expensive, make certain it is not used"
|
||||
raise Utils.WafError('nodes, you are doing it wrong')
|
||||
|
||||
def __copy__(self):
|
||||
"nodes are not supposed to be copied"
|
||||
raise Utils.WafError('nodes are not supposed to be cloned')
|
||||
|
||||
def get_type(self):
|
||||
return self.id & 3
|
||||
|
||||
def set_type(self, t):
|
||||
"dangerous, you are not supposed to use this"
|
||||
self.id = self.id + t - self.id & 3
|
||||
|
||||
def dirs(self):
|
||||
return [x for x in self.childs.values() if x.id & 3 == DIR]
|
||||
|
||||
def files(self):
|
||||
return [x for x in self.childs.values() if x.id & 3 == FILE]
|
||||
|
||||
def get_dir(self, name, default=None):
|
||||
node = self.childs.get(name, None)
|
||||
if not node or node.id & 3 != DIR: return default
|
||||
return node
|
||||
|
||||
def get_file(self, name, default=None):
|
||||
node = self.childs.get(name, None)
|
||||
if not node or node.id & 3 != FILE: return default
|
||||
return node
|
||||
|
||||
def get_build(self, name, default=None):
|
||||
node = self.childs.get(name, None)
|
||||
if not node or node.id & 3 != BUILD: return default
|
||||
return node
|
||||
|
||||
def find_resource(self, lst):
|
||||
"Find an existing input file: either a build node declared previously or a source node"
|
||||
if isinstance(lst, str):
|
||||
lst = Utils.split_path(lst)
|
||||
|
||||
if len(lst) == 1:
|
||||
parent = self
|
||||
else:
|
||||
parent = self.find_dir(lst[:-1])
|
||||
if not parent: return None
|
||||
self.__class__.bld.rescan(parent)
|
||||
|
||||
name = lst[-1]
|
||||
node = parent.childs.get(name, None)
|
||||
if node:
|
||||
tp = node.id & 3
|
||||
if tp == FILE or tp == BUILD:
|
||||
return node
|
||||
else:
|
||||
return None
|
||||
|
||||
tree = self.__class__.bld
|
||||
if not name in tree.cache_dir_contents[parent.id]:
|
||||
return None
|
||||
|
||||
path = parent.abspath() + os.sep + name
|
||||
try:
|
||||
st = Utils.h_file(path)
|
||||
except IOError:
|
||||
return None
|
||||
|
||||
child = self.__class__(name, parent, FILE)
|
||||
tree.node_sigs[0][child.id] = st
|
||||
return child
|
||||
|
||||
def find_or_declare(self, lst):
|
||||
"Used for declaring a build node representing a file being built"
|
||||
if isinstance(lst, str):
|
||||
lst = Utils.split_path(lst)
|
||||
|
||||
if len(lst) == 1:
|
||||
parent = self
|
||||
else:
|
||||
parent = self.find_dir(lst[:-1])
|
||||
if not parent: return None
|
||||
self.__class__.bld.rescan(parent)
|
||||
|
||||
name = lst[-1]
|
||||
node = parent.childs.get(name, None)
|
||||
if node:
|
||||
tp = node.id & 3
|
||||
if tp != BUILD:
|
||||
raise Utils.WafError('find_or_declare found a source file where a build file was expected %r' % '/'.join(lst))
|
||||
return node
|
||||
node = self.__class__(name, parent, BUILD)
|
||||
return node
|
||||
|
||||
def find_dir(self, lst):
	"search a folder in the filesystem"

	if isinstance(lst, str):
		lst = Utils.split_path(lst)

	# walk the path component by component, instantiating directory
	# nodes lazily as they are confirmed to exist on disk
	current = self
	for name in lst:
		self.__class__.bld.rescan(current)
		prev = current

		if not current.parent and name == current.name:
			# absolute path repeating the root name
			continue
		elif not name:
			continue
		elif name == '.':
			continue
		elif name == '..':
			current = current.parent or current
		else:
			current = prev.childs.get(name, None)
			if current is None:
				dir_cont = self.__class__.bld.cache_dir_contents
				if prev.id in dir_cont and name in dir_cont[prev.id]:
					if not prev.name:
						if os.sep == '/':
							# cygwin //machine/share
							dirname = os.sep + name
						else:
							# windows c:
							dirname = name
					else:
						# regular path
						dirname = prev.abspath() + os.sep + name
					if not os.path.isdir(dirname):
						return None
					current = self.__class__(name, prev, DIR)
				elif (not prev.name and len(name) == 2 and name[1] == ':') or name.startswith('\\\\'):
					# drive letter or \\ path for windows
					current = self.__class__(name, prev, DIR)
				else:
					return None
			else:
				# an existing child that is not a directory node means failure
				if current.id & 3 != DIR:
					return None
	return current
|
||||
|
||||
def ensure_dir_node_from_path(self, lst):
	"used very rarely, force the construction of a branch of node instance for representing folders"

	if isinstance(lst, str):
		lst = Utils.split_path(lst)

	# unlike find_dir, this never consults the filesystem: directory
	# nodes are created unconditionally for each missing component
	current = self
	for name in lst:
		if not name:
			continue
		elif name == '.':
			continue
		elif name == '..':
			current = current.parent or current
		else:
			prev = current
			current = prev.childs.get(name, None)
			if current is None:
				current = self.__class__(name, prev, DIR)
	return current
|
||||
|
||||
def exclusive_build_node(self, path):
	"""
	create a hierarchy in the build dir (no source folders) for ill-behaving compilers
	the node is not hashed, so you must do it manually

	after declaring such a node, find_dir and find_resource should work as expected
	"""
	lst = Utils.split_path(path)
	name = lst[-1]
	if len(lst) > 1:
		# try the normal lookup first; fall back to forcing the branch
		# into existence when the folders are missing on disk
		parent = None
		try:
			parent = self.find_dir(lst[:-1])
		except OSError:
			pass
		if not parent:
			parent = self.ensure_dir_node_from_path(lst[:-1])
			self.__class__.bld.rescan(parent)
		else:
			try:
				self.__class__.bld.rescan(parent)
			except OSError:
				# best effort: the directory may not exist yet
				pass
	else:
		parent = self

	node = parent.childs.get(name, None)
	if not node:
		node = self.__class__(name, parent, BUILD)

	return node
|
||||
|
||||
def path_to_parent(self, parent):
	"""Return the path of this node relative to the ancestor *parent*.

	An empty string is returned when *parent* is this node itself.
	"""
	segments = []
	node = self
	target_height = parent.height()
	current_height = node.height()
	# climb until both heights match, collecting names on the way up
	while current_height > target_height:
		segments.append(node.name)
		node = node.parent
		current_height -= 1
	if not segments:
		return ''
	segments.reverse()
	return os.path.join(*segments)
|
||||
|
||||
def find_ancestor(self, node):
	"""Return the closest ancestor shared by *self* and *node*.

	Used to compute the shortest relative path between two nodes.
	"""
	delta = self.height() - node.height()
	if delta < 0:
		# always walk from the deeper node
		return node.find_ancestor(self)
	# bring both cursors to the same depth
	walker = self
	while delta > 0:
		walker = walker.parent
		delta -= 1
	if walker == node:
		return walker
	# climb in lock-step until the two lineages merge
	other = node
	while walker.parent:
		walker = walker.parent
		other = other.parent
		if walker == other:
			return walker
|
||||
|
||||
def relpath_gen(self, from_node):
	"""Return the relative path leading from *from_node* to this node."""
	if self == from_node:
		return '.'
	if from_node.parent == self:
		return '..'

	# collect the downward segments from self and the '..' hops from
	# from_node, both up to their common ancestor
	common = self.find_ancestor(from_node)
	parts = []
	walker = self
	while walker.id != common.id:
		parts.append(walker.name)
		walker = walker.parent
	walker = from_node
	while walker.id != common.id:
		parts.append('..')
		walker = walker.parent
	parts.reverse()
	return os.sep.join(parts)
|
||||
|
||||
def nice_path(self, env=None):
	"printed in the console, open files easily from the launch directory"
	tree = self.__class__.bld
	ln = tree.launch_node()

	# source files: straight relative path from the launch directory;
	# build nodes: go through build dir + variant (env is required then)
	if self.id & 3 == FILE: return self.relpath_gen(ln)
	else: return os.path.join(tree.bldnode.relpath_gen(ln), env.variant(), self.relpath_gen(tree.srcnode))
|
||||
|
||||
def is_child_of(self, node):
	"""True if this node lies in the subtree rooted at *node*."""
	walker = self
	# climb exactly the height difference, then compare identities
	steps = self.height() - node.height()
	while steps > 0:
		steps -= 1
		walker = walker.parent
	return walker.id == node.id
|
||||
|
||||
def variant(self, env):
	"""Output directory (variant) for this node; sources always map to 0."""
	if not env or self.id & 3 == FILE:
		return 0
	return env.variant()
|
||||
|
||||
def height(self):
	"""Number of ancestors above this node (the root has height 0)."""
	# README a cache can be added here if necessary
	count = -1
	node = self
	while node:
		count += 1
		node = node.parent
	return count
|
||||
|
||||
# helpers for building things
|
||||
|
||||
def abspath(self, env=None):
	"""
	absolute path
	@param env [Environment]:
	  * obligatory for build nodes: build/variant/src/dir/bar.o
	  * optional for dirs: get either src/dir or build/variant/src/dir
	  * excluded for source nodes: src/dir/bar.c

	Instead of computing the absolute path each time again,
	store the already-computed absolute paths in one of (variants+1) dictionaries:
	bld.cache_node_abspath[0] holds absolute paths for source nodes.
	bld.cache_node_abspath[variant] holds the absolute path for the build nodes
	which reside in the variant given by env.
	"""
	## absolute path - hot zone, so do not touch

	# less expensive
	variant = (env and (self.id & 3 != FILE) and env.variant()) or 0

	# cache hit: the vast majority of the calls end here
	ret = self.__class__.bld.cache_node_abspath[variant].get(self.id, None)
	if ret: return ret

	if not variant:
		# source directory
		if not self.parent:
			val = os.sep == '/' and os.sep or ''
		elif not self.parent.name: # root
			val = (os.sep == '/' and os.sep or '') + self.name
		else:
			# recursion warms the cache for all the ancestors too
			val = self.parent.abspath() + os.sep + self.name
	else:
		# build directory
		val = os.sep.join((self.__class__.bld.bldnode.abspath(), variant, self.path_to_parent(self.__class__.bld.srcnode)))
	self.__class__.bld.cache_node_abspath[variant][self.id] = val
	return val
|
||||
|
||||
def change_ext(self, ext):
	"""Return a sibling build node whose extension is replaced by *ext*.

	The extension is everything from the last dot; *ext* is appended
	verbatim when the name has no dot. Hot zone, kept simple on purpose.
	"""
	base = self.name
	dot = base.rfind('.')
	if dot < 0:
		new_name = base + ext
	else:
		new_name = base[:dot] + ext
	return self.parent.find_or_declare([new_name])
|
||||
|
||||
def src_dir(self, env):
	"""Source path of the directory containing this node (no file name)."""
	container = self.parent
	return container.srcpath(env)
|
||||
|
||||
def bld_dir(self, env):
	"""Build path of the directory containing this node (no file name)."""
	container = self.parent
	return container.bldpath(env)
|
||||
|
||||
def bld_base(self, env):
	"""Build path of this node with the extension stripped: src/dir/foo(.cpp)."""
	stem = os.path.splitext(self.name)[0]
	folder = self.bld_dir(env)
	return os.path.join(folder, stem)
|
||||
|
||||
def bldpath(self, env=None):
	"""Return the path of this node as seen from the build directory.

	Source files are returned relative to the build node
	(e.g. ``../src/foo.cpp``); any other node yields ``variant/rel/path``.

	:param env: environment providing the variant name; required for
		non-source nodes (``env.variant()`` is called on it).
	"""
	if self.id & 3 == FILE:
		return self.relpath_gen(self.__class__.bld.bldnode)
	p = self.path_to_parent(self.__class__.bld.srcnode)
	# bugfix: the original tested "p is not ''" — an identity comparison
	# on a string literal, which only worked because CPython interns '';
	# it is undefined behaviour and a SyntaxWarning on modern Pythons.
	if p:
		return env.variant() + os.sep + p
	return env.variant()
|
||||
|
||||
def srcpath(self, env=None):
	"""Path from the build dir to this node in the source tree (../src/foo.cpp)."""
	if self.id & 3 == BUILD:
		# build nodes already live in the build tree
		return self.bldpath(env)
	reference = self.__class__.bld.bldnode
	return self.relpath_gen(reference)
|
||||
|
||||
def read(self, env):
	"""get the contents of a file, it is not used anywhere for the moment

	NOTE(review): this method is shadowed later in this file by the
	waf-1.8 style ``read(flags, encoding)`` backport, so this variant is
	unreachable on the final class.
	"""
	return Utils.readf(self.abspath(env))
|
||||
|
||||
def dir(self, env):
	"""Absolute path of the directory holding this node (scons-like)."""
	container = self.parent
	return container.abspath(env)
|
||||
|
||||
def file(self):
	"""File name of this node (scons-like)."""
	return self.name
|
||||
|
||||
def file_base(self):
	"""File name of this node without its extension (scons-like)."""
	stem, _ext = os.path.splitext(self.name)
	return stem
|
||||
|
||||
def suffix(self):
	"""Extension of this node including the dot (scons-like, hot zone).

	Note the historical quirk: a name with no dot returns the whole
	name (rfind yields -1, clamped to 0).
	"""
	dot = self.name.rfind('.')
	if dot < 0:
		dot = 0
	return self.name[dot:]
|
||||
|
||||
def find_iter_impl(self, src=True, bld=True, dir=True, accept_name=None, is_prune=None, maxdepth=25):
	"""find nodes in the filesystem hierarchy, try to instanciate the nodes passively; same gotcha as ant_glob

	NOTE(review): the trailing ``raise StopIteration`` inside this
	generator is a Python 2 idiom; under PEP 479 (Python 3.7+) it
	becomes a RuntimeError instead of ending the iteration.
	"""
	bld_ctx = self.__class__.bld
	bld_ctx.rescan(self)
	for name in bld_ctx.cache_dir_contents[self.id]:
		if accept_name(self, name):
			# matching name: yield files/dirs, recurse unless pruned
			node = self.find_resource(name)
			if node:
				if src and node.id & 3 == FILE:
					yield node
			else:
				node = self.find_dir(name)
				if node and node.id != bld_ctx.bldnode.id:
					if dir:
						yield node
					if not is_prune(self, name):
						if maxdepth:
							for k in node.find_iter_impl(src, bld, dir, accept_name, is_prune, maxdepth=maxdepth - 1):
								yield k
		else:
			# rejected name: still recurse into sub-folders unless pruned
			if not is_prune(self, name):
				node = self.find_resource(name)
				if not node:
					# not a file, it is a dir
					node = self.find_dir(name)
					if node and node.id != bld_ctx.bldnode.id:
						if maxdepth:
							for k in node.find_iter_impl(src, bld, dir, accept_name, is_prune, maxdepth=maxdepth - 1):
								yield k

	if bld:
		# build nodes exist only in memory, not in cache_dir_contents
		for node in self.childs.values():
			if node.id == bld_ctx.bldnode.id:
				continue
			if node.id & 3 == BUILD:
				if accept_name(self, node.name):
					yield node
	raise StopIteration
|
||||
|
||||
def find_iter(self, in_pat=['*'], ex_pat=exclude_pats, prune_pat=prune_pats, src=True, bld=True, dir=False, maxdepth=25, flat=False):
	"""find nodes recursively, this returns everything but folders by default; same gotcha as ant_glob

	NOTE(review): this function is NOT a generator (no yield), so the
	``raise StopIteration`` below propagates to the caller as a real
	exception when the node is not a directory — confirm callers guard
	against that.
	"""

	if not (src or bld or dir):
		raise StopIteration

	if self.id & 3 != DIR:
		raise StopIteration

	in_pat = Utils.to_list(in_pat)
	ex_pat = Utils.to_list(ex_pat)
	prune_pat = Utils.to_list(prune_pat)

	def accept_name(node, name):
		# exclusion patterns win over inclusion patterns
		for pat in ex_pat:
			if fnmatch.fnmatchcase(name, pat):
				return False
		for pat in in_pat:
			if fnmatch.fnmatchcase(name, pat):
				return True
		return False

	def is_prune(node, name):
		# folders matching these patterns are not descended into
		for pat in prune_pat:
			if fnmatch.fnmatchcase(name, pat):
				return True
		return False

	ret = self.find_iter_impl(src, bld, dir, accept_name, is_prune, maxdepth=maxdepth)
	if flat:
		return " ".join([x.relpath_gen(self) for x in ret])

	return ret
|
||||
|
||||
def ant_glob(self, *k, **kw):
	"""
	known gotcha: will enumerate the files, but only if the folder exists in the source directory

	Ant-style pattern matching: '**' matches any number of path
	segments, '*' and '?' match within one segment. The include
	pattern is the first positional argument or kw['incl'].
	"""

	src=kw.get('src', 1)
	bld=kw.get('bld', 0)
	dir=kw.get('dir', 0)
	excl = kw.get('excl', exclude_regs)
	incl = k and k[0] or kw.get('incl', '**')

	def to_pat(s):
		# translate each ant pattern into a list of '**' markers and
		# per-segment compiled regexes
		lst = Utils.to_list(s)
		ret = []
		for x in lst:
			x = x.replace('//', '/')
			if x.endswith('/'):
				x += '**'
			lst2 = x.split('/')
			accu = []
			for k in lst2:
				if k == '**':
					accu.append(k)
				else:
					k = k.replace('.', '[.]').replace('*', '.*').replace('?', '.')
					k = '^%s$' % k
					#print "pattern", k
					accu.append(re.compile(k))
			ret.append(accu)
		return ret

	def filtre(name, nn):
		# advance each pattern list by one path segment for *name*;
		# an empty list in the result means "fully matched"
		ret = []
		for lst in nn:
			if not lst:
				pass
			elif lst[0] == '**':
				ret.append(lst)
				if len(lst) > 1:
					if lst[1].match(name):
						ret.append(lst[2:])
				else:
					ret.append([])
			elif lst[0].match(name):
				ret.append(lst[1:])
		return ret

	def accept(name, pats):
		# pats is [include-patterns, exclude-patterns]; a full
		# exclusion match cancels all inclusion matches
		nacc = filtre(name, pats[0])
		nrej = filtre(name, pats[1])
		if [] in nrej:
			nacc = []
		return [nacc, nrej]

	def ant_iter(nodi, maxdepth=25, pats=[]):
		# NOTE(review): trailing "raise StopIteration" in a generator is
		# a Python 2 idiom, broken by PEP 479 on Python 3.7+
		nodi.__class__.bld.rescan(nodi)
		tmp = list(nodi.__class__.bld.cache_dir_contents[nodi.id])
		tmp.sort()
		for name in tmp:
			npats = accept(name, pats)
			if npats and npats[0]:
				accepted = [] in npats[0]
				#print accepted, nodi, name

				node = nodi.find_resource(name)
				if node and accepted:
					if src and node.id & 3 == FILE:
						yield node
				else:
					node = nodi.find_dir(name)
					if node and node.id != nodi.__class__.bld.bldnode.id:
						if accepted and dir:
							yield node
						if maxdepth:
							for k in ant_iter(node, maxdepth=maxdepth - 1, pats=npats):
								yield k
		if bld:
			# build nodes exist only in memory, enumerate them separately
			for node in nodi.childs.values():
				if node.id == nodi.__class__.bld.bldnode.id:
					continue
				if node.id & 3 == BUILD:
					npats = accept(node.name, pats)
					if npats and npats[0] and [] in npats[0]:
						yield node
		raise StopIteration

	ret = [x for x in ant_iter(self, pats=[to_pat(incl), to_pat(excl)])]

	# historical default: return a flat space-separated string
	if kw.get('flat', True):
		return " ".join([x.relpath_gen(self) for x in ret])

	return ret
|
||||
|
||||
def update_build_dir(self, env=None):
	"""Scan the build directory for this node and register its contents.

	With no *env*, the scan is repeated for every configured variant.
	NOTE(review): uses ``self.bld`` here but ``self.__class__.bld``
	below — confirm both resolve to the same build context.
	"""
	if not env:
		for env in self.bld.all_envs:
			self.update_build_dir(env)
		return

	path = self.abspath(env)

	lst = Utils.listdir(path)
	try:
		self.__class__.bld.cache_dir_contents[self.id].update(lst)
	except KeyError:
		self.__class__.bld.cache_dir_contents[self.id] = set(lst)
	self.__class__.bld.cache_scanned_folders[self.id] = True

	for k in lst:
		npath = path + os.sep + k
		st = os.stat(npath)
		if stat.S_ISREG(st[stat.ST_MODE]):
			# regular file: declare as build node with a null signature
			# NOTE(review): relies on the name 'Constants' being bound at
			# module level (the file also does 'from Constants import *')
			ick = self.find_or_declare(k)
			if not (ick.id in self.__class__.bld.node_sigs[env.variant()]):
				self.__class__.bld.node_sigs[env.variant()][ick.id] = Constants.SIG_NIL
		elif stat.S_ISDIR(st[stat.ST_MODE]):
			# sub-folder: recurse
			child = self.find_dir(k)
			if not child:
				child = self.ensure_dir_node_from_path(k)
			child.update_build_dir(env)
|
||||
|
||||
def read(self, flags='r', encoding='ISO8859-1'):
	"""backported from waf 1.8

	Return the file contents; this definition shadows the earlier
	``read(env)`` method above.
	"""
	return Utils.readf(self.abspath(), flags, encoding)
|
||||
|
||||
def write(self, data, flags='w', encoding='ISO8859-1'):
	"""backported from waf 1.8

	Write *data* to the file at this node's absolute path.
	NOTE(review): uses ``self.bld.env`` while the rest of this class
	uses ``self.__class__.bld`` — confirm the attribute exists on
	instances.
	"""
	Utils.writef(self.abspath(self.bld.env), data, flags, encoding)
|
||||
|
||||
class Nodu(Node):
	# NOTE(review): empty subclass of Node; appears to exist so that the
	# node class used at runtime can be replaced/extended without
	# touching Node itself — confirm against Build/Utils usage
	pass
|
287
third_party/waf/wafadmin/Options.py
vendored
287
third_party/waf/wafadmin/Options.py
vendored
@ -1,287 +0,0 @@
|
||||
#!/usr/bin/env python
# encoding: utf-8
# Scott Newton, 2005 (scottn)
# Thomas Nagy, 2006 (ita)

"Custom command-line options"

import os, sys, imp, types, tempfile, optparse
import Logs, Utils
from Constants import *

# recognized top-level waf commands
cmds = 'distclean configure build install clean uninstall check dist distcheck'.split()

# TODO remove in waf 1.6 the following two
commands = {}
is_install = False

# module-level state filled in by parse_args_impl
options = {}
arg_line = []
launch_dir = ''
tooldir = ''
lockfile = os.environ.get('WAFLOCK', '.lock-wscript')
try: cache_global = os.path.abspath(os.environ['WAFCACHE'])
except KeyError: cache_global = ''
platform = Utils.unversioned_sys_platform()
conf_file = 'conf-runs-%s-%d.pickle' % (platform, ABI)

remote_repo = ['http://waf.googlecode.com/svn/']
"""remote directory for the plugins"""


# Such a command-line should work: JOBS=4 PREFIX=/opt/ DESTDIR=/tmp/ahoj/ waf configure
default_prefix = os.environ.get('PREFIX')
if not default_prefix:
	if platform == 'win32':
		d = tempfile.gettempdir()
		default_prefix = d[0].upper() + d[1:]
		# win32 preserves the case, but gettempdir does not
	else: default_prefix = '/usr/local/'

# NOTE(review): os.environ.get returns a *string* when JOBS is set, so
# "default_jobs < 1" compares str with int — legal only on Python 2,
# where it silently evaluates False and the string is kept as default
default_jobs = os.environ.get('JOBS', -1)
if default_jobs < 1:
	try:
		if 'SC_NPROCESSORS_ONLN' in os.sysconf_names:
			default_jobs = os.sysconf('SC_NPROCESSORS_ONLN')
		else:
			default_jobs = int(Utils.cmd_output(['sysctl', '-n', 'hw.ncpu']))
	except:
		if os.name == 'java': # platform.system() == 'Java'
			from java.lang import Runtime
			default_jobs = Runtime.getRuntime().availableProcessors()
		else:
			# environment var defined on win32
			default_jobs = int(os.environ.get('NUMBER_OF_PROCESSORS', 1))

default_destdir = os.environ.get('DESTDIR', '')
|
||||
|
||||
def get_usage(self):
	"""Build the usage text shown by ``waf --help``.

	Monkey-patched onto optparse.OptionParser below; the command list
	is derived from the functions defined in the project wscript.
	"""
	cmds_str = []
	module = Utils.g_module
	if module:
		# create the help messages for commands
		tbl = module.__dict__
		keys = list(tbl.keys())
		keys.sort()

		if 'build' in tbl:
			if not module.build.__doc__:
				module.build.__doc__ = 'builds the project'
		if 'configure' in tbl:
			if not module.configure.__doc__:
				module.configure.__doc__ = 'configures the project'

		ban = ['set_options', 'init', 'shutdown']

		# keep public, documented, plain functions only
		# (type(parse_args_impl) is the function type)
		optlst = [x for x in keys if not x in ban
			and type(tbl[x]) is type(parse_args_impl)
			and tbl[x].__doc__
			and not x.startswith('_')]

		just = max([len(x) for x in optlst])

		for x in optlst:
			cmds_str.append(' %s: %s' % (x.ljust(just), tbl[x].__doc__))
		ret = '\n'.join(cmds_str)
	else:
		ret = ' '.join(cmds)
	return '''waf [command] [options]

Main commands (example: ./waf build -j4)
%s
''' % ret


# replace the stock optparse usage text with the waf-aware one
setattr(optparse.OptionParser, 'get_usage', get_usage)
|
||||
|
||||
def create_parser(module=None):
	"""Create and return the optparse parser carrying waf's standard options."""
	Logs.debug('options: create_parser is called')
	parser = optparse.OptionParser(conflict_handler="resolve", version = 'waf %s (%s)' % (WAFVERSION, WAFREVISION))

	parser.formatter.width = Utils.get_term_cols()
	p = parser.add_option

	p('-j', '--jobs',
		type    = 'int',
		default = default_jobs,
		help    = 'amount of parallel jobs (%r)' % default_jobs,
		dest    = 'jobs')

	p('-k', '--keep',
		action  = 'store_true',
		default = False,
		help    = 'keep running happily on independent task groups',
		dest    = 'keep')

	p('-v', '--verbose',
		action  = 'count',
		default = 0,
		help    = 'verbosity level -v -vv or -vvv [default: 0]',
		dest    = 'verbose')

	p('--nocache',
		action  = 'store_true',
		default = False,
		help    = 'ignore the WAFCACHE (if set)',
		dest    = 'nocache')

	p('--zones',
		action  = 'store',
		default = '',
		help    = 'debugging zones (task_gen, deps, tasks, etc)',
		dest    = 'zones')

	p('-p', '--progress',
		action  = 'count',
		default = 0,
		help    = '-p: progress bar; -pp: ide output',
		dest    = 'progress_bar')

	p('--targets',
		action  = 'store',
		default = '',
		help    = 'build given task generators, e.g. "target1,target2"',
		dest    = 'compile_targets')

	# options honoured at configuration time
	gr = optparse.OptionGroup(parser, 'configuration options')
	parser.add_option_group(gr)
	gr.add_option('-b', '--blddir',
		action  = 'store',
		default = '',
		help    = 'out dir for the project (configuration)',
		dest    = 'blddir')
	gr.add_option('-s', '--srcdir',
		action  = 'store',
		default = '',
		help    = 'top dir for the project (configuration)',
		dest    = 'srcdir')
	gr.add_option('--prefix',
		help    = 'installation prefix (configuration) [default: %r]' % default_prefix,
		default = default_prefix,
		dest    = 'prefix')

	gr.add_option('--download',
		action  = 'store_true',
		default = False,
		help    = 'try to download the tools if missing',
		dest    = 'download')

	# options honoured at install time
	gr = optparse.OptionGroup(parser, 'installation options')
	parser.add_option_group(gr)
	gr.add_option('--destdir',
		help    = 'installation root [default: %r]' % default_destdir,
		default = default_destdir,
		dest    = 'destdir')
	gr.add_option('-f', '--force',
		action  = 'store_true',
		default = False,
		help    = 'force file installation',
		dest    = 'force')

	return parser
|
||||
|
||||
def parse_args_impl(parser, _args=None):
	"""Parse the command line and populate the module-level state.

	Fills in ``options``, ``commands`` and ``arg_line``, normalizes the
	command sequence (implicit build, init first, build before check)
	and configures the logging verbosity/zones.
	"""
	global options, commands, arg_line
	(options, args) = parser.parse_args(args=_args)

	arg_line = args
	#arg_line = args[:] # copy

	# By default, 'waf' is equivalent to 'waf build'
	commands = {}
	for var in cmds: commands[var] = 0
	if not args:
		commands['build'] = 1
		args.append('build')

	# Parse the command arguments
	for arg in args:
		commands[arg] = True

	# the check thing depends on the build
	if 'check' in args:
		idx = args.index('check')
		try:
			# ValueError is raised either by index() (no 'build' given)
			# or deliberately below ('build' listed after 'check');
			# either way 'build' is inserted before 'check'
			bidx = args.index('build')
			if bidx > idx:
				raise ValueError('build before check')
		except ValueError, e:
			args.insert(idx, 'build')

	if args[0] != 'init':
		args.insert(0, 'init')

	# TODO -k => -j0
	if options.keep: options.jobs = 1
	if options.jobs < 1: options.jobs = 1

	if 'install' in sys.argv or 'uninstall' in sys.argv:
		# absolute path only if set
		options.destdir = options.destdir and os.path.abspath(os.path.expanduser(options.destdir))

	Logs.verbose = options.verbose
	Logs.init_log()

	if options.zones:
		Logs.zones = options.zones.split(',')
		if not Logs.verbose: Logs.verbose = 1
	elif Logs.verbose > 0:
		Logs.zones = ['runner']
	if Logs.verbose > 2:
		Logs.zones = ['*']
|
||||
|
||||
# TODO waf 1.6
|
||||
# 1. rename the class to OptionsContext
|
||||
# 2. instead of a class attribute, use a module (static 'parser')
|
||||
# 3. parse_args_impl was made in times when we did not know about binding new methods to classes
|
||||
|
||||
class Handler(Utils.Context):
	"""loads wscript modules in folders for adding options
	This class should be named 'OptionsContext'
	A method named 'recurse' is bound when used by the module Scripting"""

	parser = None
	# make it possible to access the reference, like Build.bld
	# NOTE(review): __init__ assigns the Handler *instance* (not the
	# optparse parser) to this class attribute, while the instance
	# attribute of the same name holds the optparse parser — confusing
	# but relied upon elsewhere

	def __init__(self, module=None):
		# the optparse parser (instance attribute shadows the class one)
		self.parser = create_parser(module)
		self.cwd = os.getcwd()
		Handler.parser = self

	def add_option(self, *k, **kw):
		# delegate to the underlying optparse parser
		self.parser.add_option(*k, **kw)

	def add_option_group(self, *k, **kw):
		return self.parser.add_option_group(*k, **kw)

	def get_option_group(self, opt_str):
		return self.parser.get_option_group(opt_str)

	def sub_options(self, *k, **kw):
		# recurse into a sub-folder's wscript to collect its options
		if not k: raise Utils.WscriptError('folder expected')
		self.recurse(k[0], name='set_options')

	def tool_options(self, *k, **kw):
		"""Load the given waf tools and let each add its own options."""
		Utils.python_24_guard()

		if not k[0]:
			raise Utils.WscriptError('invalid tool_options call %r %r' % (k, kw))
		tools = Utils.to_list(k[0])

		# TODO waf 1.6 remove the global variable tooldir
		path = Utils.to_list(kw.get('tdir', kw.get('tooldir', tooldir)))

		for tool in tools:
			# historical aliases for tool module names
			tool = tool.replace('++', 'xx')
			if tool == 'java': tool = 'javaw'
			if tool.lower() == 'unittest': tool = 'unittestw'
			module = Utils.load_tool(tool, path)
			try:
				fun = module.set_options
			except AttributeError:
				# tools without options are fine
				pass
			else:
				fun(kw.get('option_group', self))

	def parse_args(self, args=None):
		parse_args_impl(self.parser, args)
|
235
third_party/waf/wafadmin/Runner.py
vendored
235
third_party/waf/wafadmin/Runner.py
vendored
@ -1,235 +0,0 @@
|
||||
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2005-2008 (ita)

"Execute the tasks"

import os, sys, random, time, threading, traceback
try: from Queue import Queue
except ImportError: from queue import Queue
import Build, Utils, Logs, Options
from Logs import debug, error
from Constants import *

# scheduler slack: how many tasks beyond the number of worker threads
# may be in flight before the producer blocks (see refill_task_list)
GAP = 15
|
||||
|
||||
run_old = threading.Thread.run
def run(*args, **kwargs):
	"""Replacement for threading.Thread.run.

	Lets KeyboardInterrupt/SystemExit propagate, and routes any other
	uncaught exception through sys.excepthook so errors in worker
	threads are printed instead of being silently dropped.
	"""
	try:
		run_old(*args, **kwargs)
	except (KeyboardInterrupt, SystemExit):
		raise
	except:
		sys.excepthook(*sys.exc_info())
# monkey-patch applied at import time for every thread created afterwards
threading.Thread.run = run
|
||||
|
||||
def process_task(tsk):
	"""Execute one task and report it back to its master (Parallel).

	Always ends by putting the task on ``master.out``; sets
	``tsk.hasrun`` to SUCCESS/CRASHED/EXCEPTION accordingly.
	"""
	m = tsk.master
	if m.stop:
		# the build is aborting: hand the task straight back
		m.out.put(tsk)
		return

	try:
		tsk.generator.bld.printout(tsk.display())
		if tsk.__class__.stat: ret = tsk.__class__.stat(tsk)
		# actual call to task's run() function
		else: ret = tsk.call_run()
	except Exception, e:
		tsk.err_msg = Utils.ex_stack()
		tsk.hasrun = EXCEPTION

		# TODO cleanup
		m.error_handler(tsk)
		m.out.put(tsk)
		return

	if ret:
		# a non-zero return code means the command failed
		tsk.err_code = ret
		tsk.hasrun = CRASHED
	else:
		try:
			tsk.post_run()
		except Utils.WafError:
			pass
		except Exception:
			tsk.err_msg = Utils.ex_stack()
			tsk.hasrun = EXCEPTION
		else:
			tsk.hasrun = SUCCESS
	if tsk.hasrun != SUCCESS:
		m.error_handler(tsk)

	m.out.put(tsk)
|
||||
|
||||
class TaskConsumer(threading.Thread):
	"""Daemon worker thread: pulls tasks from the shared queue forever."""

	# queue of tasks ready to execute, shared by all consumers
	ready = Queue(0)
	# the pool of consumer threads created so far (see Parallel.start)
	consumers = []

	def __init__(self):
		threading.Thread.__init__(self)
		# daemon threads do not prevent interpreter shutdown
		self.setDaemon(1)
		self.start()

	def run(self):
		try:
			self.loop()
		except:
			# deliberate best-effort: a dying consumer must not take
			# the whole build down (errors are reported per-task)
			pass

	def loop(self):
		while 1:
			tsk = TaskConsumer.ready.get()
			process_task(tsk)
|
||||
|
||||
class Parallel(object):
	"""
	keep the consumer threads busy, and avoid consuming cpu cycles
	when no more tasks can be added (end of the build, etc)
	"""
	def __init__(self, bld, j=2):

		# number of consumers
		self.numjobs = j

		self.manager = bld.task_manager
		self.manager.current_group = 0

		# total task count, used for the progress indicator
		self.total = self.manager.total()

		# tasks waiting to be processed - IMPORTANT
		self.outstanding = []
		self.maxjobs = MAXJOBS

		# tasks that are awaiting for another task to complete
		self.frozen = []

		# tasks returned by the consumers
		self.out = Queue(0)

		self.count = 0 # tasks not in the producer area

		self.processed = 1 # progress indicator

		self.stop = False # error condition to stop the build
		self.error = False # error flag

	def get_next(self):
		"override this method to schedule the tasks in a particular order"
		if not self.outstanding:
			return None
		return self.outstanding.pop(0)

	def postpone(self, tsk):
		"override this method to schedule the tasks in a particular order"
		# TODO consider using a deque instead
		# random placement breaks pathological dependency orderings
		if random.randint(0, 1):
			self.frozen.insert(0, tsk)
		else:
			self.frozen.append(tsk)

	def refill_task_list(self):
		"called to set the next group of tasks"

		# throttle: block until the in-flight count drops below the caps
		while self.count > self.numjobs + GAP or self.count >= self.maxjobs:
			self.get_out()

		while not self.outstanding:
			if self.count:
				self.get_out()

			if self.frozen:
				# retry the postponed tasks
				self.outstanding += self.frozen
				self.frozen = []
			elif not self.count:
				# current group exhausted: move to the next task group
				(jobs, tmp) = self.manager.get_next_set()
				if jobs != None: self.maxjobs = jobs
				if tmp: self.outstanding += tmp
				break

	def get_out(self):
		"the tasks that are put to execute are all collected using get_out"
		ret = self.out.get()
		self.manager.add_finished(ret)
		if not self.stop and getattr(ret, 'more_tasks', None):
			# a task may spawn follow-up tasks at runtime
			self.outstanding += ret.more_tasks
			self.total += len(ret.more_tasks)
		self.count -= 1

	def error_handler(self, tsk):
		"by default, errors make the build stop (not thread safe so be careful)"
		if not Options.options.keep:
			self.stop = True
		self.error = True

	def start(self):
		"execute the tasks"

		if TaskConsumer.consumers:
			# the worker pool is usually loaded lazily (see below)
			# in case it is re-used with a different value of numjobs:
			while len(TaskConsumer.consumers) < self.numjobs:
				TaskConsumer.consumers.append(TaskConsumer())

		while not self.stop:

			self.refill_task_list()

			# consider the next task
			tsk = self.get_next()
			if not tsk:
				if self.count:
					# tasks may add new ones after they are run
					continue
				else:
					# no tasks to run, no tasks running, time to exit
					break

			if tsk.hasrun:
				# if the task is marked as "run", just skip it
				self.processed += 1
				self.manager.add_finished(tsk)
				continue

			try:
				st = tsk.runnable_status()
			except Exception, e:
				self.processed += 1
				if self.stop and not Options.options.keep:
					# build already aborting: record the task as skipped
					tsk.hasrun = SKIPPED
					self.manager.add_finished(tsk)
					continue
				self.error_handler(tsk)
				self.manager.add_finished(tsk)
				tsk.hasrun = EXCEPTION
				tsk.err_msg = Utils.ex_stack()
				continue

			if st == ASK_LATER:
				self.postpone(tsk)
			elif st == SKIP_ME:
				self.processed += 1
				tsk.hasrun = SKIPPED
				self.manager.add_finished(tsk)
			else:
				# run me: put the task in ready queue
				tsk.position = (self.processed, self.total)
				self.count += 1
				tsk.master = self
				self.processed += 1

				if self.numjobs == 1:
					# serial build: run in-process, no threads involved
					process_task(tsk)
				else:
					TaskConsumer.ready.put(tsk)
					# create the consumer threads only if there is something to consume
					if not TaskConsumer.consumers:
						TaskConsumer.consumers = [TaskConsumer() for i in xrange(self.numjobs)]

		# self.count represents the tasks that have been made available to the consumer threads
		# collect all the tasks after an error else the message may be incomplete
		while self.error and self.count:
			self.get_out()

		#print loop
		assert (self.count == 0 or self.stop)
|
585
third_party/waf/wafadmin/Scripting.py
vendored
585
third_party/waf/wafadmin/Scripting.py
vendored
@ -1,585 +0,0 @@
|
||||
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2005 (ita)

"Module called for configuring, compiling and installing targets"

import os, sys, shutil, traceback, datetime, inspect, errno

import Utils, Configure, Build, Logs, Options, Environment, Task
from Logs import error, warn, info
from Constants import *

# compression suffix used for the project archives — presumably by the
# dist/distcheck commands; confirm against the rest of this module
g_gz = 'bz2'
# queue of command names still to be executed
commands = []
|
||||
|
||||
def prepare_impl(t, cwd, ver, wafdir):
|
||||
Options.tooldir = [t]
|
||||
Options.launch_dir = cwd
|
||||
|
||||
# some command-line options can be processed immediately
|
||||
if '--version' in sys.argv:
|
||||
opt_obj = Options.Handler()
|
||||
opt_obj.curdir = cwd
|
||||
opt_obj.parse_args()
|
||||
sys.exit(0)
|
||||
|
||||
# now find the wscript file
|
||||
msg1 = 'Waf: Please run waf from a directory containing a file named "%s" or run distclean' % WSCRIPT_FILE
|
||||
|
||||
# in theory projects can be configured in an autotool-like manner:
|
||||
# mkdir build && cd build && ../waf configure && ../waf
|
||||
build_dir_override = None
|
||||
candidate = None
|
||||
|
||||
lst = os.listdir(cwd)
|
||||
|
||||
search_for_candidate = True
|
||||
if WSCRIPT_FILE in lst:
|
||||
candidate = cwd
|
||||
|
||||
elif 'configure' in sys.argv and not WSCRIPT_BUILD_FILE in lst:
|
||||
# autotool-like configuration
|
||||
calldir = os.path.abspath(os.path.dirname(sys.argv[0]))
|
||||
if WSCRIPT_FILE in os.listdir(calldir):
|
||||
candidate = calldir
|
||||
search_for_candidate = False
|
||||
else:
|
||||
error('arg[0] directory does not contain a wscript file')
|
||||
sys.exit(1)
|
||||
build_dir_override = cwd
|
||||
|
||||
# climb up to find a script if it is not found
|
||||
while search_for_candidate:
|
||||
if len(cwd) <= 3:
|
||||
break # stop at / or c:
|
||||
dirlst = os.listdir(cwd)
|
||||
if WSCRIPT_FILE in dirlst:
|
||||
candidate = cwd
|
||||
if 'configure' in sys.argv and candidate:
|
||||
break
|
||||
if Options.lockfile in dirlst:
|
||||
env = Environment.Environment()
|
||||
try:
|
||||
env.load(os.path.join(cwd, Options.lockfile))
|
||||
except:
|
||||
error('could not load %r' % Options.lockfile)
|
||||
try:
|
||||
os.stat(env['cwd'])
|
||||
except:
|
||||
candidate = cwd
|
||||
else:
|
||||
candidate = env['cwd']
|
||||
break
|
||||
cwd = os.path.dirname(cwd) # climb up
|
||||
|
||||
if not candidate:
|
||||
# check if the user only wanted to display the help
|
||||
if '-h' in sys.argv or '--help' in sys.argv:
|
||||
warn('No wscript file found: the help message may be incomplete')
|
||||
opt_obj = Options.Handler()
|
||||
opt_obj.curdir = cwd
|
||||
opt_obj.parse_args()
|
||||
else:
|
||||
error(msg1)
|
||||
sys.exit(0)
|
||||
|
||||
# We have found wscript, but there is no guarantee that it is valid
|
||||
try:
|
||||
os.chdir(candidate)
|
||||
except OSError:
|
||||
raise Utils.WafError("the folder %r is unreadable" % candidate)
|
||||
|
||||
# define the main module containing the functions init, shutdown, ..
|
||||
Utils.set_main_module(os.path.join(candidate, WSCRIPT_FILE))
|
||||
|
||||
if build_dir_override:
|
||||
d = getattr(Utils.g_module, BLDDIR, None)
|
||||
if d:
|
||||
# test if user has set the blddir in wscript.
|
||||
msg = ' Overriding build directory %s with %s' % (d, build_dir_override)
|
||||
warn(msg)
|
||||
Utils.g_module.blddir = build_dir_override
|
||||
|
||||
# bind a few methods and classes by default
|
||||
|
||||
def set_def(obj, name=''):
|
||||
n = name or obj.__name__
|
||||
if not n in Utils.g_module.__dict__:
|
||||
setattr(Utils.g_module, n, obj)
|
||||
|
||||
for k in [dist, distclean, distcheck, clean, install, uninstall]:
|
||||
set_def(k)
|
||||
|
||||
set_def(Configure.ConfigurationContext, 'configure_context')
|
||||
|
||||
for k in ['build', 'clean', 'install', 'uninstall']:
|
||||
set_def(Build.BuildContext, k + '_context')
|
||||
|
||||
# now parse the options from the user wscript file
|
||||
opt_obj = Options.Handler(Utils.g_module)
|
||||
opt_obj.curdir = candidate
|
||||
try:
|
||||
f = Utils.g_module.set_options
|
||||
except AttributeError:
|
||||
pass
|
||||
else:
|
||||
opt_obj.sub_options([''])
|
||||
opt_obj.parse_args()
|
||||
|
||||
if not 'init' in Utils.g_module.__dict__:
|
||||
Utils.g_module.init = Utils.nada
|
||||
if not 'shutdown' in Utils.g_module.__dict__:
|
||||
Utils.g_module.shutdown = Utils.nada
|
||||
|
||||
main()
|
||||
|
||||
def prepare(t, cwd, ver, wafdir):
|
||||
if WAFVERSION != ver:
|
||||
msg = 'Version mismatch: waf %s <> wafadmin %s (wafdir %s)' % (ver, WAFVERSION, wafdir)
|
||||
print('\033[91mError: %s\033[0m' % msg)
|
||||
sys.exit(1)
|
||||
|
||||
#"""
|
||||
try:
|
||||
prepare_impl(t, cwd, ver, wafdir)
|
||||
except Utils.WafError, e:
|
||||
error(str(e))
|
||||
sys.exit(1)
|
||||
except KeyboardInterrupt:
|
||||
Utils.pprint('RED', 'Interrupted')
|
||||
sys.exit(68)
|
||||
"""
|
||||
import cProfile, pstats
|
||||
cProfile.runctx("import Scripting; Scripting.prepare_impl(t, cwd, ver, wafdir)", {},
|
||||
{'t': t, 'cwd':cwd, 'ver':ver, 'wafdir':wafdir},
|
||||
'profi.txt')
|
||||
p = pstats.Stats('profi.txt')
|
||||
p.sort_stats('time').print_stats(45)
|
||||
#"""
|
||||
|
||||
def main():
|
||||
global commands
|
||||
commands = Options.arg_line[:]
|
||||
|
||||
while commands:
|
||||
x = commands.pop(0)
|
||||
|
||||
ini = datetime.datetime.now()
|
||||
if x == 'configure':
|
||||
fun = configure
|
||||
elif x == 'build':
|
||||
fun = build
|
||||
else:
|
||||
fun = getattr(Utils.g_module, x, None)
|
||||
|
||||
if not fun:
|
||||
raise Utils.WscriptError('No such command %r' % x)
|
||||
|
||||
ctx = getattr(Utils.g_module, x + '_context', Utils.Context)()
|
||||
|
||||
if x in ['init', 'shutdown', 'dist', 'distclean', 'distcheck']:
|
||||
# compatibility TODO remove in waf 1.6
|
||||
try:
|
||||
fun(ctx)
|
||||
except TypeError:
|
||||
fun()
|
||||
else:
|
||||
fun(ctx)
|
||||
|
||||
ela = ''
|
||||
if not Options.options.progress_bar:
|
||||
ela = ' (%s)' % Utils.get_elapsed_time(ini)
|
||||
|
||||
if x != 'init' and x != 'shutdown':
|
||||
info('%r finished successfully%s' % (x, ela))
|
||||
|
||||
if not commands and x != 'shutdown':
|
||||
commands.append('shutdown')
|
||||
|
||||
def configure(conf):
|
||||
|
||||
src = getattr(Options.options, SRCDIR, None)
|
||||
if not src: src = getattr(Utils.g_module, SRCDIR, None)
|
||||
if not src: src = getattr(Utils.g_module, 'top', None)
|
||||
if not src:
|
||||
src = '.'
|
||||
incomplete_src = 1
|
||||
src = os.path.abspath(src)
|
||||
|
||||
bld = getattr(Options.options, BLDDIR, None)
|
||||
if not bld: bld = getattr(Utils.g_module, BLDDIR, None)
|
||||
if not bld: bld = getattr(Utils.g_module, 'out', None)
|
||||
if not bld:
|
||||
bld = 'build'
|
||||
incomplete_bld = 1
|
||||
if bld == '.':
|
||||
raise Utils.WafError('Setting blddir="." may cause distclean problems')
|
||||
bld = os.path.abspath(bld)
|
||||
|
||||
try: os.makedirs(bld)
|
||||
except OSError: pass
|
||||
|
||||
# It is not possible to compile specific targets in the configuration
|
||||
# this may cause configuration errors if autoconfig is set
|
||||
targets = Options.options.compile_targets
|
||||
Options.options.compile_targets = None
|
||||
Options.is_install = False
|
||||
|
||||
conf.srcdir = src
|
||||
conf.blddir = bld
|
||||
conf.post_init()
|
||||
|
||||
if 'incomplete_src' in vars():
|
||||
conf.check_message_1('Setting srcdir to')
|
||||
conf.check_message_2(src)
|
||||
if 'incomplete_bld' in vars():
|
||||
conf.check_message_1('Setting blddir to')
|
||||
conf.check_message_2(bld)
|
||||
|
||||
# calling to main wscript's configure()
|
||||
conf.sub_config([''])
|
||||
|
||||
conf.store()
|
||||
|
||||
# this will write a configure lock so that subsequent builds will
|
||||
# consider the current path as the root directory (see prepare_impl).
|
||||
# to remove: use 'waf distclean'
|
||||
env = Environment.Environment()
|
||||
env[BLDDIR] = bld
|
||||
env[SRCDIR] = src
|
||||
env['argv'] = sys.argv
|
||||
env['commands'] = Options.commands
|
||||
env['options'] = Options.options.__dict__
|
||||
|
||||
# conf.hash & conf.files hold wscript files paths and hash
|
||||
# (used only by Configure.autoconfig)
|
||||
env['hash'] = conf.hash
|
||||
env['files'] = conf.files
|
||||
env['environ'] = dict(conf.environ)
|
||||
env['cwd'] = os.path.split(Utils.g_module.root_path)[0]
|
||||
|
||||
if Utils.g_module.root_path != src:
|
||||
# in case the source dir is somewhere else
|
||||
env.store(os.path.join(src, Options.lockfile))
|
||||
|
||||
env.store(Options.lockfile)
|
||||
|
||||
Options.options.compile_targets = targets
|
||||
|
||||
def clean(bld):
|
||||
'''removes the build files'''
|
||||
try:
|
||||
proj = Environment.Environment(Options.lockfile)
|
||||
except IOError:
|
||||
raise Utils.WafError('Nothing to clean (project not configured)')
|
||||
|
||||
bld.load_dirs(proj[SRCDIR], proj[BLDDIR])
|
||||
bld.load_envs()
|
||||
|
||||
bld.is_install = 0 # False
|
||||
|
||||
# read the scripts - and set the path to the wscript path (useful for srcdir='/foo/bar')
|
||||
bld.add_subdirs([os.path.split(Utils.g_module.root_path)[0]])
|
||||
|
||||
try:
|
||||
bld.clean()
|
||||
finally:
|
||||
bld.save()
|
||||
|
||||
def check_configured(bld):
|
||||
if not Configure.autoconfig:
|
||||
return bld
|
||||
|
||||
conf_cls = getattr(Utils.g_module, 'configure_context', Utils.Context)
|
||||
bld_cls = getattr(Utils.g_module, 'build_context', Utils.Context)
|
||||
|
||||
def reconf(proj):
|
||||
back = (Options.commands, Options.options.__dict__, Logs.zones, Logs.verbose)
|
||||
|
||||
Options.commands = proj['commands']
|
||||
Options.options.__dict__ = proj['options']
|
||||
conf = conf_cls()
|
||||
conf.environ = proj['environ']
|
||||
configure(conf)
|
||||
|
||||
(Options.commands, Options.options.__dict__, Logs.zones, Logs.verbose) = back
|
||||
|
||||
try:
|
||||
proj = Environment.Environment(Options.lockfile)
|
||||
except IOError:
|
||||
conf = conf_cls()
|
||||
configure(conf)
|
||||
else:
|
||||
try:
|
||||
bld = bld_cls()
|
||||
bld.load_dirs(proj[SRCDIR], proj[BLDDIR])
|
||||
bld.load_envs()
|
||||
except Utils.WafError:
|
||||
reconf(proj)
|
||||
return bld_cls()
|
||||
|
||||
try:
|
||||
proj = Environment.Environment(Options.lockfile)
|
||||
except IOError:
|
||||
raise Utils.WafError('Auto-config: project does not configure (bug)')
|
||||
|
||||
h = 0
|
||||
try:
|
||||
for file in proj['files']:
|
||||
if file.endswith('configure'):
|
||||
h = hash((h, Utils.readf(file)))
|
||||
else:
|
||||
mod = Utils.load_module(file)
|
||||
h = hash((h, mod.waf_hash_val))
|
||||
except (OSError, IOError):
|
||||
warn('Reconfiguring the project: a file is unavailable')
|
||||
reconf(proj)
|
||||
else:
|
||||
if (h != proj['hash']):
|
||||
warn('Reconfiguring the project: the configuration has changed')
|
||||
reconf(proj)
|
||||
|
||||
return bld_cls()
|
||||
|
||||
def install(bld):
|
||||
'''installs the build files'''
|
||||
bld = check_configured(bld)
|
||||
|
||||
Options.commands['install'] = True
|
||||
Options.commands['uninstall'] = False
|
||||
Options.is_install = True
|
||||
|
||||
bld.is_install = INSTALL
|
||||
|
||||
build_impl(bld)
|
||||
bld.install()
|
||||
|
||||
def uninstall(bld):
|
||||
'''removes the installed files'''
|
||||
Options.commands['install'] = False
|
||||
Options.commands['uninstall'] = True
|
||||
Options.is_install = True
|
||||
|
||||
bld.is_install = UNINSTALL
|
||||
|
||||
try:
|
||||
def runnable_status(self):
|
||||
return SKIP_ME
|
||||
setattr(Task.Task, 'runnable_status_back', Task.Task.runnable_status)
|
||||
setattr(Task.Task, 'runnable_status', runnable_status)
|
||||
|
||||
build_impl(bld)
|
||||
bld.install()
|
||||
finally:
|
||||
setattr(Task.Task, 'runnable_status', Task.Task.runnable_status_back)
|
||||
|
||||
def build(bld):
|
||||
bld = check_configured(bld)
|
||||
|
||||
Options.commands['install'] = False
|
||||
Options.commands['uninstall'] = False
|
||||
Options.is_install = False
|
||||
|
||||
bld.is_install = 0 # False
|
||||
|
||||
return build_impl(bld)
|
||||
|
||||
def build_impl(bld):
|
||||
# compile the project and/or install the files
|
||||
try:
|
||||
proj = Environment.Environment(Options.lockfile)
|
||||
except IOError:
|
||||
raise Utils.WafError("Project not configured (run 'waf configure' first)")
|
||||
|
||||
bld.load_dirs(proj[SRCDIR], proj[BLDDIR])
|
||||
bld.load_envs()
|
||||
|
||||
info("Waf: Entering directory `%s'" % bld.bldnode.abspath())
|
||||
bld.add_subdirs([os.path.split(Utils.g_module.root_path)[0]])
|
||||
|
||||
# execute something immediately before the build starts
|
||||
bld.pre_build()
|
||||
|
||||
try:
|
||||
bld.compile()
|
||||
finally:
|
||||
if Options.options.progress_bar: print('')
|
||||
info("Waf: Leaving directory `%s'" % bld.bldnode.abspath())
|
||||
|
||||
# execute something immediately after a successful build
|
||||
bld.post_build()
|
||||
|
||||
bld.install()
|
||||
|
||||
excludes = '.bzr .bzrignore .git .gitignore .svn CVS .cvsignore .arch-ids {arch} SCCS BitKeeper .hg _MTN _darcs Makefile Makefile.in config.log .gitattributes .hgignore .hgtags'.split()
|
||||
dist_exts = '~ .rej .orig .pyc .pyo .bak .tar.bz2 tar.gz .zip .swp'.split()
|
||||
def dont_dist(name, src, build_dir):
|
||||
global excludes, dist_exts
|
||||
|
||||
if (name.startswith(',,')
|
||||
or name.startswith('++')
|
||||
or name.startswith('.waf')
|
||||
or (src == '.' and name == Options.lockfile)
|
||||
or name in excludes
|
||||
or name == build_dir
|
||||
):
|
||||
return True
|
||||
|
||||
for ext in dist_exts:
|
||||
if name.endswith(ext):
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
# like shutil.copytree
|
||||
# exclude files and to raise exceptions immediately
|
||||
def copytree(src, dst, build_dir):
|
||||
names = os.listdir(src)
|
||||
os.makedirs(dst)
|
||||
for name in names:
|
||||
srcname = os.path.join(src, name)
|
||||
dstname = os.path.join(dst, name)
|
||||
|
||||
if dont_dist(name, src, build_dir):
|
||||
continue
|
||||
|
||||
if os.path.isdir(srcname):
|
||||
copytree(srcname, dstname, build_dir)
|
||||
else:
|
||||
shutil.copy2(srcname, dstname)
|
||||
|
||||
# TODO in waf 1.6, change this method if "srcdir == blddir" is allowed
|
||||
def distclean(ctx=None):
|
||||
'''removes the build directory'''
|
||||
global commands
|
||||
lst = os.listdir('.')
|
||||
for f in lst:
|
||||
if f == Options.lockfile:
|
||||
try:
|
||||
proj = Environment.Environment(f)
|
||||
except:
|
||||
Logs.warn('could not read %r' % f)
|
||||
continue
|
||||
|
||||
try:
|
||||
shutil.rmtree(proj[BLDDIR])
|
||||
except IOError:
|
||||
pass
|
||||
except OSError, e:
|
||||
if e.errno != errno.ENOENT:
|
||||
Logs.warn('project %r cannot be removed' % proj[BLDDIR])
|
||||
|
||||
try:
|
||||
os.remove(f)
|
||||
except OSError, e:
|
||||
if e.errno != errno.ENOENT:
|
||||
Logs.warn('file %r cannot be removed' % f)
|
||||
|
||||
# remove the local waf cache
|
||||
if not commands and f.startswith('.waf'):
|
||||
shutil.rmtree(f, ignore_errors=True)
|
||||
|
||||
# FIXME waf 1.6 a unique ctx parameter, and remove the optional appname and version
|
||||
def dist(appname='', version=''):
|
||||
'''makes a tarball for redistributing the sources'''
|
||||
# return return (distdirname, tarballname)
|
||||
import tarfile
|
||||
|
||||
if not appname: appname = Utils.g_module.APPNAME
|
||||
if not version: version = Utils.g_module.VERSION
|
||||
|
||||
tmp_folder = appname + '-' + version
|
||||
if g_gz in ['gz', 'bz2']:
|
||||
arch_name = tmp_folder + '.tar.' + g_gz
|
||||
else:
|
||||
arch_name = tmp_folder + '.' + 'zip'
|
||||
|
||||
# remove the previous dir
|
||||
try:
|
||||
shutil.rmtree(tmp_folder)
|
||||
except (OSError, IOError):
|
||||
pass
|
||||
|
||||
# remove the previous archive
|
||||
try:
|
||||
os.remove(arch_name)
|
||||
except (OSError, IOError):
|
||||
pass
|
||||
|
||||
# copy the files into the temporary folder
|
||||
blddir = getattr(Utils.g_module, BLDDIR, None)
|
||||
if not blddir:
|
||||
blddir = getattr(Utils.g_module, 'out', None)
|
||||
copytree('.', tmp_folder, blddir)
|
||||
|
||||
# undocumented hook for additional cleanup
|
||||
dist_hook = getattr(Utils.g_module, 'dist_hook', None)
|
||||
if dist_hook:
|
||||
back = os.getcwd()
|
||||
os.chdir(tmp_folder)
|
||||
try:
|
||||
dist_hook()
|
||||
finally:
|
||||
# go back to the root directory
|
||||
os.chdir(back)
|
||||
|
||||
if g_gz in ['gz', 'bz2']:
|
||||
tar = tarfile.open(arch_name, 'w:' + g_gz)
|
||||
tar.add(tmp_folder)
|
||||
tar.close()
|
||||
else:
|
||||
Utils.zip_folder(tmp_folder, arch_name, tmp_folder)
|
||||
|
||||
try: from hashlib import sha1 as sha
|
||||
except ImportError: from sha import sha
|
||||
try:
|
||||
digest = " (sha=%r)" % sha(Utils.readf(arch_name)).hexdigest()
|
||||
except:
|
||||
digest = ''
|
||||
|
||||
info('New archive created: %s%s' % (arch_name, digest))
|
||||
|
||||
if os.path.exists(tmp_folder): shutil.rmtree(tmp_folder)
|
||||
return arch_name
|
||||
|
||||
# FIXME waf 1.6 a unique ctx parameter, and remove the optional appname and version
|
||||
def distcheck(appname='', version='', subdir=''):
|
||||
'''checks if the sources compile (tarball from 'dist')'''
|
||||
import tempfile, tarfile
|
||||
|
||||
if not appname: appname = Utils.g_module.APPNAME
|
||||
if not version: version = Utils.g_module.VERSION
|
||||
|
||||
waf = os.path.abspath(sys.argv[0])
|
||||
tarball = dist(appname, version)
|
||||
|
||||
path = appname + '-' + version
|
||||
|
||||
# remove any previous instance
|
||||
if os.path.exists(path):
|
||||
shutil.rmtree(path)
|
||||
|
||||
t = tarfile.open(tarball)
|
||||
for x in t: t.extract(x)
|
||||
t.close()
|
||||
|
||||
# build_path is the directory for the waf invocation
|
||||
if subdir:
|
||||
build_path = os.path.join(path, subdir)
|
||||
else:
|
||||
build_path = path
|
||||
|
||||
instdir = tempfile.mkdtemp('.inst', '%s-%s' % (appname, version))
|
||||
ret = Utils.pproc.Popen([waf, 'configure', 'build', 'install', 'uninstall', '--destdir=' + instdir], cwd=build_path).wait()
|
||||
if ret:
|
||||
raise Utils.WafError('distcheck failed with code %i' % ret)
|
||||
|
||||
if os.path.exists(instdir):
|
||||
raise Utils.WafError('distcheck succeeded, but files were left in %s' % instdir)
|
||||
|
||||
shutil.rmtree(path)
|
||||
|
||||
# FIXME remove in Waf 1.6 (kept for compatibility)
|
||||
def add_subdir(dir, bld):
|
||||
bld.recurse(dir, 'build')
|
1199
third_party/waf/wafadmin/Task.py
vendored
1199
third_party/waf/wafadmin/Task.py
vendored
File diff suppressed because it is too large
Load Diff
614
third_party/waf/wafadmin/TaskGen.py
vendored
614
third_party/waf/wafadmin/TaskGen.py
vendored
@ -1,614 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# Thomas Nagy, 2005-2008 (ita)
|
||||
|
||||
"""
|
||||
The class task_gen encapsulates the creation of task objects (low-level code)
|
||||
The instances can have various parameters, but the creation of task nodes (Task.py)
|
||||
is delayed. To achieve this, various methods are called from the method "apply"
|
||||
|
||||
The class task_gen contains lots of methods, and a configuration table:
|
||||
* the methods to call (self.meths) can be specified dynamically (removing, adding, ..)
|
||||
* the order of the methods (self.prec or by default task_gen.prec) is configurable
|
||||
* new methods can be inserted dynamically without pasting old code
|
||||
|
||||
Additionally, task_gen provides the method apply_core
|
||||
* file extensions are mapped to methods: def meth(self, name_or_node)
|
||||
* if a mapping is not found in self.mappings, it is searched in task_gen.mappings
|
||||
* when called, the functions may modify self.allnodes to re-add source to process
|
||||
* the mappings can map an extension or a filename (see the code below)
|
||||
|
||||
WARNING: subclasses must reimplement the clone method
|
||||
"""
|
||||
|
||||
import os, traceback, copy
|
||||
import Build, Task, Utils, Logs, Options
|
||||
from Logs import debug, error, warn
|
||||
from Constants import *
|
||||
|
||||
typos = {
|
||||
'sources':'source',
|
||||
'targets':'target',
|
||||
'include':'includes',
|
||||
'define':'defines',
|
||||
'importpath':'importpaths',
|
||||
'install_var':'install_path',
|
||||
'install_subdir':'install_path',
|
||||
'inst_var':'install_path',
|
||||
'inst_dir':'install_path',
|
||||
'feature':'features',
|
||||
}
|
||||
|
||||
class register_obj(type):
|
||||
"""no decorators for classes, so we use a metaclass
|
||||
we store into task_gen.classes the classes that inherit task_gen
|
||||
and whose names end in '_taskgen'
|
||||
"""
|
||||
def __init__(cls, name, bases, dict):
|
||||
super(register_obj, cls).__init__(name, bases, dict)
|
||||
name = cls.__name__
|
||||
suffix = '_taskgen'
|
||||
if name.endswith(suffix):
|
||||
task_gen.classes[name.replace(suffix, '')] = cls
|
||||
|
||||
class task_gen(object):
|
||||
"""
|
||||
Most methods are of the form 'def meth(self):' without any parameters
|
||||
there are many of them, and they do many different things:
|
||||
* task creation
|
||||
* task results installation
|
||||
* environment modification
|
||||
* attribute addition/removal
|
||||
|
||||
The inheritance approach is complicated
|
||||
* mixing several languages at once
|
||||
* subclassing is needed even for small changes
|
||||
* inserting new methods is complicated
|
||||
|
||||
This new class uses a configuration table:
|
||||
* adding new methods easily
|
||||
* obtaining the order in which to call the methods
|
||||
* postponing the method calls (post() -> apply)
|
||||
|
||||
Additionally, a 'traits' static attribute is provided:
|
||||
* this list contains methods
|
||||
* the methods can remove or add methods from self.meths
|
||||
Example1: the attribute 'staticlib' is set on an instance
|
||||
a method set in the list of traits is executed when the
|
||||
instance is posted, it finds that flag and adds another method for execution
|
||||
Example2: a method set in the list of traits finds the msvc
|
||||
compiler (from self.env['MSVC']==1); more methods are added to self.meths
|
||||
"""
|
||||
|
||||
__metaclass__ = register_obj
|
||||
mappings = {}
|
||||
mapped = {}
|
||||
prec = Utils.DefaultDict(list)
|
||||
traits = Utils.DefaultDict(set)
|
||||
classes = {}
|
||||
|
||||
def __init__(self, *kw, **kwargs):
|
||||
self.prec = Utils.DefaultDict(list)
|
||||
"map precedence of function names to call"
|
||||
# so we will have to play with directed acyclic graphs
|
||||
# detect cycles, etc
|
||||
|
||||
self.source = ''
|
||||
self.target = ''
|
||||
|
||||
# list of methods to execute - does not touch it by hand unless you know
|
||||
self.meths = []
|
||||
|
||||
# list of mappings extension -> function
|
||||
self.mappings = {}
|
||||
|
||||
# list of features (see the documentation on traits)
|
||||
self.features = list(kw)
|
||||
|
||||
# not always a good idea
|
||||
self.tasks = []
|
||||
|
||||
self.default_chmod = O644
|
||||
self.default_install_path = None
|
||||
|
||||
# kind of private, beware of what you put in it, also, the contents are consumed
|
||||
self.allnodes = []
|
||||
|
||||
self.bld = kwargs.get('bld', Build.bld)
|
||||
self.env = self.bld.env.copy()
|
||||
|
||||
self.path = self.bld.path # emulate chdir when reading scripts
|
||||
self.name = '' # give a name to the target (static+shlib with the same targetname ambiguity)
|
||||
|
||||
# provide a unique id
|
||||
self.idx = self.bld.idx[self.path.id] = self.bld.idx.get(self.path.id, 0) + 1
|
||||
|
||||
for key, val in kwargs.iteritems():
|
||||
setattr(self, key, val)
|
||||
|
||||
self.bld.task_manager.add_task_gen(self)
|
||||
self.bld.all_task_gen.append(self)
|
||||
|
||||
def __str__(self):
|
||||
return ("<task_gen '%s' of type %s defined in %s>"
|
||||
% (self.name or self.target, self.__class__.__name__, str(self.path)))
|
||||
|
||||
def __setattr__(self, name, attr):
|
||||
real = typos.get(name, name)
|
||||
if real != name:
|
||||
warn('typo %s -> %s' % (name, real))
|
||||
if Logs.verbose > 0:
|
||||
traceback.print_stack()
|
||||
object.__setattr__(self, real, attr)
|
||||
|
||||
def to_list(self, value):
|
||||
"helper: returns a list"
|
||||
if isinstance(value, str): return value.split()
|
||||
else: return value
|
||||
|
||||
def apply(self):
|
||||
"order the methods to execute using self.prec or task_gen.prec"
|
||||
keys = set(self.meths)
|
||||
|
||||
# add the methods listed in the features
|
||||
self.features = Utils.to_list(self.features)
|
||||
for x in self.features + ['*']:
|
||||
st = task_gen.traits[x]
|
||||
if not st:
|
||||
warn('feature %r does not exist - bind at least one method to it' % x)
|
||||
keys.update(st)
|
||||
|
||||
# copy the precedence table
|
||||
prec = {}
|
||||
prec_tbl = self.prec or task_gen.prec
|
||||
for x in prec_tbl:
|
||||
if x in keys:
|
||||
prec[x] = prec_tbl[x]
|
||||
|
||||
# elements disconnected
|
||||
tmp = []
|
||||
for a in keys:
|
||||
for x in prec.values():
|
||||
if a in x: break
|
||||
else:
|
||||
tmp.append(a)
|
||||
|
||||
# topological sort
|
||||
out = []
|
||||
while tmp:
|
||||
e = tmp.pop()
|
||||
if e in keys: out.append(e)
|
||||
try:
|
||||
nlst = prec[e]
|
||||
except KeyError:
|
||||
pass
|
||||
else:
|
||||
del prec[e]
|
||||
for x in nlst:
|
||||
for y in prec:
|
||||
if x in prec[y]:
|
||||
break
|
||||
else:
|
||||
tmp.append(x)
|
||||
|
||||
if prec: raise Utils.WafError("graph has a cycle %s" % str(prec))
|
||||
out.reverse()
|
||||
self.meths = out
|
||||
|
||||
# then we run the methods in order
|
||||
debug('task_gen: posting %s %d', self, id(self))
|
||||
for x in out:
|
||||
try:
|
||||
v = getattr(self, x)
|
||||
except AttributeError:
|
||||
raise Utils.WafError("tried to retrieve %s which is not a valid method" % x)
|
||||
debug('task_gen: -> %s (%d)', x, id(self))
|
||||
v()
|
||||
|
||||
def post(self):
|
||||
"runs the code to create the tasks, do not subclass"
|
||||
if not self.name:
|
||||
if isinstance(self.target, list):
|
||||
self.name = ' '.join(self.target)
|
||||
else:
|
||||
self.name = self.target
|
||||
|
||||
if getattr(self, 'posted', None):
|
||||
#error("OBJECT ALREADY POSTED" + str( self))
|
||||
return
|
||||
|
||||
self.apply()
|
||||
self.posted = True
|
||||
debug('task_gen: posted %s', self.name)
|
||||
|
||||
def get_hook(self, ext):
|
||||
try: return self.mappings[ext]
|
||||
except KeyError:
|
||||
try: return task_gen.mappings[ext]
|
||||
except KeyError: return None
|
||||
|
||||
# TODO waf 1.6: always set the environment
|
||||
# TODO waf 1.6: create_task(self, name, inputs, outputs)
|
||||
def create_task(self, name, src=None, tgt=None, env=None):
|
||||
env = env or self.env
|
||||
task = Task.TaskBase.classes[name](env.copy(), generator=self)
|
||||
if src:
|
||||
task.set_inputs(src)
|
||||
if tgt:
|
||||
task.set_outputs(tgt)
|
||||
self.tasks.append(task)
|
||||
return task
|
||||
|
||||
def name_to_obj(self, name):
|
||||
return self.bld.name_to_obj(name, self.env)
|
||||
|
||||
def get_tgen_by_name(self, name):
|
||||
return self.bld.get_tgen_by_name(name)
|
||||
|
||||
def find_sources_in_dirs(self, dirnames, excludes=[], exts=[]):
|
||||
"""
|
||||
The attributes "excludes" and "exts" must be lists to avoid the confusion
|
||||
find_sources_in_dirs('a', 'b', 'c') <-> find_sources_in_dirs('a b c')
|
||||
|
||||
do not use absolute paths
|
||||
do not use paths outside of the source tree
|
||||
the files or folder beginning by . are not returned
|
||||
|
||||
# TODO: remove in Waf 1.6
|
||||
"""
|
||||
|
||||
err_msg = "'%s' attribute must be a list"
|
||||
if not isinstance(excludes, list):
|
||||
raise Utils.WscriptError(err_msg % 'excludes')
|
||||
if not isinstance(exts, list):
|
||||
raise Utils.WscriptError(err_msg % 'exts')
|
||||
|
||||
lst = []
|
||||
|
||||
#make sure dirnames is a list helps with dirnames with spaces
|
||||
dirnames = self.to_list(dirnames)
|
||||
|
||||
ext_lst = exts or list(self.mappings.keys()) + list(task_gen.mappings.keys())
|
||||
|
||||
for name in dirnames:
|
||||
anode = self.path.find_dir(name)
|
||||
|
||||
if not anode or not anode.is_child_of(self.bld.srcnode):
|
||||
raise Utils.WscriptError("Unable to use '%s' - either because it's not a relative path" \
|
||||
", or it's not child of '%s'." % (name, self.bld.srcnode))
|
||||
|
||||
self.bld.rescan(anode)
|
||||
for name in self.bld.cache_dir_contents[anode.id]:
|
||||
|
||||
# ignore hidden files
|
||||
if name.startswith('.'):
|
||||
continue
|
||||
|
||||
(base, ext) = os.path.splitext(name)
|
||||
if ext in ext_lst and not name in lst and not name in excludes:
|
||||
lst.append((anode.relpath_gen(self.path) or '.') + os.path.sep + name)
|
||||
|
||||
lst.sort()
|
||||
self.source = self.to_list(self.source)
|
||||
if not self.source: self.source = lst
|
||||
else: self.source += lst
|
||||
|
||||
def clone(self, env):
|
||||
"""when creating a clone in a task generator method,
|
||||
make sure to set posted=False on the clone
|
||||
else the other task generator will not create its tasks"""
|
||||
newobj = task_gen(bld=self.bld)
|
||||
for x in self.__dict__:
|
||||
if x in ['env', 'bld']:
|
||||
continue
|
||||
elif x in ["path", "features"]:
|
||||
setattr(newobj, x, getattr(self, x))
|
||||
else:
|
||||
setattr(newobj, x, copy.copy(getattr(self, x)))
|
||||
|
||||
newobj.__class__ = self.__class__
|
||||
if isinstance(env, str):
|
||||
newobj.env = self.bld.all_envs[env].copy()
|
||||
else:
|
||||
newobj.env = env.copy()
|
||||
|
||||
return newobj
|
||||
|
||||
def get_inst_path(self):
|
||||
return getattr(self, '_install_path', getattr(self, 'default_install_path', ''))
|
||||
|
||||
def set_inst_path(self, val):
|
||||
self._install_path = val
|
||||
|
||||
install_path = property(get_inst_path, set_inst_path)
|
||||
|
||||
|
||||
def get_chmod(self):
|
||||
return getattr(self, '_chmod', getattr(self, 'default_chmod', O644))
|
||||
|
||||
def set_chmod(self, val):
|
||||
self._chmod = val
|
||||
|
||||
chmod = property(get_chmod, set_chmod)
|
||||
|
||||
def declare_extension(var, func):
|
||||
try:
|
||||
for x in Utils.to_list(var):
|
||||
task_gen.mappings[x] = func
|
||||
except:
|
||||
raise Utils.WscriptError('declare_extension takes either a list or a string %r' % var)
|
||||
task_gen.mapped[func.__name__] = func
|
||||
|
||||
def declare_order(*k):
|
||||
assert(len(k) > 1)
|
||||
n = len(k) - 1
|
||||
for i in xrange(n):
|
||||
f1 = k[i]
|
||||
f2 = k[i+1]
|
||||
if not f1 in task_gen.prec[f2]:
|
||||
task_gen.prec[f2].append(f1)
|
||||
|
||||
def declare_chain(name='', action='', ext_in='', ext_out='', reentrant=True, color='BLUE',
|
||||
install=0, before=[], after=[], decider=None, rule=None, scan=None):
|
||||
"""
|
||||
see Tools/flex.py for an example
|
||||
while i do not like such wrappers, some people really do
|
||||
"""
|
||||
|
||||
action = action or rule
|
||||
if isinstance(action, str):
|
||||
act = Task.simple_task_type(name, action, color=color)
|
||||
else:
|
||||
act = Task.task_type_from_func(name, action, color=color)
|
||||
act.ext_in = tuple(Utils.to_list(ext_in))
|
||||
act.ext_out = tuple(Utils.to_list(ext_out))
|
||||
act.before = Utils.to_list(before)
|
||||
act.after = Utils.to_list(after)
|
||||
act.scan = scan
|
||||
|
||||
def x_file(self, node):
|
||||
if decider:
|
||||
ext = decider(self, node)
|
||||
else:
|
||||
ext = ext_out
|
||||
|
||||
if isinstance(ext, str):
|
||||
out_source = node.change_ext(ext)
|
||||
if reentrant:
|
||||
self.allnodes.append(out_source)
|
||||
elif isinstance(ext, list):
|
||||
out_source = [node.change_ext(x) for x in ext]
|
||||
if reentrant:
|
||||
for i in xrange((reentrant is True) and len(out_source) or reentrant):
|
||||
self.allnodes.append(out_source[i])
|
||||
else:
|
||||
# XXX: useless: it will fail on Utils.to_list above...
|
||||
raise Utils.WafError("do not know how to process %s" % str(ext))
|
||||
|
||||
tsk = self.create_task(name, node, out_source)
|
||||
|
||||
if node.__class__.bld.is_install:
|
||||
tsk.install = install
|
||||
|
||||
declare_extension(act.ext_in, x_file)
|
||||
return x_file
|
||||
|
||||
def bind_feature(name, methods):
|
||||
lst = Utils.to_list(methods)
|
||||
task_gen.traits[name].update(lst)
|
||||
|
||||
"""
|
||||
All the following decorators are registration decorators, i.e add an attribute to current class
|
||||
(task_gen and its derivatives), with same name as func, which points to func itself.
|
||||
For example:
|
||||
@taskgen
|
||||
def sayHi(self):
|
||||
print("hi")
|
||||
Now taskgen.sayHi() may be called
|
||||
|
||||
If python were really smart, it could infer itself the order of methods by looking at the
|
||||
attributes. A prerequisite for execution is to have the attribute set before.
|
||||
Intelligent compilers binding aspect-oriented programming and parallelization, what a nice topic for studies.
|
||||
"""
|
||||
def taskgen(func):
|
||||
"""
|
||||
register a method as a task generator method
|
||||
"""
|
||||
setattr(task_gen, func.__name__, func)
|
||||
return func
|
||||
|
||||
def feature(*k):
|
||||
"""
|
||||
declare a task generator method that will be executed when the
|
||||
object attribute 'feature' contains the corresponding key(s)
|
||||
"""
|
||||
def deco(func):
|
||||
setattr(task_gen, func.__name__, func)
|
||||
for name in k:
|
||||
task_gen.traits[name].update([func.__name__])
|
||||
return func
|
||||
return deco
|
||||
|
||||
def before(*k):
|
||||
"""
|
||||
declare a task generator method which will be executed
|
||||
before the functions of given name(s)
|
||||
"""
|
||||
def deco(func):
|
||||
setattr(task_gen, func.__name__, func)
|
||||
for fun_name in k:
|
||||
if not func.__name__ in task_gen.prec[fun_name]:
|
||||
task_gen.prec[fun_name].append(func.__name__)
|
||||
return func
|
||||
return deco
|
||||
|
||||
def after(*k):
|
||||
"""
|
||||
declare a task generator method which will be executed
|
||||
after the functions of given name(s)
|
||||
"""
|
||||
def deco(func):
|
||||
setattr(task_gen, func.__name__, func)
|
||||
for fun_name in k:
|
||||
if not fun_name in task_gen.prec[func.__name__]:
|
||||
task_gen.prec[func.__name__].append(fun_name)
|
||||
return func
|
||||
return deco
|
||||
|
||||
def extension(var):
|
||||
"""
|
||||
declare a task generator method which will be invoked during
|
||||
the processing of source files for the extension given
|
||||
"""
|
||||
def deco(func):
|
||||
setattr(task_gen, func.__name__, func)
|
||||
try:
|
||||
for x in Utils.to_list(var):
|
||||
task_gen.mappings[x] = func
|
||||
except:
|
||||
raise Utils.WafError('extension takes either a list or a string %r' % var)
|
||||
task_gen.mapped[func.__name__] = func
|
||||
return func
|
||||
return deco
|
||||
|
||||
# TODO make certain the decorators may be used here
|
||||
|
||||
def apply_core(self):
|
||||
"""Process the attribute source
|
||||
transform the names into file nodes
|
||||
try to process the files by name first, later by extension"""
|
||||
# get the list of folders to use by the scanners
|
||||
# all our objects share the same include paths anyway
|
||||
find_resource = self.path.find_resource
|
||||
|
||||
for filename in self.to_list(self.source):
|
||||
# if self.mappings or task_gen.mappings contains a file of the same name
|
||||
x = self.get_hook(filename)
|
||||
if x:
|
||||
x(self, filename)
|
||||
else:
|
||||
node = find_resource(filename)
|
||||
if not node: raise Utils.WafError("source not found: '%s' in '%s'" % (filename, str(self.path)))
|
||||
self.allnodes.append(node)
|
||||
|
||||
for node in self.allnodes:
|
||||
# self.mappings or task_gen.mappings map the file extension to a function
|
||||
x = self.get_hook(node.suffix())
|
||||
|
||||
if not x:
|
||||
raise Utils.WafError("Cannot guess how to process %s (got mappings %r in %r) -> try conf.check_tool(..)?" % \
|
||||
(str(node), self.__class__.mappings.keys(), self.__class__))
|
||||
x(self, node)
|
||||
feature('*')(apply_core)
|
||||
|
||||
def exec_rule(self):
|
||||
"""Process the attribute rule, when provided the method apply_core will be disabled
|
||||
"""
|
||||
if not getattr(self, 'rule', None):
|
||||
return
|
||||
|
||||
# someone may have removed it already
|
||||
try:
|
||||
self.meths.remove('apply_core')
|
||||
except ValueError:
|
||||
pass
|
||||
|
||||
# get the function and the variables
|
||||
func = self.rule
|
||||
|
||||
vars2 = []
|
||||
if isinstance(func, str):
|
||||
# use the shell by default for user-defined commands
|
||||
(func, vars2) = Task.compile_fun('', self.rule, shell=getattr(self, 'shell', True))
|
||||
func.code = self.rule
|
||||
|
||||
# create the task class
|
||||
name = getattr(self, 'name', None) or self.target or self.rule
|
||||
if not isinstance(name, str):
|
||||
name = str(self.idx)
|
||||
cls = Task.task_type_from_func(name, func, getattr(self, 'vars', vars2))
|
||||
cls.color = getattr(self, 'color', 'BLUE')
|
||||
|
||||
# now create one instance
|
||||
tsk = self.create_task(name)
|
||||
|
||||
dep_vars = getattr(self, 'dep_vars', ['ruledeps'])
|
||||
if dep_vars:
|
||||
tsk.dep_vars = dep_vars
|
||||
if isinstance(self.rule, str):
|
||||
tsk.env.ruledeps = self.rule
|
||||
else:
|
||||
# only works if the function is in a global module such as a waf tool
|
||||
tsk.env.ruledeps = Utils.h_fun(self.rule)
|
||||
|
||||
# we assume that the user knows that without inputs or outputs
|
||||
#if not getattr(self, 'target', None) and not getattr(self, 'source', None):
|
||||
# cls.quiet = True
|
||||
|
||||
if getattr(self, 'target', None):
|
||||
cls.quiet = True
|
||||
tsk.outputs = [self.path.find_or_declare(x) for x in self.to_list(self.target)]
|
||||
|
||||
if getattr(self, 'source', None):
|
||||
cls.quiet = True
|
||||
tsk.inputs = []
|
||||
for x in self.to_list(self.source):
|
||||
y = self.path.find_resource(x)
|
||||
if not y:
|
||||
raise Utils.WafError('input file %r could not be found (%r)' % (x, self.path.abspath()))
|
||||
tsk.inputs.append(y)
|
||||
|
||||
if self.allnodes:
|
||||
tsk.inputs.extend(self.allnodes)
|
||||
|
||||
if getattr(self, 'scan', None):
|
||||
cls.scan = self.scan
|
||||
|
||||
if getattr(self, 'install_path', None):
|
||||
tsk.install_path = self.install_path
|
||||
|
||||
if getattr(self, 'cwd', None):
|
||||
tsk.cwd = self.cwd
|
||||
|
||||
if getattr(self, 'on_results', None) or getattr(self, 'update_outputs', None):
|
||||
Task.update_outputs(cls)
|
||||
|
||||
if getattr(self, 'always', None):
|
||||
Task.always_run(cls)
|
||||
|
||||
for x in ['after', 'before', 'ext_in', 'ext_out']:
|
||||
setattr(cls, x, getattr(self, x, []))
|
||||
feature('*')(exec_rule)
|
||||
before('apply_core')(exec_rule)
|
||||
|
||||
def sequence_order(self):
|
||||
"""
|
||||
add a strict sequential constraint between the tasks generated by task generators
|
||||
it uses the fact that task generators are posted in order
|
||||
it will not post objects which belong to other folders
|
||||
there is also an awesome trick for executing the method in last position
|
||||
|
||||
to use:
|
||||
bld(features='javac seq')
|
||||
bld(features='jar seq')
|
||||
|
||||
to start a new sequence, set the attribute seq_start, for example:
|
||||
obj.seq_start = True
|
||||
"""
|
||||
if self.meths and self.meths[-1] != 'sequence_order':
|
||||
self.meths.append('sequence_order')
|
||||
return
|
||||
|
||||
if getattr(self, 'seq_start', None):
|
||||
return
|
||||
|
||||
# all the tasks previously declared must be run before these
|
||||
if getattr(self.bld, 'prev', None):
|
||||
self.bld.prev.post()
|
||||
for x in self.bld.prev.tasks:
|
||||
for y in self.tasks:
|
||||
y.set_run_after(x)
|
||||
|
||||
self.bld.prev = self
|
||||
|
||||
feature('seq')(sequence_order)
|
3
third_party/waf/wafadmin/Tools/__init__.py
vendored
3
third_party/waf/wafadmin/Tools/__init__.py
vendored
@ -1,3 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# Thomas Nagy, 2006 (ita)
|
34
third_party/waf/wafadmin/Tools/ar.py
vendored
34
third_party/waf/wafadmin/Tools/ar.py
vendored
@ -1,34 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# Thomas Nagy, 2006-2008 (ita)
|
||||
# Ralf Habacker, 2006 (rh)
|
||||
|
||||
"ar and ranlib"
|
||||
|
||||
import os, sys
|
||||
import Task, Utils
|
||||
from Configure import conftest
|
||||
|
||||
ar_str = '${AR} ${ARFLAGS} ${AR_TGT_F}${TGT} ${AR_SRC_F}${SRC}'
|
||||
cls = Task.simple_task_type('static_link', ar_str, color='YELLOW', ext_in='.o', ext_out='.bin', shell=False)
|
||||
cls.maxjobs = 1
|
||||
cls.install = Utils.nada
|
||||
|
||||
# remove the output in case it already exists
|
||||
old = cls.run
|
||||
def wrap(self):
|
||||
try: os.remove(self.outputs[0].abspath(self.env))
|
||||
except OSError: pass
|
||||
return old(self)
|
||||
setattr(cls, 'run', wrap)
|
||||
|
||||
def detect(conf):
|
||||
conf.find_program('ar', var='AR')
|
||||
conf.find_program('ranlib', var='RANLIB')
|
||||
conf.env.ARFLAGS = 'rcs'
|
||||
|
||||
@conftest
|
||||
def find_ar(conf):
|
||||
v = conf.env
|
||||
conf.check_tool('ar')
|
||||
if not v['AR']: conf.fatal('ar is required for static libraries - not found')
|
37
third_party/waf/wafadmin/Tools/bison.py
vendored
37
third_party/waf/wafadmin/Tools/bison.py
vendored
@ -1,37 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# John O'Meara, 2006
|
||||
# Thomas Nagy 2009
|
||||
|
||||
"Bison processing"
|
||||
|
||||
import Task
|
||||
from TaskGen import extension
|
||||
|
||||
bison = '${BISON} ${BISONFLAGS} ${SRC[0].abspath()} -o ${TGT[0].name}'
|
||||
cls = Task.simple_task_type('bison', bison, 'GREEN', ext_in='.yc .y .yy', ext_out='.c .cxx .h .l', shell=False)
|
||||
|
||||
@extension(['.y', '.yc', '.yy'])
|
||||
def big_bison(self, node):
|
||||
"""when it becomes complicated (unlike flex), the old recipes work better (cwd)"""
|
||||
has_h = '-d' in self.env['BISONFLAGS']
|
||||
|
||||
outs = []
|
||||
if node.name.endswith('.yc'):
|
||||
outs.append(node.change_ext('.tab.cc'))
|
||||
if has_h:
|
||||
outs.append(node.change_ext('.tab.hh'))
|
||||
else:
|
||||
outs.append(node.change_ext('.tab.c'))
|
||||
if has_h:
|
||||
outs.append(node.change_ext('.tab.h'))
|
||||
|
||||
tsk = self.create_task('bison', node, outs)
|
||||
tsk.cwd = node.bld_dir(tsk.env)
|
||||
|
||||
# and the c/cxx file must be compiled too
|
||||
self.allnodes.append(outs[0])
|
||||
|
||||
def detect(conf):
|
||||
bison = conf.find_program('bison', var='BISON', mandatory=True)
|
||||
conf.env['BISONFLAGS'] = '-d'
|
99
third_party/waf/wafadmin/Tools/cc.py
vendored
99
third_party/waf/wafadmin/Tools/cc.py
vendored
@ -1,99 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# Thomas Nagy, 2006 (ita)
|
||||
|
||||
"Base for c programs/libraries"
|
||||
|
||||
import os
|
||||
import TaskGen, Build, Utils, Task
|
||||
from Logs import debug
|
||||
import ccroot
|
||||
from TaskGen import feature, before, extension, after
|
||||
|
||||
g_cc_flag_vars = [
|
||||
'CCDEPS', 'FRAMEWORK', 'FRAMEWORKPATH',
|
||||
'STATICLIB', 'LIB', 'LIBPATH', 'LINKFLAGS', 'RPATH',
|
||||
'CCFLAGS', 'CPPPATH', 'CPPFLAGS', 'CCDEFINES']
|
||||
|
||||
EXT_CC = ['.c']
|
||||
|
||||
g_cc_type_vars = ['CCFLAGS', 'LINKFLAGS']
|
||||
|
||||
# TODO remove in waf 1.6
|
||||
class cc_taskgen(ccroot.ccroot_abstract):
|
||||
pass
|
||||
|
||||
@feature('c', 'cc')
|
||||
@before('apply_type_vars')
|
||||
@after('default_cc')
|
||||
def init_cc(self):
|
||||
self.p_flag_vars = set(self.p_flag_vars).union(g_cc_flag_vars)
|
||||
self.p_type_vars = set(self.p_type_vars).union(g_cc_type_vars)
|
||||
|
||||
if not self.env['CC_NAME']:
|
||||
raise Utils.WafError("At least one compiler (gcc, ..) must be selected")
|
||||
|
||||
@feature('c', 'cc')
|
||||
@after('apply_incpaths')
|
||||
def apply_obj_vars_cc(self):
|
||||
"""after apply_incpaths for INC_PATHS"""
|
||||
env = self.env
|
||||
app = env.append_unique
|
||||
cpppath_st = env['CPPPATH_ST']
|
||||
|
||||
# local flags come first
|
||||
# set the user-defined includes paths
|
||||
for i in env['INC_PATHS']:
|
||||
app('_CCINCFLAGS', cpppath_st % i.bldpath(env))
|
||||
app('_CCINCFLAGS', cpppath_st % i.srcpath(env))
|
||||
|
||||
# set the library include paths
|
||||
for i in env['CPPPATH']:
|
||||
app('_CCINCFLAGS', cpppath_st % i)
|
||||
|
||||
@feature('c', 'cc')
|
||||
@after('apply_lib_vars')
|
||||
def apply_defines_cc(self):
|
||||
"""after uselib is set for CCDEFINES"""
|
||||
self.defines = getattr(self, 'defines', [])
|
||||
lst = self.to_list(self.defines) + self.to_list(self.env['CCDEFINES'])
|
||||
milst = []
|
||||
|
||||
# now process the local defines
|
||||
for defi in lst:
|
||||
if not defi in milst:
|
||||
milst.append(defi)
|
||||
|
||||
# CCDEFINES_
|
||||
libs = self.to_list(self.uselib)
|
||||
for l in libs:
|
||||
val = self.env['CCDEFINES_'+l]
|
||||
if val: milst += val
|
||||
self.env['DEFLINES'] = ["%s %s" % (x[0], Utils.trimquotes('='.join(x[1:]))) for x in [y.split('=') for y in milst]]
|
||||
y = self.env['CCDEFINES_ST']
|
||||
self.env.append_unique('_CCDEFFLAGS', [y%x for x in milst])
|
||||
|
||||
@extension(EXT_CC)
|
||||
def c_hook(self, node):
|
||||
# create the compilation task: cpp or cc
|
||||
if getattr(self, 'obj_ext', None):
|
||||
obj_ext = self.obj_ext
|
||||
else:
|
||||
obj_ext = '_%d.o' % self.idx
|
||||
|
||||
task = self.create_task('cc', node, node.change_ext(obj_ext))
|
||||
try:
|
||||
self.compiled_tasks.append(task)
|
||||
except AttributeError:
|
||||
raise Utils.WafError('Have you forgotten to set the feature "cc" on %s?' % str(self))
|
||||
return task
|
||||
|
||||
cc_str = '${CC} ${CCFLAGS} ${CPPFLAGS} ${_CCINCFLAGS} ${_CCDEFFLAGS} ${CC_SRC_F}${SRC} ${CC_TGT_F}${TGT}'
|
||||
cls = Task.simple_task_type('cc', cc_str, 'GREEN', ext_out='.o', ext_in='.c', shell=False)
|
||||
cls.scan = ccroot.scan
|
||||
cls.vars.append('CCDEPS')
|
||||
|
||||
link_str = '${LINK_CC} ${CCLNK_SRC_F}${SRC} ${CCLNK_TGT_F}${TGT[0].abspath(env)} ${LINKFLAGS}'
|
||||
cls = Task.simple_task_type('cc_link', link_str, color='YELLOW', ext_in='.o', ext_out='.bin', shell=False)
|
||||
cls.maxjobs = 1
|
||||
cls.install = Utils.nada
|
639
third_party/waf/wafadmin/Tools/ccroot.py
vendored
639
third_party/waf/wafadmin/Tools/ccroot.py
vendored
@ -1,639 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# Thomas Nagy, 2005-2008 (ita)
|
||||
|
||||
"base for all c/c++ programs and libraries"
|
||||
|
||||
import os, sys, re
|
||||
import TaskGen, Task, Utils, preproc, Logs, Build, Options
|
||||
from Logs import error, debug, warn
|
||||
from Utils import md5
|
||||
from TaskGen import taskgen, after, before, feature
|
||||
from Constants import *
|
||||
from Configure import conftest
|
||||
try:
|
||||
from cStringIO import StringIO
|
||||
except ImportError:
|
||||
from io import StringIO
|
||||
|
||||
import config_c # <- necessary for the configuration, do not touch
|
||||
|
||||
USE_TOP_LEVEL = False
|
||||
|
||||
def get_cc_version(conf, cc, gcc=False, icc=False):
|
||||
|
||||
cmd = cc + ['-dM', '-E', '-']
|
||||
try:
|
||||
p = Utils.pproc.Popen(cmd, stdin=Utils.pproc.PIPE, stdout=Utils.pproc.PIPE, stderr=Utils.pproc.PIPE)
|
||||
p.stdin.write('\n')
|
||||
out = p.communicate()[0]
|
||||
except:
|
||||
conf.fatal('could not determine the compiler version %r' % cmd)
|
||||
|
||||
# PY3K: do not touch
|
||||
out = str(out)
|
||||
|
||||
if gcc:
|
||||
if out.find('__INTEL_COMPILER') >= 0:
|
||||
conf.fatal('The intel compiler pretends to be gcc')
|
||||
if out.find('__GNUC__') < 0:
|
||||
conf.fatal('Could not determine the compiler type')
|
||||
|
||||
if icc and out.find('__INTEL_COMPILER') < 0:
|
||||
conf.fatal('Not icc/icpc')
|
||||
|
||||
k = {}
|
||||
if icc or gcc:
|
||||
out = out.split('\n')
|
||||
import shlex
|
||||
|
||||
for line in out:
|
||||
lst = shlex.split(line)
|
||||
if len(lst)>2:
|
||||
key = lst[1]
|
||||
val = lst[2]
|
||||
k[key] = val
|
||||
|
||||
def isD(var):
|
||||
return var in k
|
||||
|
||||
def isT(var):
|
||||
return var in k and k[var] != '0'
|
||||
|
||||
# Some documentation is available at http://predef.sourceforge.net
|
||||
# The names given to DEST_OS must match what Utils.unversioned_sys_platform() returns.
|
||||
mp1 = {
|
||||
'__linux__' : 'linux',
|
||||
'__GNU__' : 'gnu',
|
||||
'__FreeBSD__' : 'freebsd',
|
||||
'__NetBSD__' : 'netbsd',
|
||||
'__OpenBSD__' : 'openbsd',
|
||||
'__sun' : 'sunos',
|
||||
'__hpux' : 'hpux',
|
||||
'__sgi' : 'irix',
|
||||
'_AIX' : 'aix',
|
||||
'__CYGWIN__' : 'cygwin',
|
||||
'__MSYS__' : 'msys',
|
||||
'_UWIN' : 'uwin',
|
||||
'_WIN64' : 'win32',
|
||||
'_WIN32' : 'win32',
|
||||
'__POWERPC__' : 'powerpc',
|
||||
}
|
||||
|
||||
for i in mp1:
|
||||
if isD(i):
|
||||
conf.env.DEST_OS = mp1[i]
|
||||
break
|
||||
else:
|
||||
if isD('__APPLE__') and isD('__MACH__'):
|
||||
conf.env.DEST_OS = 'darwin'
|
||||
elif isD('__unix__'): # unix must be tested last as it's a generic fallback
|
||||
conf.env.DEST_OS = 'generic'
|
||||
|
||||
if isD('__ELF__'):
|
||||
conf.env.DEST_BINFMT = 'elf'
|
||||
elif isD('__WINNT__') or isD('__CYGWIN__'):
|
||||
conf.env.DEST_BINFMT = 'pe'
|
||||
elif isD('__APPLE__'):
|
||||
conf.env.DEST_BINFMT = 'mac-o'
|
||||
|
||||
mp2 = {
|
||||
'__x86_64__' : 'x86_64',
|
||||
'__i386__' : 'x86',
|
||||
'__ia64__' : 'ia',
|
||||
'__mips__' : 'mips',
|
||||
'__sparc__' : 'sparc',
|
||||
'__alpha__' : 'alpha',
|
||||
'__arm__' : 'arm',
|
||||
'__hppa__' : 'hppa',
|
||||
'__powerpc__' : 'powerpc',
|
||||
}
|
||||
for i in mp2:
|
||||
if isD(i):
|
||||
conf.env.DEST_CPU = mp2[i]
|
||||
break
|
||||
|
||||
debug('ccroot: dest platform: ' + ' '.join([conf.env[x] or '?' for x in ('DEST_OS', 'DEST_BINFMT', 'DEST_CPU')]))
|
||||
conf.env['CC_VERSION'] = (k['__GNUC__'], k['__GNUC_MINOR__'], k['__GNUC_PATCHLEVEL__'])
|
||||
return k
|
||||
|
||||
class DEBUG_LEVELS:
|
||||
"""Will disappear in waf 1.6"""
|
||||
ULTRADEBUG = "ultradebug"
|
||||
DEBUG = "debug"
|
||||
RELEASE = "release"
|
||||
OPTIMIZED = "optimized"
|
||||
CUSTOM = "custom"
|
||||
|
||||
ALL = [ULTRADEBUG, DEBUG, RELEASE, OPTIMIZED, CUSTOM]
|
||||
|
||||
def scan(self):
|
||||
"look for .h the .cpp need"
|
||||
debug('ccroot: _scan_preprocessor(self, node, env, path_lst)')
|
||||
|
||||
# TODO waf 1.6 - assume the default input has exactly one file
|
||||
|
||||
if len(self.inputs) == 1:
|
||||
node = self.inputs[0]
|
||||
(nodes, names) = preproc.get_deps(node, self.env, nodepaths = self.env['INC_PATHS'])
|
||||
if Logs.verbose:
|
||||
debug('deps: deps for %s: %r; unresolved %r', str(node), nodes, names)
|
||||
return (nodes, names)
|
||||
|
||||
all_nodes = []
|
||||
all_names = []
|
||||
seen = set()
|
||||
for node in self.inputs:
|
||||
(nodes, names) = preproc.get_deps(node, self.env, nodepaths = self.env['INC_PATHS'])
|
||||
if Logs.verbose:
|
||||
debug('deps: deps for %s: %r; unresolved %r', str(node), nodes, names)
|
||||
for x in nodes:
|
||||
if id(x) in seen: continue
|
||||
seen.add(id(x))
|
||||
all_nodes.append(x)
|
||||
for x in names:
|
||||
if not x in all_names:
|
||||
all_names.append(x)
|
||||
return (all_nodes, all_names)
|
||||
|
||||
class ccroot_abstract(TaskGen.task_gen):
|
||||
"Parent class for programs and libraries in languages c, c++ and moc (Qt)"
|
||||
def __init__(self, *k, **kw):
|
||||
# COMPAT remove in waf 1.6 TODO
|
||||
if len(k) > 1:
|
||||
k = list(k)
|
||||
if k[1][0] != 'c':
|
||||
k[1] = 'c' + k[1]
|
||||
TaskGen.task_gen.__init__(self, *k, **kw)
|
||||
|
||||
def get_target_name(self):
|
||||
tp = 'program'
|
||||
for x in self.features:
|
||||
if x in ['cshlib', 'cstaticlib']:
|
||||
tp = x.lstrip('c')
|
||||
|
||||
pattern = self.env[tp + '_PATTERN']
|
||||
if not pattern: pattern = '%s'
|
||||
|
||||
dir, name = os.path.split(self.target)
|
||||
|
||||
if 'cshlib' in self.features and getattr(self, 'vnum', None):
|
||||
nums = self.vnum.split('.')
|
||||
if self.env.DEST_BINFMT == 'pe':
|
||||
# include the version in the dll file name,
|
||||
# the import lib file name stays unversionned.
|
||||
name = name + '-' + nums[0]
|
||||
elif self.env.DEST_OS == 'openbsd':
|
||||
pattern = '%s.%s' % (pattern, nums[0])
|
||||
if len(nums) >= 2:
|
||||
pattern += '.%s' % nums[1]
|
||||
|
||||
return os.path.join(dir, pattern % name)
|
||||
|
||||
@feature('c', 'cc', 'cxx')
|
||||
@before('apply_core')
|
||||
def default_cc(self):
|
||||
"""compiled_tasks attribute must be set before the '.c->.o' tasks can be created"""
|
||||
Utils.def_attrs(self,
|
||||
includes = '',
|
||||
defines= '',
|
||||
rpaths = '',
|
||||
uselib = '',
|
||||
uselib_local = '',
|
||||
add_objects = '',
|
||||
p_flag_vars = [],
|
||||
p_type_vars = [],
|
||||
compiled_tasks = [],
|
||||
link_task = None)
|
||||
|
||||
# The only thing we need for cross-compilation is DEST_BINFMT.
|
||||
# At some point, we may reach a case where DEST_BINFMT is not enough, but for now it's sufficient.
|
||||
# Currently, cross-compilation is auto-detected only for the gnu and intel compilers.
|
||||
if not self.env.DEST_BINFMT:
|
||||
# Infer the binary format from the os name.
|
||||
self.env.DEST_BINFMT = Utils.unversioned_sys_platform_to_binary_format(
|
||||
self.env.DEST_OS or Utils.unversioned_sys_platform())
|
||||
|
||||
if not self.env.BINDIR: self.env.BINDIR = Utils.subst_vars('${PREFIX}/bin', self.env)
|
||||
if not self.env.LIBDIR: self.env.LIBDIR = Utils.subst_vars('${PREFIX}/lib${LIB_EXT}', self.env)
|
||||
|
||||
@feature('cprogram', 'dprogram', 'cstaticlib', 'dstaticlib', 'cshlib', 'dshlib')
|
||||
def apply_verif(self):
|
||||
"""no particular order, used for diagnostic"""
|
||||
if not (self.source or getattr(self, 'add_objects', None) or getattr(self, 'uselib_local', None) or getattr(self, 'obj_files', None)):
|
||||
raise Utils.WafError('no source files specified for %s' % self)
|
||||
if not self.target:
|
||||
raise Utils.WafError('no target for %s' % self)
|
||||
|
||||
# TODO reference the d programs, shlibs in d.py, not here
|
||||
|
||||
@feature('cprogram', 'dprogram')
|
||||
@after('default_cc')
|
||||
@before('apply_core')
|
||||
def vars_target_cprogram(self):
|
||||
self.default_install_path = self.env.BINDIR
|
||||
self.default_chmod = O755
|
||||
|
||||
@after('default_cc')
|
||||
@feature('cshlib', 'dshlib')
|
||||
@before('apply_core')
|
||||
def vars_target_cshlib(self):
|
||||
if self.env.DEST_BINFMT == 'pe':
|
||||
# set execute bit on libs to avoid 'permission denied' (issue 283)
|
||||
self.default_chmod = O755
|
||||
self.default_install_path = self.env.BINDIR
|
||||
else:
|
||||
self.default_install_path = self.env.LIBDIR
|
||||
|
||||
@feature('cprogram', 'dprogram', 'cstaticlib', 'dstaticlib', 'cshlib', 'dshlib')
|
||||
@after('apply_link', 'vars_target_cprogram', 'vars_target_cshlib')
|
||||
def default_link_install(self):
|
||||
"""you may kill this method to inject your own installation for the first element
|
||||
any other install should only process its own nodes and not those from the others"""
|
||||
if self.install_path:
|
||||
self.bld.install_files(self.install_path, self.link_task.outputs[0], env=self.env, chmod=self.chmod)
|
||||
|
||||
@feature('c', 'cc', 'cxx')
|
||||
@after('apply_type_vars', 'apply_lib_vars', 'apply_core')
|
||||
def apply_incpaths(self):
|
||||
"""used by the scanner
|
||||
after processing the uselib for CPPPATH
|
||||
after apply_core because some processing may add include paths
|
||||
"""
|
||||
lst = []
|
||||
# TODO move the uselib processing out of here
|
||||
for lib in self.to_list(self.uselib):
|
||||
for path in self.env['CPPPATH_' + lib]:
|
||||
if not path in lst:
|
||||
lst.append(path)
|
||||
if preproc.go_absolute:
|
||||
for path in preproc.standard_includes:
|
||||
if not path in lst:
|
||||
lst.append(path)
|
||||
|
||||
for path in self.to_list(self.includes):
|
||||
if not path in lst:
|
||||
if preproc.go_absolute or not os.path.isabs(path):
|
||||
lst.append(path)
|
||||
else:
|
||||
self.env.prepend_value('CPPPATH', path)
|
||||
|
||||
for path in lst:
|
||||
node = None
|
||||
if os.path.isabs(path):
|
||||
if preproc.go_absolute:
|
||||
node = self.bld.root.find_dir(path)
|
||||
elif path[0] == '#':
|
||||
node = self.bld.srcnode
|
||||
if len(path) > 1:
|
||||
node = node.find_dir(path[1:])
|
||||
else:
|
||||
node = self.path.find_dir(path)
|
||||
|
||||
if node:
|
||||
self.env.append_value('INC_PATHS', node)
|
||||
|
||||
# TODO WAF 1.6
|
||||
if USE_TOP_LEVEL:
|
||||
self.env.append_value('INC_PATHS', self.bld.srcnode)
|
||||
|
||||
@feature('c', 'cc', 'cxx')
|
||||
@after('init_cc', 'init_cxx')
|
||||
@before('apply_lib_vars')
|
||||
def apply_type_vars(self):
|
||||
"""before apply_lib_vars because we modify uselib
|
||||
after init_cc and init_cxx because web need p_type_vars
|
||||
"""
|
||||
for x in self.features:
|
||||
if not x in ['cprogram', 'cstaticlib', 'cshlib']:
|
||||
continue
|
||||
x = x.lstrip('c')
|
||||
|
||||
# if the type defines uselib to add, add them
|
||||
st = self.env[x + '_USELIB']
|
||||
if st: self.uselib = self.uselib + ' ' + st
|
||||
|
||||
# each compiler defines variables like 'shlib_CXXFLAGS', 'shlib_LINKFLAGS', etc
|
||||
# so when we make a task generator of the type shlib, CXXFLAGS are modified accordingly
|
||||
for var in self.p_type_vars:
|
||||
compvar = '%s_%s' % (x, var)
|
||||
#print compvar
|
||||
value = self.env[compvar]
|
||||
if value: self.env.append_value(var, value)
|
||||
|
||||
@feature('cprogram', 'cshlib', 'cstaticlib')
|
||||
@after('apply_core')
|
||||
def apply_link(self):
|
||||
"""executes after apply_core for collecting 'compiled_tasks'
|
||||
use a custom linker if specified (self.link='name-of-custom-link-task')"""
|
||||
link = getattr(self, 'link', None)
|
||||
if not link:
|
||||
if 'cstaticlib' in self.features: link = 'static_link'
|
||||
elif 'cxx' in self.features: link = 'cxx_link'
|
||||
else: link = 'cc_link'
|
||||
|
||||
tsk = self.create_task(link)
|
||||
outputs = [t.outputs[0] for t in self.compiled_tasks]
|
||||
tsk.set_inputs(outputs)
|
||||
tsk.set_outputs(self.path.find_or_declare(get_target_name(self)))
|
||||
|
||||
self.link_task = tsk
|
||||
|
||||
@feature('c', 'cc', 'cxx')
|
||||
@after('apply_link', 'init_cc', 'init_cxx', 'apply_core')
|
||||
def apply_lib_vars(self):
|
||||
"""after apply_link because of 'link_task'
|
||||
after default_cc because of the attribute 'uselib'"""
|
||||
|
||||
# after 'apply_core' in case if 'cc' if there is no link
|
||||
|
||||
env = self.env
|
||||
|
||||
# 1. the case of the libs defined in the project (visit ancestors first)
|
||||
# the ancestors external libraries (uselib) will be prepended
|
||||
self.uselib = self.to_list(self.uselib)
|
||||
names = self.to_list(self.uselib_local)
|
||||
|
||||
seen = set([])
|
||||
tmp = Utils.deque(names) # consume a copy of the list of names
|
||||
while tmp:
|
||||
lib_name = tmp.popleft()
|
||||
# visit dependencies only once
|
||||
if lib_name in seen:
|
||||
continue
|
||||
|
||||
y = self.name_to_obj(lib_name)
|
||||
if not y:
|
||||
raise Utils.WafError('object %r was not found in uselib_local (required by %r)' % (lib_name, self.name))
|
||||
y.post()
|
||||
seen.add(lib_name)
|
||||
|
||||
# object has ancestors to process (shared libraries): add them to the end of the list
|
||||
if getattr(y, 'uselib_local', None):
|
||||
lst = y.to_list(y.uselib_local)
|
||||
if 'cshlib' in y.features or 'cprogram' in y.features:
|
||||
lst = [x for x in lst if not 'cstaticlib' in self.name_to_obj(x).features]
|
||||
tmp.extend(lst)
|
||||
|
||||
# link task and flags
|
||||
if getattr(y, 'link_task', None):
|
||||
|
||||
link_name = y.target[y.target.rfind(os.sep) + 1:]
|
||||
if 'cstaticlib' in y.features:
|
||||
env.append_value('STATICLIB', link_name)
|
||||
elif 'cshlib' in y.features or 'cprogram' in y.features:
|
||||
# WARNING some linkers can link against programs
|
||||
env.append_value('LIB', link_name)
|
||||
|
||||
# the order
|
||||
self.link_task.set_run_after(y.link_task)
|
||||
|
||||
# for the recompilation
|
||||
dep_nodes = getattr(self.link_task, 'dep_nodes', [])
|
||||
self.link_task.dep_nodes = dep_nodes + y.link_task.outputs
|
||||
|
||||
# add the link path too
|
||||
tmp_path = y.link_task.outputs[0].parent.bldpath(self.env)
|
||||
if not tmp_path in env['LIBPATH']: env.prepend_value('LIBPATH', tmp_path)
|
||||
|
||||
# add ancestors uselib too - but only propagate those that have no staticlib
|
||||
for v in self.to_list(y.uselib):
|
||||
if not env['STATICLIB_' + v]:
|
||||
if not v in self.uselib:
|
||||
self.uselib.insert(0, v)
|
||||
|
||||
# if the library task generator provides 'export_incdirs', add to the include path
|
||||
# the export_incdirs must be a list of paths relative to the other library
|
||||
if getattr(y, 'export_incdirs', None):
|
||||
for x in self.to_list(y.export_incdirs):
|
||||
node = y.path.find_dir(x)
|
||||
if not node:
|
||||
raise Utils.WafError('object %r: invalid folder %r in export_incdirs' % (y.target, x))
|
||||
self.env.append_unique('INC_PATHS', node)
|
||||
|
||||
# 2. the case of the libs defined outside
|
||||
for x in self.uselib:
|
||||
for v in self.p_flag_vars:
|
||||
val = self.env[v + '_' + x]
|
||||
if val: self.env.append_value(v, val)
|
||||
|
||||
@feature('cprogram', 'cstaticlib', 'cshlib')
|
||||
@after('init_cc', 'init_cxx', 'apply_link')
|
||||
def apply_objdeps(self):
|
||||
"add the .o files produced by some other object files in the same manner as uselib_local"
|
||||
if not getattr(self, 'add_objects', None): return
|
||||
|
||||
seen = []
|
||||
names = self.to_list(self.add_objects)
|
||||
while names:
|
||||
x = names[0]
|
||||
|
||||
# visit dependencies only once
|
||||
if x in seen:
|
||||
names = names[1:]
|
||||
continue
|
||||
|
||||
# object does not exist ?
|
||||
y = self.name_to_obj(x)
|
||||
if not y:
|
||||
raise Utils.WafError('object %r was not found in uselib_local (required by add_objects %r)' % (x, self.name))
|
||||
|
||||
# object has ancestors to process first ? update the list of names
|
||||
if getattr(y, 'add_objects', None):
|
||||
added = 0
|
||||
lst = y.to_list(y.add_objects)
|
||||
lst.reverse()
|
||||
for u in lst:
|
||||
if u in seen: continue
|
||||
added = 1
|
||||
names = [u]+names
|
||||
if added: continue # list of names modified, loop
|
||||
|
||||
# safe to process the current object
|
||||
y.post()
|
||||
seen.append(x)
|
||||
|
||||
for t in y.compiled_tasks:
|
||||
self.link_task.inputs.extend(t.outputs)
|
||||
|
||||
@feature('cprogram', 'cshlib', 'cstaticlib')
|
||||
@after('apply_lib_vars')
|
||||
def apply_obj_vars(self):
|
||||
"""after apply_lib_vars for uselib"""
|
||||
v = self.env
|
||||
lib_st = v['LIB_ST']
|
||||
staticlib_st = v['STATICLIB_ST']
|
||||
libpath_st = v['LIBPATH_ST']
|
||||
staticlibpath_st = v['STATICLIBPATH_ST']
|
||||
rpath_st = v['RPATH_ST']
|
||||
|
||||
app = v.append_unique
|
||||
|
||||
if v['FULLSTATIC']:
|
||||
v.append_value('LINKFLAGS', v['FULLSTATIC_MARKER'])
|
||||
|
||||
for i in v['RPATH']:
|
||||
if i and rpath_st:
|
||||
app('LINKFLAGS', rpath_st % i)
|
||||
|
||||
for i in v['LIBPATH']:
|
||||
app('LINKFLAGS', libpath_st % i)
|
||||
app('LINKFLAGS', staticlibpath_st % i)
|
||||
|
||||
if v['STATICLIB']:
|
||||
v.append_value('LINKFLAGS', v['STATICLIB_MARKER'])
|
||||
k = [(staticlib_st % i) for i in v['STATICLIB']]
|
||||
app('LINKFLAGS', k)
|
||||
|
||||
# fully static binaries ?
|
||||
if not v['FULLSTATIC']:
|
||||
if v['STATICLIB'] or v['LIB']:
|
||||
v.append_value('LINKFLAGS', v['SHLIB_MARKER'])
|
||||
|
||||
app('LINKFLAGS', [lib_st % i for i in v['LIB']])
|
||||
|
||||
@after('apply_link')
|
||||
def process_obj_files(self):
|
||||
if not hasattr(self, 'obj_files'): return
|
||||
for x in self.obj_files:
|
||||
node = self.path.find_resource(x)
|
||||
self.link_task.inputs.append(node)
|
||||
|
||||
@taskgen
def add_obj_file(self, file):
	"""Small example on how to link object files as if they were source
	obj = bld.create_obj('cc')
	obj.add_obj_file('foo.o')"""
	try:
		self.obj_files.append(file)
	except AttributeError:
		self.obj_files = [file]
	if 'process_obj_files' not in self.meths:
		self.meths.append('process_obj_files')
|
||||
|
||||
# map from a (lower-cased, singularized) task-generator attribute name to the
# environment variable its value should be appended to; used by add_extra_flags
c_attrs = {
	'cxxflag': 'CXXFLAGS',
	'cflag': 'CCFLAGS',
	'ccflag': 'CCFLAGS',
	'linkflag': 'LINKFLAGS',
	'ldflag': 'LINKFLAGS',
	'lib': 'LIB',
	'libpath': 'LIBPATH',
	'staticlib': 'STATICLIB',
	'staticlibpath': 'STATICLIBPATH',
	'rpath': 'RPATH',
	'framework': 'FRAMEWORK',
	'frameworkpath': 'FRAMEWORKPATH',
}
|
||||
|
||||
@feature('c', 'cc', 'cxx')
@before('init_cxx', 'init_cc')
@before('apply_lib_vars', 'apply_obj_vars', 'apply_incpaths', 'init_cc')
def add_extra_flags(self):
	"""case and plural insensitive
	before apply_obj_vars for processing the library attributes
	"""
	for attr in list(self.__dict__.keys()):
		key = attr.lower()
		if key.endswith('s'):
			key = key[:-1]
		var = c_attrs.get(key, None)
		if var:
			self.env.append_unique(var, getattr(self, attr))
|
||||
|
||||
# ============ the code above must not know anything about import libs ==========
|
||||
|
||||
@feature('cshlib')
@after('apply_link', 'default_cc')
@before('apply_lib_vars', 'apply_objdeps', 'default_link_install')
def apply_implib(self):
	"""On mswindows, handle dlls and their import libs
	the .dll.a is the import lib and it is required for linking so it is installed too
	"""
	# only relevant for PE binaries (windows dll/exe)
	if not self.env.DEST_BINFMT == 'pe':
		return

	# installation is handled here, so drop the default install method
	self.meths.remove('default_link_install')

	bindir = self.install_path
	if not bindir: return

	# install the dll in the bin dir
	dll = self.link_task.outputs[0]
	self.bld.install_files(bindir, dll, self.env, self.chmod)

	# add linker flags to generate the import lib
	implib = self.env['implib_PATTERN'] % os.path.split(self.target)[1]

	# the import lib is a second output of the link task, installed to LIBDIR
	implib = dll.parent.find_or_declare(implib)
	self.link_task.outputs.append(implib)
	self.bld.install_as('${LIBDIR}/%s' % implib.name, implib, self.env)

	self.env.append_value('LINKFLAGS', (self.env['IMPLIB_ST'] % implib.bldpath(self.env)).split())
|
||||
|
||||
# ============ the code above must not know anything about vnum processing on unix platforms =========
|
||||
|
||||
@feature('cshlib')
@after('apply_link')
@before('apply_lib_vars', 'default_link_install')
def apply_vnum(self):
	"""
	libfoo.so is installed as libfoo.so.1.2.3

	Also emits the soname linker flag and symlinks so the versioned
	library can be used both installed and from the build directory.
	"""
	# only for versioned posix shared libraries (elf / mac-o)
	if not getattr(self, 'vnum', '') or not 'cshlib' in self.features or os.name != 'posix' or self.env.DEST_BINFMT not in ('elf', 'mac-o'):
		return

	# installation is handled below, so drop the default install method
	self.meths.remove('default_link_install')

	link = self.link_task
	nums = self.vnum.split('.')
	node = link.outputs[0]

	# name3 carries the full version, name2 only the major number (soname)
	libname = node.name
	if libname.endswith('.dylib'):
		name3 = libname.replace('.dylib', '.%s.dylib' % self.vnum)
		name2 = libname.replace('.dylib', '.%s.dylib' % nums[0])
	else:
		name3 = libname + '.' + self.vnum
		name2 = libname + '.' + nums[0]

	if self.env.SONAME_ST:
		v = self.env.SONAME_ST % name2
		self.env.append_value('LINKFLAGS', v.split())

	bld = self.bld
	# (a redundant second computation of nums was removed here)

	path = self.install_path
	if not path: return

	if self.env.DEST_OS == 'openbsd':
		# openbsd encodes the version in the library name itself
		libname = self.link_task.outputs[0].name
		bld.install_as('%s%s%s' % (path, os.sep, libname), node, env=self.env)
	else:
		bld.install_as(path + os.sep + name3, node, env=self.env)
		bld.symlink_as(path + os.sep + name2, name3)
		bld.symlink_as(path + os.sep + libname, name3)

	# the following task is just to enable execution from the build dir :-/
	if self.env.DEST_OS != 'openbsd':
		self.create_task('vnum', node, [node.parent.find_or_declare(name2), node.parent.find_or_declare(name3)])
|
||||
|
||||
def exec_vnum_link(self):
	"""Replace each task output with a symlink pointing at the real library file."""
	source_name = self.inputs[0].name
	for out in self.outputs:
		target = out.abspath(self.env)
		# remove any stale file/link first; a missing target is fine
		try:
			os.remove(target)
		except OSError:
			pass

		try:
			os.symlink(source_name, target)
		except OSError:
			return 1
|
||||
|
||||
# register the 'vnum' symlink task class; quiet=1 keeps it out of the build output
cls = Task.task_type_from_func('vnum', func=exec_vnum_link, ext_in='.bin', color='CYAN')
cls.quiet = 1
|
||||
|
||||
# ============ the --as-needed flag should added during the configuration, not at runtime =========
|
||||
|
||||
@conftest
def add_as_needed(conf):
	"""Add --as-needed to the link flags for gcc on elf platforms."""
	if conf.env.DEST_BINFMT != 'elf':
		return
	if 'gcc' not in (conf.env.CXX_NAME, conf.env.CC_NAME):
		return
	conf.env.append_unique('LINKFLAGS', '--as-needed')
|
66
third_party/waf/wafadmin/Tools/compiler_cc.py
vendored
66
third_party/waf/wafadmin/Tools/compiler_cc.py
vendored
@ -1,66 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# Matthias Jahn jahn dôt matthias ât freenet dôt de, 2007 (pmarat)
|
||||
|
||||
import os, sys, imp, types, ccroot
|
||||
import optparse
|
||||
import Utils, Configure, Options
|
||||
from Logs import debug
|
||||
|
||||
# preferred C compilers for each build platform, in trial order
c_compiler = {
	'win32': ['msvc', 'gcc'],
	'cygwin': ['gcc'],
	'darwin': ['gcc'],
	'aix': ['xlc', 'gcc'],
	'linux': ['gcc', 'icc', 'suncc'],
	'sunos': ['gcc', 'suncc'],
	'irix': ['gcc'],
	'hpux': ['gcc'],
	'gnu': ['gcc'],
	'default': ['gcc']
}

def __list_possible_compiler(platform):
	"""Return the ordered compiler candidates for *platform*, falling back to the default list."""
	return c_compiler.get(platform, c_compiler['default'])
|
||||
|
||||
def detect(conf):
	"""
	for each compiler for the platform, try to configure the compiler
	in theory the tools should raise a configuration error if the compiler
	pretends to be something it is not (setting CC=icc and trying to configure gcc)
	"""
	try: test_for_compiler = Options.options.check_c_compiler
	except AttributeError: conf.fatal("Add set_options(opt): opt.tool_options('compiler_cc')")
	orig = conf.env
	for compiler in test_for_compiler.split():
		# each candidate gets a fresh copy of the environment
		conf.env = orig.copy()
		try:
			conf.check_tool(compiler)
		except Configure.ConfigurationError, e:
			debug('compiler_cc: %r' % e)
		else:
			if conf.env['CC']:
				# success: merge the candidate env back into the original and stop
				orig.table = conf.env.get_merged_dict()
				conf.env = orig
				conf.check_message(compiler, '', True)
				conf.env['COMPILER_CC'] = compiler
				break
			conf.check_message(compiler, '', False)
			# NOTE(review): this break aborts after the first tool that loads
			# without setting CC, so later candidates are never tried - confirm
			break
	else:
		# the loop exhausted without a break: nothing configured
		conf.fatal('could not configure a c compiler!')
|
||||
|
||||
def set_options(opt):
	"""Add the --check-c-compiler option plus one option group per candidate tool."""
	platform = Utils.unversioned_sys_platform()
	candidates = ' '.join(__list_possible_compiler(platform))
	group = opt.add_option_group("C Compiler Options")
	group.add_option('--check-c-compiler', default="%s" % candidates,
		help='On this platform (%s) the following C-Compiler will be checked by default: "%s"' % (platform, candidates),
		dest="check_c_compiler")

	for name in candidates.split():
		opt.tool_options('%s' % name, option_group=group)
|
61
third_party/waf/wafadmin/Tools/compiler_cxx.py
vendored
61
third_party/waf/wafadmin/Tools/compiler_cxx.py
vendored
@ -1,61 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# Matthias Jahn jahn dôt matthias ât freenet dôt de 2007 (pmarat)
|
||||
|
||||
import os, sys, imp, types, ccroot
|
||||
import optparse
|
||||
import Utils, Configure, Options
|
||||
from Logs import debug
|
||||
|
||||
# preferred C++ compilers for each build platform, in trial order
cxx_compiler = {
	'win32': ['msvc', 'g++'],
	'cygwin': ['g++'],
	'darwin': ['g++'],
	'aix': ['xlc++', 'g++'],
	'linux': ['g++', 'icpc', 'sunc++'],
	'sunos': ['g++', 'sunc++'],
	'irix': ['g++'],
	'hpux': ['g++'],
	'gnu': ['g++'],
	'default': ['g++']
}

def __list_possible_compiler(platform):
	"""Return the ordered compiler candidates for *platform*, falling back to the default list."""
	return cxx_compiler.get(platform, cxx_compiler['default'])
|
||||
|
||||
def detect(conf):
	"""Try each C++ compiler candidate until one configures successfully."""
	try: test_for_compiler = Options.options.check_cxx_compiler
	except AttributeError: raise Configure.ConfigurationError("Add set_options(opt): opt.tool_options('compiler_cxx')")
	orig = conf.env
	for compiler in test_for_compiler.split():
		try:
			# each candidate gets a fresh copy of the environment
			conf.env = orig.copy()
			conf.check_tool(compiler)
		except Configure.ConfigurationError, e:
			debug('compiler_cxx: %r' % e)
		else:
			if conf.env['CXX']:
				# success: merge the candidate env back into the original and stop
				orig.table = conf.env.get_merged_dict()
				conf.env = orig
				conf.check_message(compiler, '', True)
				conf.env['COMPILER_CXX'] = compiler
				break
			conf.check_message(compiler, '', False)
			# NOTE(review): this break aborts after the first tool that loads
			# without setting CXX, so later candidates are never tried - confirm
			break
	else:
		# the loop exhausted without a break: nothing configured
		conf.fatal('could not configure a cxx compiler!')
|
||||
|
||||
def set_options(opt):
	"""Add the --check-cxx-compiler option plus one option group per candidate tool."""
	platform = Utils.unversioned_sys_platform()
	candidates = ' '.join(__list_possible_compiler(platform))
	group = opt.add_option_group('C++ Compiler Options')
	group.add_option('--check-cxx-compiler', default="%s" % candidates,
		help='On this platform (%s) the following C++ Compiler will be checked by default: "%s"' % (platform, candidates),
		dest="check_cxx_compiler")

	for name in candidates.split():
		opt.tool_options('%s' % name, option_group=group)
|
32
third_party/waf/wafadmin/Tools/compiler_d.py
vendored
32
third_party/waf/wafadmin/Tools/compiler_d.py
vendored
@ -1,32 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# Carlos Rafael Giani, 2007 (dv)
|
||||
|
||||
import os, sys, imp, types
|
||||
import Utils, Configure, Options
|
||||
|
||||
def detect(conf):
	"""Try each D compiler in preference order until one configures."""
	if getattr(Options.options, 'check_dmd_first', None):
		test_for_compiler = ['dmd', 'gdc']
	else:
		test_for_compiler = ['gdc', 'dmd']

	for d_compiler in test_for_compiler:
		try:
			conf.check_tool(d_compiler)
		except Exception:
			# narrowed from a bare 'except:' so that KeyboardInterrupt and
			# SystemExit abort the configuration instead of being swallowed
			pass
		else:
			break
	else:
		conf.fatal('no suitable d compiler was found')
|
||||
|
||||
def set_options(opt):
	"""Add the --check-dmd-first option plus the per-compiler option groups."""
	group = opt.add_option_group('D Compiler Options')
	group.add_option('--check-dmd-first', action='store_true',
		help='checks for the gdc compiler before dmd (default is the other way round)',
		dest='check_dmd_first',
		default=False)

	for name in ('gdc', 'dmd'):
		opt.tool_options('%s' % name, option_group=group)
|
754
third_party/waf/wafadmin/Tools/config_c.py
vendored
754
third_party/waf/wafadmin/Tools/config_c.py
vendored
@ -1,754 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# Thomas Nagy, 2005-2008 (ita)
|
||||
|
||||
"""
|
||||
c/c++ configuration routines
|
||||
"""
|
||||
|
||||
import os, imp, sys, shlex, shutil
|
||||
from Utils import md5
|
||||
import Build, Utils, Configure, Task, Options, Logs, TaskGen
|
||||
from Constants import *
|
||||
from Configure import conf, conftest
|
||||
|
||||
# pkg-config version-check option name -> comparison operator it implements
cfg_ver = {
	'atleast-version': '>=',
	'exact-version': '==',
	'max-version': '<=',
}
|
||||
|
||||
# minimal C program templates used by the configuration checks below:
# SNIP1 - take the address of a symbol (function presence test)
# SNIP2 - reference a type (type presence test)
# SNIP3 - empty program (header / flag compilation test)
SNIP1 = '''
int main() {
	void *p;
	p=(void*)(%s);
	return 0;
}
'''

SNIP2 = '''
int main() {
	if ((%(type_name)s *) 0) return 0;
	if (sizeof (%(type_name)s)) return 0;
}
'''

SNIP3 = '''
int main() {
	return 0;
}
'''
|
||||
|
||||
def parse_flags(line, uselib, env):
	"""pkg-config still has bugs on some platforms, and there are many -config programs, parsing flags is necessary :-/

	Tokenize *line* and append each recognized flag to the uselib-suffixed
	variables of *env* (CPPPATH_X, LIB_X, RPATH_X, LINKFLAGS_X, ...).
	"""

	lst = shlex.split(line)
	while lst:
		x = lst.pop(0)
		st = x[:2]
		ot = x[2:]
		app = env.append_value
		if st == '-I' or st == '/I':
			if not ot: ot = lst.pop(0)
			app('CPPPATH_' + uselib, ot)
		elif st == '-D':
			if not ot: ot = lst.pop(0)
			app('CXXDEFINES_' + uselib, ot)
			app('CCDEFINES_' + uselib, ot)
		elif st == '-l':
			if not ot: ot = lst.pop(0)
			app('LIB_' + uselib, ot)
		elif st == '-L':
			if not ot: ot = lst.pop(0)
			app('LIBPATH_' + uselib, ot)
		elif x == '-pthread' or x.startswith('+'):
			app('CCFLAGS_' + uselib, x)
			app('CXXFLAGS_' + uselib, x)
			app('LINKFLAGS_' + uselib, x)
		elif x == '-framework':
			app('FRAMEWORK_' + uselib, lst.pop(0))
		elif x.startswith('-F'):
			app('FRAMEWORKPATH_' + uselib, x[2:])
		elif x.startswith('-std'):
			app('CCFLAGS_' + uselib, x)
			app('CXXFLAGS_' + uselib, x)
			app('LINKFLAGS_' + uselib, x)
		#
		# NOTE on special treatment of -Wl,-R and -Wl,-rpath:
		#
		# It is important to not put a library provided RPATH
		# into the LINKFLAGS but in the RPATH instead, since
		# the provided LINKFLAGS get prepended to our own internal
		# RPATH later, and hence can potentially lead to linking
		# in too old versions of our internal libs.
		#
		elif x == '-Wl,-rpath' or x == '-Wl,-R':
			# the path is the next token and may itself carry a '-Wl,' prefix.
			# The old code used str.lstrip('-Wl,'), which strips *characters*
			# from the set {'-','W','l',','} and could therefore mangle a path
			# beginning with any of them; strip the literal prefix instead.
			rp = lst.pop(0)
			if rp.startswith('-Wl,'):
				rp = rp[4:]
			app('RPATH_' + uselib, rp)
		elif x.startswith('-Wl,-R,'):
			app('RPATH_' + uselib, x[7:])
		elif x.startswith('-Wl,-R'):
			app('RPATH_' + uselib, x[6:])
		elif x.startswith('-Wl,-rpath,'):
			app('RPATH_' + uselib, x[11:])
		elif x.startswith('-Wl'):
			app('LINKFLAGS_' + uselib, x)
		elif x.startswith('-m') or x.startswith('-f'):
			app('CCFLAGS_' + uselib, x)
			app('CXXFLAGS_' + uselib, x)
|
||||
|
||||
@conf
def ret_msg(self, f, kw):
	"""Return *f* unchanged when it is a string; otherwise call it with *kw*."""
	return f if isinstance(f, str) else f(kw)
|
||||
|
||||
@conf
def validate_cfg(self, kw):
	"""Fill in the default keyword arguments (path, msg, okmsg, errmsg) for check_cfg."""
	# default to pkg-config, locating it once and caching it in the env
	if not 'path' in kw:
		if not self.env.PKGCONFIG:
			self.find_program('pkg-config', var='PKGCONFIG')
		kw['path'] = self.env.PKGCONFIG

	# pkg-config version
	if 'atleast_pkgconfig_version' in kw:
		if not 'msg' in kw:
			kw['msg'] = 'Checking for pkg-config version >= %s' % kw['atleast_pkgconfig_version']
		return

	# pkg-config --modversion
	if 'modversion' in kw:
		return

	if 'variables' in kw:
		if not 'msg' in kw:
			kw['msg'] = 'Checking for %s variables' % kw['package']
		return

	# checking for the version of a module, for the moment, one thing at a time
	for x in cfg_ver.keys():
		y = x.replace('-', '_')
		if y in kw:
			if not 'package' in kw:
				raise ValueError('%s requires a package' % x)

			if not 'msg' in kw:
				kw['msg'] = 'Checking for %s %s %s' % (kw['package'], cfg_ver[x], kw[y])
			return

	# generic package/tool check: supply the default messages
	if not 'msg' in kw:
		kw['msg'] = 'Checking for %s' % (kw['package'] or kw['path'])
	if not 'okmsg' in kw:
		kw['okmsg'] = 'yes'
	if not 'errmsg' in kw:
		kw['errmsg'] = 'not found'
|
||||
|
||||
@conf
def cmd_and_log(self, cmd, kw):
	"""Run *cmd* in a shell, log its output, and return stdout.

	Calls self.fatal (raising a configuration error) when the process
	cannot be started or exits with a non-zero status."""
	Logs.debug('runner: %s\n' % cmd)
	if self.log:
		self.log.write('%s\n' % cmd)

	try:
		p = Utils.pproc.Popen(cmd, stdout=Utils.pproc.PIPE, stderr=Utils.pproc.PIPE, shell=True)
		(out, err) = p.communicate()
	except OSError, e:
		self.log.write('error %r' % e)
		self.fatal(str(e))

	# placeholder, don't touch
	out = str(out)
	err = str(err)

	if self.log:
		self.log.write(out)
		self.log.write(err)

	if p.returncode:
		# provide a default error message for the caller before failing
		if not kw.get('errmsg', ''):
			if kw.get('mandatory', False):
				kw['errmsg'] = out.strip()
			else:
				kw['errmsg'] = 'no'
		self.fatal('fail')
	return out
|
||||
|
||||
@conf
def exec_cfg(self, kw):
	"""Execute the pkg-config (or *-config) command described by *kw*.

	Handles, in order: pkg-config self-version checks, module version
	comparisons, --modversion retrieval, --variable retrieval, and finally
	plain flag extraction (parsed into the env by parse_flags)."""

	# pkg-config version
	if 'atleast_pkgconfig_version' in kw:
		cmd = '%s --atleast-pkgconfig-version=%s' % (kw['path'], kw['atleast_pkgconfig_version'])
		self.cmd_and_log(cmd, kw)
		if not 'okmsg' in kw:
			kw['okmsg'] = 'yes'
		return

	# checking for the version of a module
	for x in cfg_ver:
		y = x.replace('-', '_')
		if y in kw:
			self.cmd_and_log('%s --%s=%s %s' % (kw['path'], x, kw[y], kw['package']), kw)
			if not 'okmsg' in kw:
				kw['okmsg'] = 'yes'
			self.define(self.have_define(kw.get('uselib_store', kw['package'])), 1, 0)
			break

	# retrieving the version of a module
	if 'modversion' in kw:
		version = self.cmd_and_log('%s --modversion %s' % (kw['path'], kw['modversion']), kw).strip()
		self.define('%s_VERSION' % Utils.quote_define_name(kw.get('uselib_store', kw['modversion'])), version)
		return version

	# retrieving variables of a module
	if 'variables' in kw:
		env = kw.get('env', self.env)
		uselib = kw.get('uselib_store', kw['package'].upper())
		vars = Utils.to_list(kw['variables'])
		for v in vars:
			val = self.cmd_and_log('%s --variable=%s %s' % (kw['path'], v, kw['package']), kw).strip()
			var = '%s_%s' % (uselib, v)
			env[var] = val
		if not 'okmsg' in kw:
			kw['okmsg'] = 'yes'
		return

	# build the final command line: path, --define-variable args, extra args, package
	lst = [kw['path']]

	defi = kw.get('define_variable', None)
	if not defi:
		defi = self.env.PKG_CONFIG_DEFINES or {}
	for key, val in defi.iteritems():
		lst.append('--define-variable=%s=%s' % (key, val))

	lst.append(kw.get('args', ''))
	lst.append(kw['package'])

	# so we assume the command-line will output flags to be parsed afterwards
	cmd = ' '.join(lst)
	ret = self.cmd_and_log(cmd, kw)
	if not 'okmsg' in kw:
		kw['okmsg'] = 'yes'

	self.define(self.have_define(kw.get('uselib_store', kw['package'])), 1, 0)
	parse_flags(ret, kw.get('uselib_store', kw['package'].upper()), kw.get('env', self.env))
	return ret
|
||||
|
||||
@conf
def check_cfg(self, *k, **kw):
	"""
	for pkg-config mostly, but also all the -config tools
	conf.check_cfg(path='mpicc', args='--showme:compile --showme:link', package='', uselib_store='OPEN_MPI')
	conf.check_cfg(package='dbus-1', variables='system_bus_default_address session_bus_services_dir')
	"""

	self.validate_cfg(kw)
	if 'msg' in kw:
		self.check_message_1(kw['msg'])
	ret = None
	try:
		ret = self.exec_cfg(kw)
	except Configure.ConfigurationError, e:
		if 'errmsg' in kw:
			self.check_message_2(kw['errmsg'], 'YELLOW')
		# a failed mandatory check aborts the configuration
		if 'mandatory' in kw and kw['mandatory']:
			if Logs.verbose > 1:
				raise
			else:
				self.fatal('the configuration failed (see %r)' % self.log.name)
	else:
		kw['success'] = ret
		if 'okmsg' in kw:
			self.check_message_2(self.ret_msg(kw['okmsg'], kw))

	return ret
|
||||
|
||||
# the idea is the following: now that we are certain
|
||||
# that all the code here is only for c or c++, it is
|
||||
# easy to put all the logic in one function
|
||||
#
|
||||
# this should prevent code duplication (ita)
|
||||
|
||||
# env: an optional environment (modified -> provide a copy)
|
||||
# compiler: cc or cxx - it tries to guess what is best
|
||||
# type: cprogram, cshlib, cstaticlib
|
||||
# code: a c code to execute
|
||||
# uselib_store: where to add the variables
|
||||
# uselib: parameters to use for building
|
||||
# define: define to set, like FOO in #define FOO, if not set, add /* #undef FOO */
|
||||
# execute: True or False - will return the result of the execution
|
||||
|
||||
@conf
def validate_c(self, kw):
	"""validate the parameters for the test method"""

	if not 'env' in kw:
		kw['env'] = self.env.copy()

	env = kw['env']
	# pick a compiler when the caller did not: prefer c++ when available
	if not 'compiler' in kw:
		kw['compiler'] = 'cc'
		if env['CXX_NAME'] and Task.TaskBase.classes.get('cxx', None):
			kw['compiler'] = 'cxx'
			if not self.env['CXX']:
				self.fatal('a c++ compiler is required')
		else:
			if not self.env['CC']:
				self.fatal('a c compiler is required')

	if not 'type' in kw:
		kw['type'] = 'cprogram'

	assert not(kw['type'] != 'cprogram' and kw.get('execute', 0)), 'can only execute programs'

	#if kw['type'] != 'program' and kw.get('execute', 0):
	#	raise ValueError, 'can only execute programs'

	def to_header(dct):
		# build '#include <...>' lines from the header_name entry, if any
		if 'header_name' in dct:
			dct = Utils.to_list(dct['header_name'])
			return ''.join(['#include <%s>\n' % x for x in dct])
		return ''

	# set the file name
	if not 'compile_mode' in kw:
		kw['compile_mode'] = (kw['compiler'] == 'cxx') and 'cxx' or 'cc'

	if not 'compile_filename' in kw:
		kw['compile_filename'] = 'test.c' + ((kw['compile_mode'] == 'cxx') and 'pp' or '')

	#OSX
	if 'framework_name' in kw:
		try: TaskGen.task_gen.create_task_macapp
		except AttributeError: self.fatal('frameworks require the osx tool')

		fwkname = kw['framework_name']
		if not 'uselib_store' in kw:
			kw['uselib_store'] = fwkname.upper()

		if not kw.get('no_header', False):
			if not 'header_name' in kw:
				kw['header_name'] = []
			fwk = '%s/%s.h' % (fwkname, fwkname)
			if kw.get('remove_dot_h', None):
				fwk = fwk[:-2]
			kw['header_name'] = Utils.to_list(kw['header_name']) + [fwk]

		kw['msg'] = 'Checking for framework %s' % fwkname
		kw['framework'] = fwkname
		#kw['frameworkpath'] = set it yourself

	# exactly one of the following three shapes builds the test code fragment
	if 'function_name' in kw:
		fu = kw['function_name']
		if not 'msg' in kw:
			kw['msg'] = 'Checking for function %s' % fu
		kw['code'] = to_header(kw) + SNIP1 % fu
		if not 'uselib_store' in kw:
			kw['uselib_store'] = fu.upper()
		if not 'define_name' in kw:
			kw['define_name'] = self.have_define(fu)

	elif 'type_name' in kw:
		tu = kw['type_name']
		if not 'msg' in kw:
			kw['msg'] = 'Checking for type %s' % tu
		if not 'header_name' in kw:
			kw['header_name'] = 'stdint.h'
		kw['code'] = to_header(kw) + SNIP2 % {'type_name' : tu}
		if not 'define_name' in kw:
			kw['define_name'] = self.have_define(tu.upper())

	elif 'header_name' in kw:
		if not 'msg' in kw:
			kw['msg'] = 'Checking for header %s' % kw['header_name']

		l = Utils.to_list(kw['header_name'])
		assert len(l)>0, 'list of headers in header_name is empty'

		kw['code'] = to_header(kw) + SNIP3

		if not 'uselib_store' in kw:
			kw['uselib_store'] = l[0].upper()

		if not 'define_name' in kw:
			kw['define_name'] = self.have_define(l[0])

	if 'lib' in kw:
		if not 'msg' in kw:
			kw['msg'] = 'Checking for library %s' % kw['lib']
		if not 'uselib_store' in kw:
			kw['uselib_store'] = kw['lib'].upper()

	if 'staticlib' in kw:
		if not 'msg' in kw:
			kw['msg'] = 'Checking for static library %s' % kw['staticlib']
		if not 'uselib_store' in kw:
			kw['uselib_store'] = kw['staticlib'].upper()

	if 'fragment' in kw:
		# an additional code fragment may be provided to replace the predefined code
		# in custom headers
		kw['code'] = kw['fragment']
		if not 'msg' in kw:
			kw['msg'] = 'Checking for custom code'
		if not 'errmsg' in kw:
			kw['errmsg'] = 'no'

	for (flagsname,flagstype) in [('cxxflags','compiler'), ('cflags','compiler'), ('linkflags','linker')]:
		if flagsname in kw:
			if not 'msg' in kw:
				kw['msg'] = 'Checking for %s flags %s' % (flagstype, kw[flagsname])
			if not 'errmsg' in kw:
				kw['errmsg'] = 'no'

	# final fallbacks for the remaining keyword arguments
	if not 'execute' in kw:
		kw['execute'] = False

	if not 'errmsg' in kw:
		kw['errmsg'] = 'not found'

	if not 'okmsg' in kw:
		kw['okmsg'] = 'yes'

	if not 'code' in kw:
		kw['code'] = SNIP3

	if not kw.get('success'): kw['success'] = None

	assert 'msg' in kw, 'invalid parameters, read http://freehackers.org/~tnagy/wafbook/single.html#config_helpers_c'
|
||||
|
||||
@conf
def post_check(self, *k, **kw):
	"set the variables after a test was run successfully"

	# for executed tests, success is the (non-None) program output;
	# for compile-only tests, success is a 0 return code
	is_success = False
	if kw['execute']:
		if kw['success'] is not None:
			is_success = True
	else:
		is_success = (kw['success'] == 0)

	if 'define_name' in kw:
		if 'header_name' in kw or 'function_name' in kw or 'type_name' in kw or 'fragment' in kw:
			if kw['execute']:
				key = kw['success']
				if isinstance(key, str):
					if key:
						# non-empty program output becomes the define value
						self.define(kw['define_name'], key, quote=kw.get('quote', 1))
					else:
						self.define_cond(kw['define_name'], True)
				else:
					self.define_cond(kw['define_name'], False)
			else:
				self.define_cond(kw['define_name'], is_success)

	if is_success and 'uselib_store' in kw:
		import cc, cxx
		# propagate the lowercase kw flags into the uselib-suffixed env vars
		for k in set(cc.g_cc_flag_vars).union(cxx.g_cxx_flag_vars):
			lk = k.lower()
			# inconsistency: includes -> CPPPATH
			if k == 'CPPPATH': lk = 'includes'
			if k == 'CXXDEFINES': lk = 'defines'
			if k == 'CCDEFINES': lk = 'defines'
			if lk in kw:
				val = kw[lk]
				# remove trailing slash
				if isinstance(val, str):
					val = val.rstrip(os.path.sep)
				self.env.append_unique(k + '_' + kw['uselib_store'], val)
|
||||
|
||||
@conf
def check(self, *k, **kw):
	# so this will be the generic function
	# it will be safer to use check_cxx or check_cc
	self.validate_c(kw)
	self.check_message_1(kw['msg'])
	ret = None
	try:
		ret = self.run_c_code(*k, **kw)
	except Configure.ConfigurationError, e:
		self.check_message_2(kw['errmsg'], 'YELLOW')
		# a failed mandatory check aborts the configuration
		if 'mandatory' in kw and kw['mandatory']:
			if Logs.verbose > 1:
				raise
			else:
				self.fatal('the configuration failed (see %r)' % self.log.name)
	else:
		kw['success'] = ret
		self.check_message_2(self.ret_msg(kw['okmsg'], kw))

	self.post_check(*k, **kw)
	# compile-only checks return a boolean; executed checks return the output
	if not kw.get('execute', False):
		return ret == 0
	return ret
|
||||
|
||||
@conf
def run_c_code(self, *k, **kw):
	"""Write kw['code'] into a scratch folder, build it with a private
	BuildContext, and optionally execute the resulting program."""
	test_f_name = kw['compile_filename']

	# NOTE(review): this rebinds 'k', shadowing the *k positional args above
	k = 0
	while k < 10000:
		# make certain to use a fresh folder - necessary for win32
		dir = os.path.join(self.blddir, '.conf_check_%d' % k)

		# if the folder already exists, remove it
		try:
			shutil.rmtree(dir)
		except OSError:
			pass

		try:
			os.stat(dir)
		except OSError:
			break

		k += 1

	try:
		os.makedirs(dir)
	except:
		self.fatal('cannot create a configuration test folder %r' % dir)

	try:
		os.stat(dir)
	except:
		self.fatal('cannot use the configuration test folder %r' % dir)

	bdir = os.path.join(dir, 'testbuild')

	if not os.path.exists(bdir):
		os.makedirs(bdir)

	env = kw['env']

	# write the test program source file
	dest = open(os.path.join(dir, test_f_name), 'w')
	dest.write(kw['code'])
	dest.close()

	back = os.path.abspath('.')

	# a throwaway build context for the test build
	bld = Build.BuildContext()
	bld.log = self.log
	bld.all_envs.update(self.all_envs)
	bld.all_envs['default'] = env
	bld.lst_variants = bld.all_envs.keys()
	bld.load_dirs(dir, bdir)

	os.chdir(dir)

	bld.rescan(bld.srcnode)

	if not 'features' in kw:
		# conf.check(features='cc cprogram pyext', ...)
		kw['features'] = [kw['compile_mode'], kw['type']] # "cprogram cc"

	o = bld(features=kw['features'], source=test_f_name, target='testprog')

	# forward every keyword argument onto the task generator
	for k, v in kw.iteritems():
		setattr(o, k, v)

	self.log.write("==>\n%s\n<==\n" % kw['code'])

	# compile the program
	try:
		bld.compile()
	except Utils.WafError:
		ret = Utils.ex_stack()
	else:
		ret = 0

	# chdir before returning
	os.chdir(back)

	if ret:
		self.log.write('command returned %r' % ret)
		self.fatal(str(ret))

	# if we need to run the program, try to get its result
	# keep the name of the program to execute
	if kw['execute']:
		lastprog = o.link_task.outputs[0].abspath(env)

		args = Utils.to_list(kw.get('exec_args', []))
		proc = Utils.pproc.Popen([lastprog] + args, stdout=Utils.pproc.PIPE, stderr=Utils.pproc.PIPE)
		(out, err) = proc.communicate()
		w = self.log.write
		w(str(out))
		w('\n')
		w(str(err))
		w('\n')
		w('returncode %r' % proc.returncode)
		w('\n')
		if proc.returncode:
			self.fatal(Utils.ex_stack())
		ret = out

	return ret
|
||||
|
||||
@conf
def check_cxx(self, *k, **kw):
	# shorthand for check() with the c++ compiler forced
	kw['compiler'] = 'cxx'
	return self.check(*k, **kw)
|
||||
|
||||
@conf
def check_cc(self, *k, **kw):
	# shorthand for check() with the c compiler forced
	kw['compiler'] = 'cc'
	return self.check(*k, **kw)
|
||||
|
||||
@conf
def define(self, define, value, quote=1):
	"""store a single define and its state into an internal list for later
	writing to a config header file. Value can only be
	a string or int; other types not supported. String
	values will appear properly quoted in the generated
	header file."""
	assert define and isinstance(define, str)

	# ordered_dict is for writing the configuration header in order
	tbl = self.env[DEFINES] or Utils.ordered_dict()

	# the user forgot to tell if the value is quoted or not
	if isinstance(value, str):
		if quote:
			# repr('"'+value)[2:-1] escapes backslashes etc. while the leading
			# '"' forces repr to use single quotes; double quotes are then
			# escaped by hand for the C header
			tbl[define] = '"%s"' % repr('"'+value)[2:-1].replace('"', '\\"')
		else:
			tbl[define] = value
	elif isinstance(value, int):
		tbl[define] = value
	else:
		raise TypeError('define %r -> %r must be a string or an int' % (define, value))

	# add later to make reconfiguring faster
	self.env[DEFINES] = tbl
	self.env[define] = value # <- not certain this is necessary
|
||||
|
||||
@conf
def undefine(self, define):
	"""Mark *define* as undefined for the config header written later."""
	assert define and isinstance(define, str)

	tbl = self.env[DEFINES] or Utils.ordered_dict()
	tbl[define] = UNDEFINED

	# store back to make reconfiguring faster
	self.env[DEFINES] = tbl
	self.env[define] = UNDEFINED
|
||||
|
||||
@conf
def define_cond(self, name, value):
	"""Conditionally define a name.
	Formally equivalent to: if value: define(name, 1) else: undefine(name)"""
	if not value:
		self.undefine(name)
	else:
		self.define(name, 1)
|
||||
|
||||
@conf
def is_defined(self, key):
	"""Return True when *key* has been defined to something other than UNDEFINED."""
	defines = self.env[DEFINES]
	if not defines:
		return False
	try:
		return defines[key] != UNDEFINED
	except KeyError:
		return False
|
||||
|
||||
@conf
|
||||
def get_define(self, define):
|
||||
"get the value of a previously stored define"
|
||||
try: return self.env[DEFINES][define]
|
||||
except KeyError: return None
|
||||
|
||||
@conf
|
||||
def have_define(self, name):
|
||||
"prefix the define with 'HAVE_' and make sure it has valid characters."
|
||||
return self.__dict__.get('HAVE_PAT', 'HAVE_%s') % Utils.quote_define_name(name)
|
||||
|
||||
@conf
|
||||
def write_config_header(self, configfile='', env='', guard='', top=False):
|
||||
"save the defines into a file"
|
||||
if not configfile: configfile = WAF_CONFIG_H
|
||||
waf_guard = guard or '_%s_WAF' % Utils.quote_define_name(configfile)
|
||||
|
||||
# configfile -> absolute path
|
||||
# there is a good reason to concatenate first and to split afterwards
|
||||
if not env: env = self.env
|
||||
if top:
|
||||
diff = ''
|
||||
else:
|
||||
diff = Utils.diff_path(self.srcdir, self.curdir)
|
||||
full = os.sep.join([self.blddir, env.variant(), diff, configfile])
|
||||
full = os.path.normpath(full)
|
||||
(dir, base) = os.path.split(full)
|
||||
|
||||
try: os.makedirs(dir)
|
||||
except: pass
|
||||
|
||||
dest = open(full, 'w')
|
||||
dest.write('/* Configuration header created by Waf - do not edit */\n')
|
||||
dest.write('#ifndef %s\n#define %s\n\n' % (waf_guard, waf_guard))
|
||||
|
||||
dest.write(self.get_config_header())
|
||||
|
||||
# config files are not removed on "waf clean"
|
||||
env.append_unique(CFG_FILES, os.path.join(diff, configfile))
|
||||
|
||||
dest.write('\n#endif /* %s */\n' % waf_guard)
|
||||
dest.close()
|
||||
|
||||
@conf
|
||||
def get_config_header(self):
|
||||
"""Fill-in the contents of the config header. Override when you need to write your own config header."""
|
||||
config_header = []
|
||||
|
||||
tbl = self.env[DEFINES] or Utils.ordered_dict()
|
||||
for key in tbl.allkeys:
|
||||
value = tbl[key]
|
||||
if value is None:
|
||||
config_header.append('#define %s' % key)
|
||||
elif value is UNDEFINED:
|
||||
config_header.append('/* #undef %s */' % key)
|
||||
else:
|
||||
config_header.append('#define %s %s' % (key, value))
|
||||
return "\n".join(config_header)
|
||||
|
||||
@conftest
|
||||
def find_cpp(conf):
|
||||
v = conf.env
|
||||
cpp = []
|
||||
if v['CPP']: cpp = v['CPP']
|
||||
elif 'CPP' in conf.environ: cpp = conf.environ['CPP']
|
||||
if not cpp: cpp = conf.find_program('cpp', var='CPP')
|
||||
#if not cpp: cpp = v['CC']
|
||||
#if not cpp: cpp = v['CXX']
|
||||
v['CPP'] = cpp
|
||||
|
||||
@conftest
|
||||
def cc_add_flags(conf):
|
||||
conf.add_os_flags('CFLAGS', 'CCFLAGS')
|
||||
conf.add_os_flags('CPPFLAGS')
|
||||
|
||||
@conftest
|
||||
def cxx_add_flags(conf):
|
||||
conf.add_os_flags('CXXFLAGS')
|
||||
conf.add_os_flags('CPPFLAGS')
|
||||
|
||||
@conftest
|
||||
def link_add_flags(conf):
|
||||
conf.add_os_flags('LINKFLAGS')
|
||||
conf.add_os_flags('LDFLAGS', 'LINKFLAGS')
|
||||
|
||||
@conftest
|
||||
def cc_load_tools(conf):
|
||||
conf.check_tool('cc')
|
||||
|
||||
@conftest
|
||||
def cxx_load_tools(conf):
|
||||
conf.check_tool('cxx')
|
67
third_party/waf/wafadmin/Tools/cs.py
vendored
67
third_party/waf/wafadmin/Tools/cs.py
vendored
@ -1,67 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# Thomas Nagy, 2006 (ita)
|
||||
|
||||
"C# support"
|
||||
|
||||
import TaskGen, Utils, Task, Options
|
||||
from Logs import error
|
||||
from TaskGen import before, after, taskgen, feature
|
||||
|
||||
flag_vars= ['FLAGS', 'ASSEMBLIES']
|
||||
|
||||
@feature('cs')
|
||||
def init_cs(self):
|
||||
Utils.def_attrs(self,
|
||||
flags = '',
|
||||
assemblies = '',
|
||||
resources = '',
|
||||
uselib = '')
|
||||
|
||||
@feature('cs')
|
||||
@after('init_cs')
|
||||
def apply_uselib_cs(self):
|
||||
if not self.uselib:
|
||||
return
|
||||
global flag_vars
|
||||
for var in self.to_list(self.uselib):
|
||||
for v in self.flag_vars:
|
||||
val = self.env[v+'_'+var]
|
||||
if val: self.env.append_value(v, val)
|
||||
|
||||
@feature('cs')
|
||||
@after('apply_uselib_cs')
|
||||
@before('apply_core')
|
||||
def apply_cs(self):
|
||||
try: self.meths.remove('apply_core')
|
||||
except ValueError: pass
|
||||
|
||||
# process the flags for the assemblies
|
||||
for i in self.to_list(self.assemblies) + self.env['ASSEMBLIES']:
|
||||
self.env.append_unique('_ASSEMBLIES', '/r:'+i)
|
||||
|
||||
# process the flags for the resources
|
||||
for i in self.to_list(self.resources):
|
||||
self.env.append_unique('_RESOURCES', '/resource:'+i)
|
||||
|
||||
# what kind of assembly are we generating?
|
||||
self.env['_TYPE'] = getattr(self, 'type', 'exe')
|
||||
|
||||
# additional flags
|
||||
self.env.append_unique('_FLAGS', self.to_list(self.flags))
|
||||
self.env.append_unique('_FLAGS', self.env.FLAGS)
|
||||
|
||||
# process the sources
|
||||
nodes = [self.path.find_resource(i) for i in self.to_list(self.source)]
|
||||
self.create_task('mcs', nodes, self.path.find_or_declare(self.target))
|
||||
|
||||
Task.simple_task_type('mcs', '${MCS} ${SRC} /target:${_TYPE} /out:${TGT} ${_FLAGS} ${_ASSEMBLIES} ${_RESOURCES}', color='YELLOW')
|
||||
|
||||
def detect(conf):
|
||||
csc = getattr(Options.options, 'cscbinary', None)
|
||||
if csc:
|
||||
conf.env.MCS = csc
|
||||
conf.find_program(['gmcs', 'mcs'], var='MCS')
|
||||
|
||||
def set_options(opt):
|
||||
opt.add_option('--with-csc-binary', type='string', dest='cscbinary')
|
103
third_party/waf/wafadmin/Tools/cxx.py
vendored
103
third_party/waf/wafadmin/Tools/cxx.py
vendored
@ -1,103 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# Thomas Nagy, 2005 (ita)
|
||||
|
||||
"Base for c++ programs and libraries"
|
||||
|
||||
import TaskGen, Task, Utils
|
||||
from Logs import debug
|
||||
import ccroot # <- do not remove
|
||||
from TaskGen import feature, before, extension, after
|
||||
|
||||
g_cxx_flag_vars = [
|
||||
'CXXDEPS', 'FRAMEWORK', 'FRAMEWORKPATH',
|
||||
'STATICLIB', 'LIB', 'LIBPATH', 'LINKFLAGS', 'RPATH',
|
||||
'CXXFLAGS', 'CCFLAGS', 'CPPPATH', 'CPPFLAGS', 'CXXDEFINES']
|
||||
"main cpp variables"
|
||||
|
||||
EXT_CXX = ['.cpp', '.cc', '.cxx', '.C', '.c++']
|
||||
|
||||
g_cxx_type_vars=['CXXFLAGS', 'LINKFLAGS']
|
||||
|
||||
# TODO remove in waf 1.6
|
||||
class cxx_taskgen(ccroot.ccroot_abstract):
|
||||
pass
|
||||
|
||||
@feature('cxx')
|
||||
@before('apply_type_vars')
|
||||
@after('default_cc')
|
||||
def init_cxx(self):
|
||||
if not 'cc' in self.features:
|
||||
self.mappings['.c'] = TaskGen.task_gen.mappings['.cxx']
|
||||
|
||||
self.p_flag_vars = set(self.p_flag_vars).union(g_cxx_flag_vars)
|
||||
self.p_type_vars = set(self.p_type_vars).union(g_cxx_type_vars)
|
||||
|
||||
if not self.env['CXX_NAME']:
|
||||
raise Utils.WafError("At least one compiler (g++, ..) must be selected")
|
||||
|
||||
@feature('cxx')
|
||||
@after('apply_incpaths')
|
||||
def apply_obj_vars_cxx(self):
|
||||
"""after apply_incpaths for INC_PATHS"""
|
||||
env = self.env
|
||||
app = env.append_unique
|
||||
cxxpath_st = env['CPPPATH_ST']
|
||||
|
||||
# local flags come first
|
||||
# set the user-defined includes paths
|
||||
for i in env['INC_PATHS']:
|
||||
app('_CXXINCFLAGS', cxxpath_st % i.bldpath(env))
|
||||
app('_CXXINCFLAGS', cxxpath_st % i.srcpath(env))
|
||||
|
||||
# set the library include paths
|
||||
for i in env['CPPPATH']:
|
||||
app('_CXXINCFLAGS', cxxpath_st % i)
|
||||
|
||||
@feature('cxx')
|
||||
@after('apply_lib_vars')
|
||||
def apply_defines_cxx(self):
|
||||
"""after uselib is set for CXXDEFINES"""
|
||||
self.defines = getattr(self, 'defines', [])
|
||||
lst = self.to_list(self.defines) + self.to_list(self.env['CXXDEFINES'])
|
||||
milst = []
|
||||
|
||||
# now process the local defines
|
||||
for defi in lst:
|
||||
if not defi in milst:
|
||||
milst.append(defi)
|
||||
|
||||
# CXXDEFINES_USELIB
|
||||
libs = self.to_list(self.uselib)
|
||||
for l in libs:
|
||||
val = self.env['CXXDEFINES_'+l]
|
||||
if val: milst += self.to_list(val)
|
||||
|
||||
self.env['DEFLINES'] = ["%s %s" % (x[0], Utils.trimquotes('='.join(x[1:]))) for x in [y.split('=') for y in milst]]
|
||||
y = self.env['CXXDEFINES_ST']
|
||||
self.env.append_unique('_CXXDEFFLAGS', [y%x for x in milst])
|
||||
|
||||
@extension(EXT_CXX)
|
||||
def cxx_hook(self, node):
|
||||
# create the compilation task: cpp or cc
|
||||
if getattr(self, 'obj_ext', None):
|
||||
obj_ext = self.obj_ext
|
||||
else:
|
||||
obj_ext = '_%d.o' % self.idx
|
||||
|
||||
task = self.create_task('cxx', node, node.change_ext(obj_ext))
|
||||
try:
|
||||
self.compiled_tasks.append(task)
|
||||
except AttributeError:
|
||||
raise Utils.WafError('Have you forgotten to set the feature "cxx" on %s?' % str(self))
|
||||
return task
|
||||
|
||||
cxx_str = '${CXX} ${CXXFLAGS} ${CPPFLAGS} ${_CXXINCFLAGS} ${_CXXDEFFLAGS} ${CXX_SRC_F}${SRC} ${CXX_TGT_F}${TGT}'
|
||||
cls = Task.simple_task_type('cxx', cxx_str, color='GREEN', ext_out='.o', ext_in='.cxx', shell=False)
|
||||
cls.scan = ccroot.scan
|
||||
cls.vars.append('CXXDEPS')
|
||||
|
||||
link_str = '${LINK_CXX} ${CXXLNK_SRC_F}${SRC} ${CXXLNK_TGT_F}${TGT[0].abspath(env)} ${LINKFLAGS}'
|
||||
cls = Task.simple_task_type('cxx_link', link_str, color='YELLOW', ext_in='.o', ext_out='.bin', shell=False)
|
||||
cls.maxjobs = 1
|
||||
cls.install = Utils.nada
|
534
third_party/waf/wafadmin/Tools/d.py
vendored
534
third_party/waf/wafadmin/Tools/d.py
vendored
@ -1,534 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# Carlos Rafael Giani, 2007 (dv)
|
||||
# Thomas Nagy, 2007-2008 (ita)
|
||||
|
||||
import os, sys, re, optparse
|
||||
import ccroot # <- leave this
|
||||
import TaskGen, Utils, Task, Configure, Logs, Build
|
||||
from Logs import debug, error
|
||||
from TaskGen import taskgen, feature, after, before, extension
|
||||
from Configure import conftest
|
||||
|
||||
EXT_D = ['.d', '.di', '.D']
|
||||
D_METHS = ['apply_core', 'apply_vnum', 'apply_objdeps'] # additional d methods
|
||||
|
||||
DLIB = """
|
||||
version(D_Version2) {
|
||||
import std.stdio;
|
||||
int main() {
|
||||
writefln("phobos2");
|
||||
return 0;
|
||||
}
|
||||
} else {
|
||||
version(Tango) {
|
||||
import tango.stdc.stdio;
|
||||
int main() {
|
||||
printf("tango");
|
||||
return 0;
|
||||
}
|
||||
} else {
|
||||
import std.stdio;
|
||||
int main() {
|
||||
writefln("phobos1");
|
||||
return 0;
|
||||
}
|
||||
}
|
||||
}
|
||||
"""
|
||||
|
||||
def filter_comments(filename):
|
||||
txt = Utils.readf(filename)
|
||||
i = 0
|
||||
buf = []
|
||||
max = len(txt)
|
||||
begin = 0
|
||||
while i < max:
|
||||
c = txt[i]
|
||||
if c == '"' or c == "'": # skip a string or character literal
|
||||
buf.append(txt[begin:i])
|
||||
delim = c
|
||||
i += 1
|
||||
while i < max:
|
||||
c = txt[i]
|
||||
if c == delim: break
|
||||
elif c == '\\': # skip the character following backslash
|
||||
i += 1
|
||||
i += 1
|
||||
i += 1
|
||||
begin = i
|
||||
elif c == '/': # try to replace a comment with whitespace
|
||||
buf.append(txt[begin:i])
|
||||
i += 1
|
||||
if i == max: break
|
||||
c = txt[i]
|
||||
if c == '+': # eat nesting /+ +/ comment
|
||||
i += 1
|
||||
nesting = 1
|
||||
c = None
|
||||
while i < max:
|
||||
prev = c
|
||||
c = txt[i]
|
||||
if prev == '/' and c == '+':
|
||||
nesting += 1
|
||||
c = None
|
||||
elif prev == '+' and c == '/':
|
||||
nesting -= 1
|
||||
if nesting == 0: break
|
||||
c = None
|
||||
i += 1
|
||||
elif c == '*': # eat /* */ comment
|
||||
i += 1
|
||||
c = None
|
||||
while i < max:
|
||||
prev = c
|
||||
c = txt[i]
|
||||
if prev == '*' and c == '/': break
|
||||
i += 1
|
||||
elif c == '/': # eat // comment
|
||||
i += 1
|
||||
while i < max and txt[i] != '\n':
|
||||
i += 1
|
||||
else: # no comment
|
||||
begin = i - 1
|
||||
continue
|
||||
i += 1
|
||||
begin = i
|
||||
buf.append(' ')
|
||||
else:
|
||||
i += 1
|
||||
buf.append(txt[begin:])
|
||||
return buf
|
||||
|
||||
class d_parser(object):
|
||||
def __init__(self, env, incpaths):
|
||||
#self.code = ''
|
||||
#self.module = ''
|
||||
#self.imports = []
|
||||
|
||||
self.allnames = []
|
||||
|
||||
self.re_module = re.compile("module\s+([^;]+)")
|
||||
self.re_import = re.compile("import\s+([^;]+)")
|
||||
self.re_import_bindings = re.compile("([^:]+):(.*)")
|
||||
self.re_import_alias = re.compile("[^=]+=(.+)")
|
||||
|
||||
self.env = env
|
||||
|
||||
self.nodes = []
|
||||
self.names = []
|
||||
|
||||
self.incpaths = incpaths
|
||||
|
||||
def tryfind(self, filename):
|
||||
found = 0
|
||||
for n in self.incpaths:
|
||||
found = n.find_resource(filename.replace('.', '/') + '.d')
|
||||
if found:
|
||||
self.nodes.append(found)
|
||||
self.waiting.append(found)
|
||||
break
|
||||
if not found:
|
||||
if not filename in self.names:
|
||||
self.names.append(filename)
|
||||
|
||||
def get_strings(self, code):
|
||||
#self.imports = []
|
||||
self.module = ''
|
||||
lst = []
|
||||
|
||||
# get the module name (if present)
|
||||
|
||||
mod_name = self.re_module.search(code)
|
||||
if mod_name:
|
||||
self.module = re.sub('\s+', '', mod_name.group(1)) # strip all whitespaces
|
||||
|
||||
# go through the code, have a look at all import occurrences
|
||||
|
||||
# first, lets look at anything beginning with "import" and ending with ";"
|
||||
import_iterator = self.re_import.finditer(code)
|
||||
if import_iterator:
|
||||
for import_match in import_iterator:
|
||||
import_match_str = re.sub('\s+', '', import_match.group(1)) # strip all whitespaces
|
||||
|
||||
# does this end with an import bindings declaration?
|
||||
# (import bindings always terminate the list of imports)
|
||||
bindings_match = self.re_import_bindings.match(import_match_str)
|
||||
if bindings_match:
|
||||
import_match_str = bindings_match.group(1)
|
||||
# if so, extract the part before the ":" (since the module declaration(s) is/are located there)
|
||||
|
||||
# split the matching string into a bunch of strings, separated by a comma
|
||||
matches = import_match_str.split(',')
|
||||
|
||||
for match in matches:
|
||||
alias_match = self.re_import_alias.match(match)
|
||||
if alias_match:
|
||||
# is this an alias declaration? (alias = module name) if so, extract the module name
|
||||
match = alias_match.group(1)
|
||||
|
||||
lst.append(match)
|
||||
return lst
|
||||
|
||||
def start(self, node):
|
||||
self.waiting = [node]
|
||||
# while the stack is not empty, add the dependencies
|
||||
while self.waiting:
|
||||
nd = self.waiting.pop(0)
|
||||
self.iter(nd)
|
||||
|
||||
def iter(self, node):
|
||||
path = node.abspath(self.env) # obtain the absolute path
|
||||
code = "".join(filter_comments(path)) # read the file and filter the comments
|
||||
names = self.get_strings(code) # obtain the import strings
|
||||
for x in names:
|
||||
# optimization
|
||||
if x in self.allnames: continue
|
||||
self.allnames.append(x)
|
||||
|
||||
# for each name, see if it is like a node or not
|
||||
self.tryfind(x)
|
||||
|
||||
def scan(self):
|
||||
"look for .d/.di the .d source need"
|
||||
env = self.env
|
||||
gruik = d_parser(env, env['INC_PATHS'])
|
||||
gruik.start(self.inputs[0])
|
||||
|
||||
if Logs.verbose:
|
||||
debug('deps: nodes found for %s: %s %s' % (str(self.inputs[0]), str(gruik.nodes), str(gruik.names)))
|
||||
#debug("deps found for %s: %s" % (str(node), str(gruik.deps)), 'deps')
|
||||
return (gruik.nodes, gruik.names)
|
||||
|
||||
def get_target_name(self):
|
||||
"for d programs and libs"
|
||||
v = self.env
|
||||
tp = 'program'
|
||||
for x in self.features:
|
||||
if x in ['dshlib', 'dstaticlib']:
|
||||
tp = x.lstrip('d')
|
||||
return v['D_%s_PATTERN' % tp] % self.target
|
||||
|
||||
d_params = {
|
||||
'dflags': '',
|
||||
'importpaths':'',
|
||||
'libs':'',
|
||||
'libpaths':'',
|
||||
'generate_headers':False,
|
||||
}
|
||||
|
||||
@feature('d')
|
||||
@before('apply_type_vars')
|
||||
def init_d(self):
|
||||
for x in d_params:
|
||||
setattr(self, x, getattr(self, x, d_params[x]))
|
||||
|
||||
class d_taskgen(TaskGen.task_gen):
|
||||
def __init__(self, *k, **kw):
|
||||
TaskGen.task_gen.__init__(self, *k, **kw)
|
||||
|
||||
# COMPAT
|
||||
if len(k) > 1:
|
||||
self.features.append('d' + k[1])
|
||||
|
||||
# okay, we borrow a few methods from ccroot
|
||||
TaskGen.bind_feature('d', D_METHS)
|
||||
|
||||
@feature('d')
|
||||
@before('apply_d_libs')
|
||||
def init_d(self):
|
||||
Utils.def_attrs(self,
|
||||
dflags='',
|
||||
importpaths='',
|
||||
libs='',
|
||||
libpaths='',
|
||||
uselib='',
|
||||
uselib_local='',
|
||||
generate_headers=False, # set to true if you want .di files as well as .o
|
||||
compiled_tasks=[],
|
||||
add_objects=[],
|
||||
link_task=None)
|
||||
|
||||
@feature('d')
|
||||
@after('apply_d_link', 'init_d')
|
||||
@before('apply_vnum', 'apply_d_vars')
|
||||
def apply_d_libs(self):
|
||||
"""after apply_link because of 'link_task'
|
||||
after default_cc because of the attribute 'uselib'"""
|
||||
env = self.env
|
||||
|
||||
# 1. the case of the libs defined in the project (visit ancestors first)
|
||||
# the ancestors external libraries (uselib) will be prepended
|
||||
self.uselib = self.to_list(self.uselib)
|
||||
names = self.to_list(self.uselib_local)
|
||||
|
||||
seen = set([])
|
||||
tmp = Utils.deque(names) # consume a copy of the list of names
|
||||
while tmp:
|
||||
lib_name = tmp.popleft()
|
||||
# visit dependencies only once
|
||||
if lib_name in seen:
|
||||
continue
|
||||
|
||||
y = self.name_to_obj(lib_name)
|
||||
if not y:
|
||||
raise Utils.WafError('object %r was not found in uselib_local (required by %r)' % (lib_name, self.name))
|
||||
y.post()
|
||||
seen.add(lib_name)
|
||||
|
||||
# object has ancestors to process (shared libraries): add them to the end of the list
|
||||
if getattr(y, 'uselib_local', None):
|
||||
lst = y.to_list(y.uselib_local)
|
||||
if 'dshlib' in y.features or 'dprogram' in y.features:
|
||||
lst = [x for x in lst if not 'dstaticlib' in self.name_to_obj(x).features]
|
||||
tmp.extend(lst)
|
||||
|
||||
# link task and flags
|
||||
if getattr(y, 'link_task', None):
|
||||
|
||||
link_name = y.target[y.target.rfind(os.sep) + 1:]
|
||||
if 'dstaticlib' in y.features or 'dshlib' in y.features:
|
||||
env.append_unique('DLINKFLAGS', env.DLIB_ST % link_name)
|
||||
env.append_unique('DLINKFLAGS', env.DLIBPATH_ST % y.link_task.outputs[0].parent.bldpath(env))
|
||||
|
||||
# the order
|
||||
self.link_task.set_run_after(y.link_task)
|
||||
|
||||
# for the recompilation
|
||||
dep_nodes = getattr(self.link_task, 'dep_nodes', [])
|
||||
self.link_task.dep_nodes = dep_nodes + y.link_task.outputs
|
||||
|
||||
# add ancestors uselib too - but only propagate those that have no staticlib
|
||||
for v in self.to_list(y.uselib):
|
||||
if not v in self.uselib:
|
||||
self.uselib.insert(0, v)
|
||||
|
||||
# if the library task generator provides 'export_incdirs', add to the include path
|
||||
# the export_incdirs must be a list of paths relative to the other library
|
||||
if getattr(y, 'export_incdirs', None):
|
||||
for x in self.to_list(y.export_incdirs):
|
||||
node = y.path.find_dir(x)
|
||||
if not node:
|
||||
raise Utils.WafError('object %r: invalid folder %r in export_incdirs' % (y.target, x))
|
||||
self.env.append_unique('INC_PATHS', node)
|
||||
|
||||
@feature('dprogram', 'dshlib', 'dstaticlib')
|
||||
@after('apply_core')
|
||||
def apply_d_link(self):
|
||||
link = getattr(self, 'link', None)
|
||||
if not link:
|
||||
if 'dstaticlib' in self.features: link = 'static_link'
|
||||
else: link = 'd_link'
|
||||
|
||||
outputs = [t.outputs[0] for t in self.compiled_tasks]
|
||||
self.link_task = self.create_task(link, outputs, self.path.find_or_declare(get_target_name(self)))
|
||||
|
||||
@feature('d')
|
||||
@after('apply_core')
|
||||
def apply_d_vars(self):
|
||||
env = self.env
|
||||
dpath_st = env['DPATH_ST']
|
||||
lib_st = env['DLIB_ST']
|
||||
libpath_st = env['DLIBPATH_ST']
|
||||
|
||||
importpaths = self.to_list(self.importpaths)
|
||||
libpaths = []
|
||||
libs = []
|
||||
uselib = self.to_list(self.uselib)
|
||||
|
||||
for i in uselib:
|
||||
if env['DFLAGS_' + i]:
|
||||
env.append_unique('DFLAGS', env['DFLAGS_' + i])
|
||||
|
||||
for x in self.features:
|
||||
if not x in ['dprogram', 'dstaticlib', 'dshlib']:
|
||||
continue
|
||||
x.lstrip('d')
|
||||
d_shlib_dflags = env['D_' + x + '_DFLAGS']
|
||||
if d_shlib_dflags:
|
||||
env.append_unique('DFLAGS', d_shlib_dflags)
|
||||
|
||||
# add import paths
|
||||
for i in uselib:
|
||||
if env['DPATH_' + i]:
|
||||
for entry in self.to_list(env['DPATH_' + i]):
|
||||
if not entry in importpaths:
|
||||
importpaths.append(entry)
|
||||
|
||||
# now process the import paths
|
||||
for path in importpaths:
|
||||
if os.path.isabs(path):
|
||||
env.append_unique('_DIMPORTFLAGS', dpath_st % path)
|
||||
else:
|
||||
node = self.path.find_dir(path)
|
||||
self.env.append_unique('INC_PATHS', node)
|
||||
env.append_unique('_DIMPORTFLAGS', dpath_st % node.srcpath(env))
|
||||
env.append_unique('_DIMPORTFLAGS', dpath_st % node.bldpath(env))
|
||||
|
||||
# add library paths
|
||||
for i in uselib:
|
||||
if env['LIBPATH_' + i]:
|
||||
for entry in self.to_list(env['LIBPATH_' + i]):
|
||||
if not entry in libpaths:
|
||||
libpaths.append(entry)
|
||||
libpaths = self.to_list(self.libpaths) + libpaths
|
||||
|
||||
# now process the library paths
|
||||
# apply same path manipulation as used with import paths
|
||||
for path in libpaths:
|
||||
if not os.path.isabs(path):
|
||||
node = self.path.find_resource(path)
|
||||
if not node:
|
||||
raise Utils.WafError('could not find libpath %r from %r' % (path, self))
|
||||
path = node.abspath(self.env)
|
||||
|
||||
env.append_unique('DLINKFLAGS', libpath_st % path)
|
||||
|
||||
# add libraries
|
||||
for i in uselib:
|
||||
if env['LIB_' + i]:
|
||||
for entry in self.to_list(env['LIB_' + i]):
|
||||
if not entry in libs:
|
||||
libs.append(entry)
|
||||
libs.extend(self.to_list(self.libs))
|
||||
|
||||
# process user flags
|
||||
for flag in self.to_list(self.dflags):
|
||||
env.append_unique('DFLAGS', flag)
|
||||
|
||||
# now process the libraries
|
||||
for lib in libs:
|
||||
env.append_unique('DLINKFLAGS', lib_st % lib)
|
||||
|
||||
# add linker flags
|
||||
for i in uselib:
|
||||
dlinkflags = env['DLINKFLAGS_' + i]
|
||||
if dlinkflags:
|
||||
for linkflag in dlinkflags:
|
||||
env.append_unique('DLINKFLAGS', linkflag)
|
||||
|
||||
@feature('dshlib')
|
||||
@after('apply_d_vars')
|
||||
def add_shlib_d_flags(self):
|
||||
for linkflag in self.env['D_shlib_LINKFLAGS']:
|
||||
self.env.append_unique('DLINKFLAGS', linkflag)
|
||||
|
||||
@extension(EXT_D)
|
||||
def d_hook(self, node):
|
||||
# create the compilation task: cpp or cc
|
||||
task = self.create_task(self.generate_headers and 'd_with_header' or 'd')
|
||||
try: obj_ext = self.obj_ext
|
||||
except AttributeError: obj_ext = '_%d.o' % self.idx
|
||||
|
||||
task.inputs = [node]
|
||||
task.outputs = [node.change_ext(obj_ext)]
|
||||
self.compiled_tasks.append(task)
|
||||
|
||||
if self.generate_headers:
|
||||
header_node = node.change_ext(self.env['DHEADER_ext'])
|
||||
task.outputs += [header_node]
|
||||
|
||||
d_str = '${D_COMPILER} ${DFLAGS} ${_DIMPORTFLAGS} ${D_SRC_F}${SRC} ${D_TGT_F}${TGT}'
|
||||
d_with_header_str = '${D_COMPILER} ${DFLAGS} ${_DIMPORTFLAGS} \
|
||||
${D_HDR_F}${TGT[1].bldpath(env)} \
|
||||
${D_SRC_F}${SRC} \
|
||||
${D_TGT_F}${TGT[0].bldpath(env)}'
|
||||
link_str = '${D_LINKER} ${DLNK_SRC_F}${SRC} ${DLNK_TGT_F}${TGT} ${DLINKFLAGS}'
|
||||
|
||||
def override_exec(cls):
|
||||
"""stupid dmd wants -of stuck to the file name"""
|
||||
old_exec = cls.exec_command
|
||||
def exec_command(self, *k, **kw):
|
||||
if isinstance(k[0], list):
|
||||
lst = k[0]
|
||||
for i in xrange(len(lst)):
|
||||
if lst[i] == '-of':
|
||||
del lst[i]
|
||||
lst[i] = '-of' + lst[i]
|
||||
break
|
||||
return old_exec(self, *k, **kw)
|
||||
cls.exec_command = exec_command
|
||||
|
||||
cls = Task.simple_task_type('d', d_str, 'GREEN', before='static_link d_link', shell=False)
|
||||
cls.scan = scan
|
||||
override_exec(cls)
|
||||
|
||||
cls = Task.simple_task_type('d_with_header', d_with_header_str, 'GREEN', before='static_link d_link', shell=False)
|
||||
override_exec(cls)
|
||||
|
||||
cls = Task.simple_task_type('d_link', link_str, color='YELLOW', shell=False)
|
||||
override_exec(cls)
|
||||
|
||||
# for feature request #104
|
||||
@taskgen
|
||||
def generate_header(self, filename, install_path):
|
||||
if not hasattr(self, 'header_lst'): self.header_lst = []
|
||||
self.meths.append('process_header')
|
||||
self.header_lst.append([filename, install_path])
|
||||
|
||||
@before('apply_core')
|
||||
def process_header(self):
|
||||
env = self.env
|
||||
for i in getattr(self, 'header_lst', []):
|
||||
node = self.path.find_resource(i[0])
|
||||
|
||||
if not node:
|
||||
raise Utils.WafError('file not found on d obj '+i[0])
|
||||
|
||||
task = self.create_task('d_header')
|
||||
task.set_inputs(node)
|
||||
task.set_outputs(node.change_ext('.di'))
|
||||
|
||||
d_header_str = '${D_COMPILER} ${D_HEADER} ${SRC}'
|
||||
Task.simple_task_type('d_header', d_header_str, color='BLUE', shell=False)
|
||||
|
||||
@conftest
|
||||
def d_platform_flags(conf):
|
||||
v = conf.env
|
||||
binfmt = v.DEST_BINFMT or Utils.unversioned_sys_platform_to_binary_format(
|
||||
v.DEST_OS or Utils.unversioned_sys_platform())
|
||||
if binfmt == 'pe':
|
||||
v['D_program_PATTERN'] = '%s.exe'
|
||||
v['D_shlib_PATTERN'] = 'lib%s.dll'
|
||||
v['D_staticlib_PATTERN'] = 'lib%s.a'
|
||||
else:
|
||||
v['D_program_PATTERN'] = '%s'
|
||||
v['D_shlib_PATTERN'] = 'lib%s.so'
|
||||
v['D_staticlib_PATTERN'] = 'lib%s.a'
|
||||
|
||||
@conftest
|
||||
def check_dlibrary(conf):
|
||||
ret = conf.check_cc(features='d dprogram', fragment=DLIB, mandatory=True, compile_filename='test.d', execute=True)
|
||||
conf.env.DLIBRARY = ret.strip()
|
||||
|
||||
# quick test #
|
||||
if __name__ == "__main__":
|
||||
#Logs.verbose = 2
|
||||
|
||||
try: arg = sys.argv[1]
|
||||
except IndexError: arg = "file.d"
|
||||
|
||||
print("".join(filter_comments(arg)))
|
||||
# TODO
|
||||
paths = ['.']
|
||||
|
||||
#gruik = filter()
|
||||
#gruik.start(arg)
|
||||
|
||||
#code = "".join(gruik.buf)
|
||||
|
||||
#print "we have found the following code"
|
||||
#print code
|
||||
|
||||
#print "now parsing"
|
||||
#print "-------------------------------------------"
|
||||
"""
|
||||
parser_ = d_parser()
|
||||
parser_.start(arg)
|
||||
|
||||
print "module: %s" % parser_.module
|
||||
print "imports: ",
|
||||
for imp in parser_.imports:
|
||||
print imp + " ",
|
||||
print
|
||||
"""
|
33
third_party/waf/wafadmin/Tools/dbus.py
vendored
33
third_party/waf/wafadmin/Tools/dbus.py
vendored
@ -1,33 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# Ali Sabil, 2007
|
||||
|
||||
import Task, Utils
|
||||
from TaskGen import taskgen, before, after, feature
|
||||
|
||||
@taskgen
|
||||
def add_dbus_file(self, filename, prefix, mode):
|
||||
if not hasattr(self, 'dbus_lst'):
|
||||
self.dbus_lst = []
|
||||
self.meths.append('process_dbus')
|
||||
self.dbus_lst.append([filename, prefix, mode])
|
||||
|
||||
@before('apply_core')
|
||||
def process_dbus(self):
|
||||
for filename, prefix, mode in getattr(self, 'dbus_lst', []):
|
||||
node = self.path.find_resource(filename)
|
||||
|
||||
if not node:
|
||||
raise Utils.WafError('file not found ' + filename)
|
||||
|
||||
tsk = self.create_task('dbus_binding_tool', node, node.change_ext('.h'))
|
||||
|
||||
tsk.env.DBUS_BINDING_TOOL_PREFIX = prefix
|
||||
tsk.env.DBUS_BINDING_TOOL_MODE = mode
|
||||
|
||||
Task.simple_task_type('dbus_binding_tool',
|
||||
'${DBUS_BINDING_TOOL} --prefix=${DBUS_BINDING_TOOL_PREFIX} --mode=${DBUS_BINDING_TOOL_MODE} --output=${TGT} ${SRC}',
|
||||
color='BLUE', before='cc')
|
||||
|
||||
def detect(conf):
|
||||
dbus_binding_tool = conf.find_program('dbus-binding-tool', var='DBUS_BINDING_TOOL')
|
63
third_party/waf/wafadmin/Tools/dmd.py
vendored
63
third_party/waf/wafadmin/Tools/dmd.py
vendored
@ -1,63 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# Carlos Rafael Giani, 2007 (dv)
|
||||
# Thomas Nagy, 2008 (ita)
|
||||
|
||||
import sys
|
||||
import Utils, ar
|
||||
from Configure import conftest
|
||||
|
||||
@conftest
|
||||
def find_dmd(conf):
|
||||
conf.find_program(['dmd', 'ldc'], var='D_COMPILER', mandatory=True)
|
||||
|
||||
@conftest
|
||||
def common_flags_ldc(conf):
|
||||
v = conf.env
|
||||
v['DFLAGS'] = ['-d-version=Posix']
|
||||
v['DLINKFLAGS'] = []
|
||||
v['D_shlib_DFLAGS'] = ['-relocation-model=pic']
|
||||
|
||||
@conftest
|
||||
def common_flags_dmd(conf):
|
||||
v = conf.env
|
||||
|
||||
# _DFLAGS _DIMPORTFLAGS
|
||||
|
||||
# Compiler is dmd so 'gdc' part will be ignored, just
|
||||
# ensure key is there, so wscript can append flags to it
|
||||
v['DFLAGS'] = ['-version=Posix']
|
||||
|
||||
v['D_SRC_F'] = ''
|
||||
v['D_TGT_F'] = ['-c', '-of']
|
||||
v['DPATH_ST'] = '-I%s' # template for adding import paths
|
||||
|
||||
# linker
|
||||
v['D_LINKER'] = v['D_COMPILER']
|
||||
v['DLNK_SRC_F'] = ''
|
||||
v['DLNK_TGT_F'] = '-of'
|
||||
|
||||
v['DLIB_ST'] = '-L-l%s' # template for adding libs
|
||||
v['DLIBPATH_ST'] = '-L-L%s' # template for adding libpaths
|
||||
|
||||
# linker debug levels
|
||||
v['DFLAGS_OPTIMIZED'] = ['-O']
|
||||
v['DFLAGS_DEBUG'] = ['-g', '-debug']
|
||||
v['DFLAGS_ULTRADEBUG'] = ['-g', '-debug']
|
||||
v['DLINKFLAGS'] = ['-quiet']
|
||||
|
||||
v['D_shlib_DFLAGS'] = ['-fPIC']
|
||||
v['D_shlib_LINKFLAGS'] = ['-L-shared']
|
||||
|
||||
v['DHEADER_ext'] = '.di'
|
||||
v['D_HDR_F'] = ['-H', '-Hf']
|
||||
|
||||
def detect(conf):
|
||||
conf.find_dmd()
|
||||
conf.check_tool('ar')
|
||||
conf.check_tool('d')
|
||||
conf.common_flags_dmd()
|
||||
conf.d_platform_flags()
|
||||
|
||||
if conf.env.D_COMPILER.find('ldc') > -1:
|
||||
conf.common_flags_ldc()
|
24
third_party/waf/wafadmin/Tools/flex.py
vendored
24
third_party/waf/wafadmin/Tools/flex.py
vendored
@ -1,24 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# John O'Meara, 2006
|
||||
# Thomas Nagy, 2006-2008
|
||||
|
||||
"Flex processing"
|
||||
|
||||
import TaskGen
|
||||
|
||||
def decide_ext(self, node):
|
||||
if 'cxx' in self.features: return '.lex.cc'
|
||||
else: return '.lex.c'
|
||||
|
||||
TaskGen.declare_chain(
|
||||
name = 'flex',
|
||||
rule = '${FLEX} -o${TGT} ${FLEXFLAGS} ${SRC}',
|
||||
ext_in = '.l',
|
||||
ext_out = '.c .cxx',
|
||||
decider = decide_ext
|
||||
)
|
||||
|
||||
def detect(conf):
|
||||
conf.find_program('flex', var='FLEX', mandatory=True)
|
||||
conf.env['FLEXFLAGS'] = ''
|
37
third_party/waf/wafadmin/Tools/gas.py
vendored
37
third_party/waf/wafadmin/Tools/gas.py
vendored
@ -1,37 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# Thomas Nagy, 2008 (ita)
|
||||
|
||||
"as and gas"
|
||||
|
||||
import os, sys
|
||||
import Task
|
||||
from TaskGen import extension, taskgen, after, before
|
||||
|
||||
EXT_ASM = ['.s', '.S', '.asm', '.ASM', '.spp', '.SPP']
|
||||
|
||||
as_str = '${AS} ${ASFLAGS} ${_ASINCFLAGS} ${SRC} -o ${TGT}'
|
||||
Task.simple_task_type('asm', as_str, 'PINK', ext_out='.o', shell=False)
|
||||
|
||||
@extension(EXT_ASM)
|
||||
def asm_hook(self, node):
|
||||
# create the compilation task: cpp or cc
|
||||
try: obj_ext = self.obj_ext
|
||||
except AttributeError: obj_ext = '_%d.o' % self.idx
|
||||
|
||||
task = self.create_task('asm', node, node.change_ext(obj_ext))
|
||||
self.compiled_tasks.append(task)
|
||||
self.meths.append('asm_incflags')
|
||||
|
||||
@after('apply_obj_vars_cc')
|
||||
@after('apply_obj_vars_cxx')
|
||||
@before('apply_link')
|
||||
def asm_incflags(self):
|
||||
self.env.append_value('_ASINCFLAGS', self.env.ASINCFLAGS)
|
||||
var = ('cxx' in self.features) and 'CXX' or 'CC'
|
||||
self.env.append_value('_ASINCFLAGS', self.env['_%sINCFLAGS' % var])
|
||||
|
||||
def detect(conf):
|
||||
conf.find_program(['gas', 'as'], var='AS')
|
||||
if not conf.env.AS: conf.env.AS = conf.env.CC
|
||||
#conf.env.ASFLAGS = ['-c'] <- may be necesary for .S files
|
138
third_party/waf/wafadmin/Tools/gcc.py
vendored
138
third_party/waf/wafadmin/Tools/gcc.py
vendored
@ -1,138 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# Thomas Nagy, 2006-2008 (ita)
|
||||
# Ralf Habacker, 2006 (rh)
|
||||
# Yinon Ehrlich, 2009
|
||||
|
||||
import os, sys
|
||||
import Configure, Options, Utils
|
||||
import ccroot, ar
|
||||
from Configure import conftest
|
||||
|
||||
@conftest
|
||||
def find_gcc(conf):
|
||||
cc = conf.find_program(['gcc', 'cc'], var='CC', mandatory=True)
|
||||
cc = conf.cmd_to_list(cc)
|
||||
ccroot.get_cc_version(conf, cc, gcc=True)
|
||||
conf.env.CC_NAME = 'gcc'
|
||||
conf.env.CC = cc
|
||||
|
||||
@conftest
|
||||
def gcc_common_flags(conf):
|
||||
v = conf.env
|
||||
|
||||
# CPPFLAGS CCDEFINES _CCINCFLAGS _CCDEFFLAGS
|
||||
|
||||
v['CCFLAGS_DEBUG'] = ['-g']
|
||||
|
||||
v['CCFLAGS_RELEASE'] = ['-O2']
|
||||
|
||||
v['CC_SRC_F'] = ''
|
||||
v['CC_TGT_F'] = ['-c', '-o', ''] # shell hack for -MD
|
||||
v['CPPPATH_ST'] = '-I%s' # template for adding include paths
|
||||
|
||||
# linker
|
||||
if not v['LINK_CC']: v['LINK_CC'] = v['CC']
|
||||
v['CCLNK_SRC_F'] = ''
|
||||
v['CCLNK_TGT_F'] = ['-o', ''] # shell hack for -MD
|
||||
|
||||
v['LIB_ST'] = '-l%s' # template for adding libs
|
||||
v['LIBPATH_ST'] = '-L%s' # template for adding libpaths
|
||||
v['STATICLIB_ST'] = '-l%s'
|
||||
v['STATICLIBPATH_ST'] = '-L%s'
|
||||
v['RPATH_ST'] = '-Wl,-rpath,%s'
|
||||
v['CCDEFINES_ST'] = '-D%s'
|
||||
|
||||
v['SONAME_ST'] = '-Wl,-h,%s'
|
||||
v['SHLIB_MARKER'] = '-Wl,-Bdynamic'
|
||||
v['STATICLIB_MARKER'] = '-Wl,-Bstatic'
|
||||
v['FULLSTATIC_MARKER'] = '-static'
|
||||
|
||||
# program
|
||||
v['program_PATTERN'] = '%s'
|
||||
|
||||
# shared library
|
||||
v['shlib_CCFLAGS'] = ['-fPIC', '-DPIC'] # avoid using -DPIC, -fPIC aleady defines the __PIC__ macro
|
||||
v['shlib_LINKFLAGS'] = ['-shared']
|
||||
v['shlib_PATTERN'] = 'lib%s.so'
|
||||
|
||||
# static lib
|
||||
v['staticlib_LINKFLAGS'] = ['-Wl,-Bstatic']
|
||||
v['staticlib_PATTERN'] = 'lib%s.a'
|
||||
|
||||
# osx stuff
|
||||
v['LINKFLAGS_MACBUNDLE'] = ['-bundle', '-undefined', 'dynamic_lookup']
|
||||
v['CCFLAGS_MACBUNDLE'] = ['-fPIC']
|
||||
v['macbundle_PATTERN'] = '%s.bundle'
|
||||
|
||||
@conftest
|
||||
def gcc_modifier_win32(conf):
|
||||
v = conf.env
|
||||
v['program_PATTERN'] = '%s.exe'
|
||||
|
||||
v['shlib_PATTERN'] = '%s.dll'
|
||||
v['implib_PATTERN'] = 'lib%s.dll.a'
|
||||
v['IMPLIB_ST'] = '-Wl,--out-implib,%s'
|
||||
|
||||
dest_arch = v['DEST_CPU']
|
||||
v['shlib_CCFLAGS'] = ['-DPIC']
|
||||
|
||||
v.append_value('shlib_CCFLAGS', '-DDLL_EXPORT') # TODO adding nonstandard defines like this DLL_EXPORT is not a good idea
|
||||
|
||||
# Auto-import is enabled by default even without this option,
|
||||
# but enabling it explicitly has the nice effect of suppressing the rather boring, debug-level messages
|
||||
# that the linker emits otherwise.
|
||||
v.append_value('LINKFLAGS', '-Wl,--enable-auto-import')
|
||||
|
||||
@conftest
|
||||
def gcc_modifier_cygwin(conf):
|
||||
gcc_modifier_win32(conf)
|
||||
v = conf.env
|
||||
v['shlib_PATTERN'] = 'cyg%s.dll'
|
||||
v.append_value('shlib_LINKFLAGS', '-Wl,--enable-auto-image-base')
|
||||
|
||||
@conftest
|
||||
def gcc_modifier_darwin(conf):
|
||||
v = conf.env
|
||||
v['shlib_CCFLAGS'] = ['-fPIC', '-compatibility_version', '1', '-current_version', '1']
|
||||
v['shlib_LINKFLAGS'] = ['-dynamiclib']
|
||||
v['shlib_PATTERN'] = 'lib%s.dylib'
|
||||
|
||||
v['staticlib_LINKFLAGS'] = []
|
||||
|
||||
v['SHLIB_MARKER'] = ''
|
||||
v['STATICLIB_MARKER'] = ''
|
||||
v['SONAME_ST'] = ''
|
||||
|
||||
@conftest
|
||||
def gcc_modifier_aix(conf):
|
||||
v = conf.env
|
||||
v['program_LINKFLAGS'] = ['-Wl,-brtl']
|
||||
|
||||
v['shlib_LINKFLAGS'] = ['-shared','-Wl,-brtl,-bexpfull']
|
||||
|
||||
v['SHLIB_MARKER'] = ''
|
||||
|
||||
@conftest
|
||||
def gcc_modifier_openbsd(conf):
|
||||
conf.env['SONAME_ST'] = []
|
||||
|
||||
@conftest
|
||||
def gcc_modifier_platform(conf):
|
||||
# * set configurations specific for a platform.
|
||||
# * the destination platform is detected automatically by looking at the macros the compiler predefines,
|
||||
# and if it's not recognised, it fallbacks to sys.platform.
|
||||
dest_os = conf.env['DEST_OS'] or Utils.unversioned_sys_platform()
|
||||
gcc_modifier_func = globals().get('gcc_modifier_' + dest_os)
|
||||
if gcc_modifier_func:
|
||||
gcc_modifier_func(conf)
|
||||
|
||||
def detect(conf):
|
||||
conf.find_gcc()
|
||||
conf.find_cpp()
|
||||
conf.find_ar()
|
||||
conf.gcc_common_flags()
|
||||
conf.gcc_modifier_platform()
|
||||
conf.cc_load_tools()
|
||||
conf.cc_add_flags()
|
||||
conf.link_add_flags()
|
51
third_party/waf/wafadmin/Tools/gdc.py
vendored
51
third_party/waf/wafadmin/Tools/gdc.py
vendored
@ -1,51 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# Carlos Rafael Giani, 2007 (dv)
|
||||
|
||||
import sys
|
||||
import Utils, ar
|
||||
from Configure import conftest
|
||||
|
||||
@conftest
|
||||
def find_gdc(conf):
|
||||
conf.find_program('gdc', var='D_COMPILER', mandatory=True)
|
||||
|
||||
@conftest
|
||||
def common_flags_gdc(conf):
|
||||
v = conf.env
|
||||
|
||||
# _DFLAGS _DIMPORTFLAGS
|
||||
|
||||
# for mory info about the meaning of this dict see dmd.py
|
||||
v['DFLAGS'] = []
|
||||
|
||||
v['D_SRC_F'] = ''
|
||||
v['D_TGT_F'] = ['-c', '-o', '']
|
||||
v['DPATH_ST'] = '-I%s' # template for adding import paths
|
||||
|
||||
# linker
|
||||
v['D_LINKER'] = v['D_COMPILER']
|
||||
v['DLNK_SRC_F'] = ''
|
||||
v['DLNK_TGT_F'] = ['-o', '']
|
||||
|
||||
v['DLIB_ST'] = '-l%s' # template for adding libs
|
||||
v['DLIBPATH_ST'] = '-L%s' # template for adding libpaths
|
||||
|
||||
# debug levels
|
||||
v['DLINKFLAGS'] = []
|
||||
v['DFLAGS_OPTIMIZED'] = ['-O3']
|
||||
v['DFLAGS_DEBUG'] = ['-O0']
|
||||
v['DFLAGS_ULTRADEBUG'] = ['-O0']
|
||||
|
||||
v['D_shlib_DFLAGS'] = []
|
||||
v['D_shlib_LINKFLAGS'] = ['-shared']
|
||||
|
||||
v['DHEADER_ext'] = '.di'
|
||||
v['D_HDR_F'] = '-fintfc -fintfc-file='
|
||||
|
||||
def detect(conf):
|
||||
conf.find_gdc()
|
||||
conf.check_tool('ar')
|
||||
conf.check_tool('d')
|
||||
conf.common_flags_gdc()
|
||||
conf.d_platform_flags()
|
163
third_party/waf/wafadmin/Tools/glib2.py
vendored
163
third_party/waf/wafadmin/Tools/glib2.py
vendored
@ -1,163 +0,0 @@
|
||||
#! /usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# Thomas Nagy, 2006-2008 (ita)
|
||||
|
||||
"GLib2 support"
|
||||
|
||||
import Task, Utils
|
||||
from TaskGen import taskgen, before, after, feature
|
||||
|
||||
#
|
||||
# glib-genmarshal
|
||||
#
|
||||
|
||||
@taskgen
|
||||
def add_marshal_file(self, filename, prefix):
|
||||
if not hasattr(self, 'marshal_list'):
|
||||
self.marshal_list = []
|
||||
self.meths.append('process_marshal')
|
||||
self.marshal_list.append((filename, prefix))
|
||||
|
||||
@before('apply_core')
|
||||
def process_marshal(self):
|
||||
for f, prefix in getattr(self, 'marshal_list', []):
|
||||
node = self.path.find_resource(f)
|
||||
|
||||
if not node:
|
||||
raise Utils.WafError('file not found %r' % f)
|
||||
|
||||
h_node = node.change_ext('.h')
|
||||
c_node = node.change_ext('.c')
|
||||
|
||||
task = self.create_task('glib_genmarshal', node, [h_node, c_node])
|
||||
task.env.GLIB_GENMARSHAL_PREFIX = prefix
|
||||
self.allnodes.append(c_node)
|
||||
|
||||
def genmarshal_func(self):
|
||||
|
||||
bld = self.inputs[0].__class__.bld
|
||||
|
||||
get = self.env.get_flat
|
||||
cmd1 = "%s %s --prefix=%s --header > %s" % (
|
||||
get('GLIB_GENMARSHAL'),
|
||||
self.inputs[0].srcpath(self.env),
|
||||
get('GLIB_GENMARSHAL_PREFIX'),
|
||||
self.outputs[0].abspath(self.env)
|
||||
)
|
||||
|
||||
ret = bld.exec_command(cmd1)
|
||||
if ret: return ret
|
||||
|
||||
#print self.outputs[1].abspath(self.env)
|
||||
f = open(self.outputs[1].abspath(self.env), 'wb')
|
||||
c = '''#include "%s"\n''' % self.outputs[0].name
|
||||
f.write(c)
|
||||
f.close()
|
||||
|
||||
cmd2 = "%s %s --prefix=%s --body >> %s" % (
|
||||
get('GLIB_GENMARSHAL'),
|
||||
self.inputs[0].srcpath(self.env),
|
||||
get('GLIB_GENMARSHAL_PREFIX'),
|
||||
self.outputs[1].abspath(self.env)
|
||||
)
|
||||
ret = Utils.exec_command(cmd2)
|
||||
if ret: return ret
|
||||
|
||||
#
|
||||
# glib-mkenums
|
||||
#
|
||||
|
||||
@taskgen
|
||||
def add_enums_from_template(self, source='', target='', template='', comments=''):
|
||||
if not hasattr(self, 'enums_list'):
|
||||
self.enums_list = []
|
||||
self.meths.append('process_enums')
|
||||
self.enums_list.append({'source': source,
|
||||
'target': target,
|
||||
'template': template,
|
||||
'file-head': '',
|
||||
'file-prod': '',
|
||||
'file-tail': '',
|
||||
'enum-prod': '',
|
||||
'value-head': '',
|
||||
'value-prod': '',
|
||||
'value-tail': '',
|
||||
'comments': comments})
|
||||
|
||||
@taskgen
|
||||
def add_enums(self, source='', target='',
|
||||
file_head='', file_prod='', file_tail='', enum_prod='',
|
||||
value_head='', value_prod='', value_tail='', comments=''):
|
||||
if not hasattr(self, 'enums_list'):
|
||||
self.enums_list = []
|
||||
self.meths.append('process_enums')
|
||||
self.enums_list.append({'source': source,
|
||||
'template': '',
|
||||
'target': target,
|
||||
'file-head': file_head,
|
||||
'file-prod': file_prod,
|
||||
'file-tail': file_tail,
|
||||
'enum-prod': enum_prod,
|
||||
'value-head': value_head,
|
||||
'value-prod': value_prod,
|
||||
'value-tail': value_tail,
|
||||
'comments': comments})
|
||||
|
||||
@before('apply_core')
|
||||
def process_enums(self):
|
||||
for enum in getattr(self, 'enums_list', []):
|
||||
task = self.create_task('glib_mkenums')
|
||||
env = task.env
|
||||
|
||||
inputs = []
|
||||
|
||||
# process the source
|
||||
source_list = self.to_list(enum['source'])
|
||||
if not source_list:
|
||||
raise Utils.WafError('missing source ' + str(enum))
|
||||
source_list = [self.path.find_resource(k) for k in source_list]
|
||||
inputs += source_list
|
||||
env['GLIB_MKENUMS_SOURCE'] = [k.srcpath(env) for k in source_list]
|
||||
|
||||
# find the target
|
||||
if not enum['target']:
|
||||
raise Utils.WafError('missing target ' + str(enum))
|
||||
tgt_node = self.path.find_or_declare(enum['target'])
|
||||
if tgt_node.name.endswith('.c'):
|
||||
self.allnodes.append(tgt_node)
|
||||
env['GLIB_MKENUMS_TARGET'] = tgt_node.abspath(env)
|
||||
|
||||
|
||||
options = []
|
||||
|
||||
if enum['template']: # template, if provided
|
||||
template_node = self.path.find_resource(enum['template'])
|
||||
options.append('--template %s' % (template_node.abspath(env)))
|
||||
inputs.append(template_node)
|
||||
params = {'file-head' : '--fhead',
|
||||
'file-prod' : '--fprod',
|
||||
'file-tail' : '--ftail',
|
||||
'enum-prod' : '--eprod',
|
||||
'value-head' : '--vhead',
|
||||
'value-prod' : '--vprod',
|
||||
'value-tail' : '--vtail',
|
||||
'comments': '--comments'}
|
||||
for param, option in params.iteritems():
|
||||
if enum[param]:
|
||||
options.append('%s %r' % (option, enum[param]))
|
||||
|
||||
env['GLIB_MKENUMS_OPTIONS'] = ' '.join(options)
|
||||
|
||||
# update the task instance
|
||||
task.set_inputs(inputs)
|
||||
task.set_outputs(tgt_node)
|
||||
|
||||
Task.task_type_from_func('glib_genmarshal', func=genmarshal_func, vars=['GLIB_GENMARSHAL_PREFIX', 'GLIB_GENMARSHAL'],
|
||||
color='BLUE', before='cc cxx')
|
||||
Task.simple_task_type('glib_mkenums',
|
||||
'${GLIB_MKENUMS} ${GLIB_MKENUMS_OPTIONS} ${GLIB_MKENUMS_SOURCE} > ${GLIB_MKENUMS_TARGET}',
|
||||
color='PINK', before='cc cxx')
|
||||
|
||||
def detect(conf):
|
||||
glib_genmarshal = conf.find_program('glib-genmarshal', var='GLIB_GENMARSHAL')
|
||||
mk_enums_tool = conf.find_program('glib-mkenums', var='GLIB_MKENUMS')
|
222
third_party/waf/wafadmin/Tools/gnome.py
vendored
222
third_party/waf/wafadmin/Tools/gnome.py
vendored
@ -1,222 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# Thomas Nagy, 2006-2008 (ita)
|
||||
|
||||
"Gnome support"
|
||||
|
||||
import os, re
|
||||
import TaskGen, Utils, Runner, Task, Build, Options, Logs
|
||||
import cc
|
||||
from Logs import error
|
||||
from TaskGen import taskgen, before, after, feature
|
||||
|
||||
n1_regexp = re.compile('<refentrytitle>(.*)</refentrytitle>', re.M)
|
||||
n2_regexp = re.compile('<manvolnum>(.*)</manvolnum>', re.M)
|
||||
|
||||
def postinstall_schemas(prog_name):
|
||||
if Build.bld.is_install:
|
||||
dir = Build.bld.get_install_path('${PREFIX}/etc/gconf/schemas/%s.schemas' % prog_name)
|
||||
if not Options.options.destdir:
|
||||
# add the gconf schema
|
||||
Utils.pprint('YELLOW', 'Installing GConf schema')
|
||||
command = 'gconftool-2 --install-schema-file=%s 1> /dev/null' % dir
|
||||
ret = Utils.exec_command(command)
|
||||
else:
|
||||
Utils.pprint('YELLOW', 'GConf schema not installed. After install, run this:')
|
||||
Utils.pprint('YELLOW', 'gconftool-2 --install-schema-file=%s' % dir)
|
||||
|
||||
def postinstall_icons():
|
||||
dir = Build.bld.get_install_path('${DATADIR}/icons/hicolor')
|
||||
if Build.bld.is_install:
|
||||
if not Options.options.destdir:
|
||||
# update the pixmap cache directory
|
||||
Utils.pprint('YELLOW', "Updating Gtk icon cache.")
|
||||
command = 'gtk-update-icon-cache -q -f -t %s' % dir
|
||||
ret = Utils.exec_command(command)
|
||||
else:
|
||||
Utils.pprint('YELLOW', 'Icon cache not updated. After install, run this:')
|
||||
Utils.pprint('YELLOW', 'gtk-update-icon-cache -q -f -t %s' % dir)
|
||||
|
||||
def postinstall_scrollkeeper(prog_name):
|
||||
if Build.bld.is_install:
|
||||
# now the scrollkeeper update if we can write to the log file
|
||||
if os.access('/var/log/scrollkeeper.log', os.W_OK):
|
||||
dir1 = Build.bld.get_install_path('${PREFIX}/var/scrollkeeper')
|
||||
dir2 = Build.bld.get_install_path('${DATADIR}/omf/%s' % prog_name)
|
||||
command = 'scrollkeeper-update -q -p %s -o %s' % (dir1, dir2)
|
||||
ret = Utils.exec_command(command)
|
||||
|
||||
def postinstall(prog_name='myapp', schemas=1, icons=1, scrollkeeper=1):
|
||||
if schemas: postinstall_schemas(prog_name)
|
||||
if icons: postinstall_icons()
|
||||
if scrollkeeper: postinstall_scrollkeeper(prog_name)
|
||||
|
||||
# OBSOLETE
|
||||
class gnome_doc_taskgen(TaskGen.task_gen):
|
||||
def __init__(self, *k, **kw):
|
||||
TaskGen.task_gen.__init__(self, *k, **kw)
|
||||
|
||||
@feature('gnome_doc')
|
||||
def init_gnome_doc(self):
|
||||
self.default_install_path = '${PREFIX}/share'
|
||||
|
||||
@feature('gnome_doc')
|
||||
@after('init_gnome_doc')
|
||||
def apply_gnome_doc(self):
|
||||
self.env['APPNAME'] = self.doc_module
|
||||
lst = self.to_list(self.doc_linguas)
|
||||
bld = self.bld
|
||||
lst.append('C')
|
||||
|
||||
for x in lst:
|
||||
if not x == 'C':
|
||||
tsk = self.create_task('xml2po')
|
||||
node = self.path.find_resource(x+'/'+x+'.po')
|
||||
src = self.path.find_resource('C/%s.xml' % self.doc_module)
|
||||
out = self.path.find_or_declare('%s/%s.xml' % (x, self.doc_module))
|
||||
tsk.set_inputs([node, src])
|
||||
tsk.set_outputs(out)
|
||||
else:
|
||||
out = self.path.find_resource('%s/%s.xml' % (x, self.doc_module))
|
||||
|
||||
tsk2 = self.create_task('xsltproc2po')
|
||||
out2 = self.path.find_or_declare('%s/%s-%s.omf' % (x, self.doc_module, x))
|
||||
tsk2.set_outputs(out2)
|
||||
node = self.path.find_resource(self.doc_module+".omf.in")
|
||||
tsk2.inputs = [node, out]
|
||||
|
||||
tsk2.run_after.append(tsk)
|
||||
|
||||
if bld.is_install:
|
||||
path = self.install_path + '/gnome/help/%s/%s' % (self.doc_module, x)
|
||||
bld.install_files(self.install_path + '/omf', out2, env=self.env)
|
||||
for y in self.to_list(self.doc_figures):
|
||||
try:
|
||||
os.stat(self.path.abspath() + '/' + x + '/' + y)
|
||||
bld.install_as(path + '/' + y, self.path.abspath() + '/' + x + '/' + y)
|
||||
except:
|
||||
bld.install_as(path + '/' + y, self.path.abspath() + '/C/' + y)
|
||||
bld.install_as(path + '/%s.xml' % self.doc_module, out.abspath(self.env))
|
||||
if x == 'C':
|
||||
xmls = self.to_list(self.doc_includes)
|
||||
xmls.append(self.doc_entities)
|
||||
for z in xmls:
|
||||
out = self.path.find_resource('%s/%s' % (x, z))
|
||||
bld.install_as(path + '/%s' % z, out.abspath(self.env))
|
||||
|
||||
# OBSOLETE
|
||||
class xml_to_taskgen(TaskGen.task_gen):
|
||||
def __init__(self, *k, **kw):
|
||||
TaskGen.task_gen.__init__(self, *k, **kw)
|
||||
|
||||
@feature('xml_to')
|
||||
def init_xml_to(self):
|
||||
Utils.def_attrs(self,
|
||||
source = 'xmlfile',
|
||||
xslt = 'xlsltfile',
|
||||
target = 'hey',
|
||||
default_install_path = '${PREFIX}',
|
||||
task_created = None)
|
||||
|
||||
@feature('xml_to')
|
||||
@after('init_xml_to')
|
||||
def apply_xml_to(self):
|
||||
xmlfile = self.path.find_resource(self.source)
|
||||
xsltfile = self.path.find_resource(self.xslt)
|
||||
tsk = self.create_task('xmlto', [xmlfile, xsltfile], xmlfile.change_ext('html'))
|
||||
tsk.install_path = self.install_path
|
||||
|
||||
def sgml_scan(self):
|
||||
node = self.inputs[0]
|
||||
|
||||
env = self.env
|
||||
variant = node.variant(env)
|
||||
|
||||
fi = open(node.abspath(env), 'r')
|
||||
content = fi.read()
|
||||
fi.close()
|
||||
|
||||
# we should use a sgml parser :-/
|
||||
name = n1_regexp.findall(content)[0]
|
||||
num = n2_regexp.findall(content)[0]
|
||||
|
||||
doc_name = name+'.'+num
|
||||
|
||||
if not self.outputs:
|
||||
self.outputs = [self.generator.path.find_or_declare(doc_name)]
|
||||
|
||||
return ([], [doc_name])
|
||||
|
||||
class gnome_sgml2man_taskgen(TaskGen.task_gen):
|
||||
def __init__(self, *k, **kw):
|
||||
TaskGen.task_gen.__init__(self, *k, **kw)
|
||||
|
||||
@feature('gnome_sgml2man')
|
||||
def apply_gnome_sgml2man(self):
|
||||
"""
|
||||
we could make it more complicated, but for now we just scan the document each time
|
||||
"""
|
||||
assert(getattr(self, 'appname', None))
|
||||
|
||||
def install_result(task):
|
||||
out = task.outputs[0]
|
||||
name = out.name
|
||||
ext = name[-1]
|
||||
env = task.env
|
||||
self.bld.install_files('${DATADIR}/man/man%s/' % ext, out, env)
|
||||
|
||||
self.bld.rescan(self.path)
|
||||
for name in self.bld.cache_dir_contents[self.path.id]:
|
||||
base, ext = os.path.splitext(name)
|
||||
if ext != '.sgml': continue
|
||||
|
||||
task = self.create_task('sgml2man')
|
||||
task.set_inputs(self.path.find_resource(name))
|
||||
task.task_generator = self
|
||||
if self.bld.is_install: task.install = install_result
|
||||
# no outputs, the scanner does it
|
||||
# no caching for now, this is not a time-critical feature
|
||||
# in the future the scanner can be used to do more things (find dependencies, etc)
|
||||
task.scan()
|
||||
|
||||
cls = Task.simple_task_type('sgml2man', '${SGML2MAN} -o ${TGT[0].bld_dir(env)} ${SRC} > /dev/null', color='BLUE')
|
||||
cls.scan = sgml_scan
|
||||
cls.quiet = 1
|
||||
|
||||
Task.simple_task_type('xmlto', '${XMLTO} html -m ${SRC[1].abspath(env)} ${SRC[0].abspath(env)}')
|
||||
|
||||
Task.simple_task_type('xml2po', '${XML2PO} ${XML2POFLAGS} ${SRC} > ${TGT}', color='BLUE')
|
||||
|
||||
# how do you expect someone to understand this?!
|
||||
xslt_magic = """${XSLTPROC2PO} -o ${TGT[0].abspath(env)} \
|
||||
--stringparam db2omf.basename ${APPNAME} \
|
||||
--stringparam db2omf.format docbook \
|
||||
--stringparam db2omf.lang ${TGT[0].abspath(env)[:-4].split('-')[-1]} \
|
||||
--stringparam db2omf.dtd '-//OASIS//DTD DocBook XML V4.3//EN' \
|
||||
--stringparam db2omf.omf_dir ${PREFIX}/share/omf \
|
||||
--stringparam db2omf.help_dir ${PREFIX}/share/gnome/help \
|
||||
--stringparam db2omf.omf_in ${SRC[0].abspath(env)} \
|
||||
--stringparam db2omf.scrollkeeper_cl ${SCROLLKEEPER_DATADIR}/Templates/C/scrollkeeper_cl.xml \
|
||||
${DB2OMF} ${SRC[1].abspath(env)}"""
|
||||
|
||||
#--stringparam db2omf.dtd '-//OASIS//DTD DocBook XML V4.3//EN' \
|
||||
Task.simple_task_type('xsltproc2po', xslt_magic, color='BLUE')
|
||||
|
||||
def detect(conf):
|
||||
conf.check_tool('gnu_dirs glib2 dbus')
|
||||
sgml2man = conf.find_program('docbook2man', var='SGML2MAN')
|
||||
|
||||
def getstr(varname):
|
||||
return getattr(Options.options, varname, '')
|
||||
|
||||
# addefine also sets the variable to the env
|
||||
conf.define('GNOMELOCALEDIR', os.path.join(conf.env['DATADIR'], 'locale'))
|
||||
|
||||
xml2po = conf.find_program('xml2po', var='XML2PO')
|
||||
xsltproc2po = conf.find_program('xsltproc', var='XSLTPROC2PO')
|
||||
conf.env['XML2POFLAGS'] = '-e -p'
|
||||
conf.env['SCROLLKEEPER_DATADIR'] = Utils.cmd_output("scrollkeeper-config --pkgdatadir", silent=1).strip()
|
||||
conf.env['DB2OMF'] = Utils.cmd_output("/usr/bin/pkg-config --variable db2omf gnome-doc-utils", silent=1).strip()
|
||||
|
||||
def set_options(opt):
|
||||
opt.add_option('--want-rpath', type='int', default=1, dest='want_rpath', help='set rpath to 1 or 0 [Default 1]')
|
110
third_party/waf/wafadmin/Tools/gnu_dirs.py
vendored
110
third_party/waf/wafadmin/Tools/gnu_dirs.py
vendored
@ -1,110 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# Ali Sabil, 2007
|
||||
|
||||
"""
|
||||
To use this module do not forget to call
|
||||
opt.tool_options('gnu_dirs')
|
||||
AND
|
||||
conf.check_tool('gnu_dirs')
|
||||
|
||||
Add options for the standard GNU directories, this tool will add the options
|
||||
found in autotools, and will update the environment with the following
|
||||
installation variables:
|
||||
|
||||
* PREFIX : architecture-independent files [/usr/local]
|
||||
* EXEC_PREFIX : architecture-dependent files [PREFIX]
|
||||
* BINDIR : user executables [EXEC_PREFIX/bin]
|
||||
* SBINDIR : user executables [EXEC_PREFIX/sbin]
|
||||
* LIBEXECDIR : program executables [EXEC_PREFIX/libexec]
|
||||
* SYSCONFDIR : read-only single-machine data [PREFIX/etc]
|
||||
* SHAREDSTATEDIR : modifiable architecture-independent data [PREFIX/com]
|
||||
* LOCALSTATEDIR : modifiable single-machine data [PREFIX/var]
|
||||
* LIBDIR : object code libraries [EXEC_PREFIX/lib]
|
||||
* INCLUDEDIR : C header files [PREFIX/include]
|
||||
* OLDINCLUDEDIR : C header files for non-gcc [/usr/include]
|
||||
* DATAROOTDIR : read-only arch.-independent data root [PREFIX/share]
|
||||
* DATADIR : read-only architecture-independent data [DATAROOTDIR]
|
||||
* INFODIR : info documentation [DATAROOTDIR/info]
|
||||
* LOCALEDIR : locale-dependent data [DATAROOTDIR/locale]
|
||||
* MANDIR : man documentation [DATAROOTDIR/man]
|
||||
* DOCDIR : documentation root [DATAROOTDIR/doc/telepathy-glib]
|
||||
* HTMLDIR : html documentation [DOCDIR]
|
||||
* DVIDIR : dvi documentation [DOCDIR]
|
||||
* PDFDIR : pdf documentation [DOCDIR]
|
||||
* PSDIR : ps documentation [DOCDIR]
|
||||
"""
|
||||
|
||||
import Utils, Options
|
||||
|
||||
_options = [x.split(', ') for x in '''
|
||||
bindir, user executables, ${EXEC_PREFIX}/bin
|
||||
sbindir, system admin executables, ${EXEC_PREFIX}/sbin
|
||||
libexecdir, program executables, ${EXEC_PREFIX}/libexec
|
||||
sysconfdir, read-only single-machine data, ${PREFIX}/etc
|
||||
sharedstatedir, modifiable architecture-independent data, ${PREFIX}/com
|
||||
localstatedir, modifiable single-machine data, ${PREFIX}/var
|
||||
libdir, object code libraries, ${EXEC_PREFIX}/lib
|
||||
includedir, C header files, ${PREFIX}/include
|
||||
oldincludedir, C header files for non-gcc, /usr/include
|
||||
datarootdir, read-only arch.-independent data root, ${PREFIX}/share
|
||||
datadir, read-only architecture-independent data, ${DATAROOTDIR}
|
||||
infodir, info documentation, ${DATAROOTDIR}/info
|
||||
localedir, locale-dependent data, ${DATAROOTDIR}/locale
|
||||
mandir, man documentation, ${DATAROOTDIR}/man
|
||||
docdir, documentation root, ${DATAROOTDIR}/doc/${PACKAGE}
|
||||
htmldir, html documentation, ${DOCDIR}
|
||||
dvidir, dvi documentation, ${DOCDIR}
|
||||
pdfdir, pdf documentation, ${DOCDIR}
|
||||
psdir, ps documentation, ${DOCDIR}
|
||||
'''.split('\n') if x]
|
||||
|
||||
def detect(conf):
|
||||
def get_param(varname, default):
|
||||
return getattr(Options.options, varname, '') or default
|
||||
|
||||
env = conf.env
|
||||
env['EXEC_PREFIX'] = get_param('EXEC_PREFIX', env['PREFIX'])
|
||||
env['PACKAGE'] = Utils.g_module.APPNAME
|
||||
|
||||
complete = False
|
||||
iter = 0
|
||||
while not complete and iter < len(_options) + 1:
|
||||
iter += 1
|
||||
complete = True
|
||||
for name, help, default in _options:
|
||||
name = name.upper()
|
||||
if not env[name]:
|
||||
try:
|
||||
env[name] = Utils.subst_vars(get_param(name, default), env)
|
||||
except TypeError:
|
||||
complete = False
|
||||
if not complete:
|
||||
lst = [name for name, _, _ in _options if not env[name.upper()]]
|
||||
raise Utils.WafError('Variable substitution failure %r' % lst)
|
||||
|
||||
def set_options(opt):
|
||||
|
||||
inst_dir = opt.add_option_group('Installation directories',
|
||||
'By default, "waf install" will put the files in\
|
||||
"/usr/local/bin", "/usr/local/lib" etc. An installation prefix other\
|
||||
than "/usr/local" can be given using "--prefix", for example "--prefix=$HOME"')
|
||||
|
||||
for k in ('--prefix', '--destdir'):
|
||||
option = opt.parser.get_option(k)
|
||||
if option:
|
||||
opt.parser.remove_option(k)
|
||||
inst_dir.add_option(option)
|
||||
|
||||
inst_dir.add_option('--exec-prefix',
|
||||
help = 'installation prefix [Default: ${PREFIX}]',
|
||||
default = '',
|
||||
dest = 'EXEC_PREFIX')
|
||||
|
||||
dirs_options = opt.add_option_group('Pre-defined installation directories', '')
|
||||
|
||||
for name, help, default in _options:
|
||||
option_name = '--' + name
|
||||
str_default = default
|
||||
str_help = '%s [Default: %s]' % (help, str_default)
|
||||
dirs_options.add_option(option_name, help=str_help, default='', dest=name.upper())
|
17
third_party/waf/wafadmin/Tools/gob2.py
vendored
17
third_party/waf/wafadmin/Tools/gob2.py
vendored
@ -1,17 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# Ali Sabil, 2007
|
||||
|
||||
import TaskGen
|
||||
|
||||
TaskGen.declare_chain(
|
||||
name = 'gob2',
|
||||
rule = '${GOB2} -o ${TGT[0].bld_dir(env)} ${GOB2FLAGS} ${SRC}',
|
||||
ext_in = '.gob',
|
||||
ext_out = '.c'
|
||||
)
|
||||
|
||||
def detect(conf):
|
||||
gob2 = conf.find_program('gob2', var='GOB2', mandatory=True)
|
||||
conf.env['GOB2'] = gob2
|
||||
conf.env['GOB2FLAGS'] = ''
|
136
third_party/waf/wafadmin/Tools/gxx.py
vendored
136
third_party/waf/wafadmin/Tools/gxx.py
vendored
@ -1,136 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# Thomas Nagy, 2006 (ita)
|
||||
# Ralf Habacker, 2006 (rh)
|
||||
# Yinon Ehrlich, 2009
|
||||
|
||||
import os, sys
|
||||
import Configure, Options, Utils
|
||||
import ccroot, ar
|
||||
from Configure import conftest
|
||||
|
||||
@conftest
|
||||
def find_gxx(conf):
|
||||
cxx = conf.find_program(['g++', 'c++'], var='CXX', mandatory=True)
|
||||
cxx = conf.cmd_to_list(cxx)
|
||||
ccroot.get_cc_version(conf, cxx, gcc=True)
|
||||
conf.env.CXX_NAME = 'gcc'
|
||||
conf.env.CXX = cxx
|
||||
|
||||
@conftest
|
||||
def gxx_common_flags(conf):
|
||||
v = conf.env
|
||||
|
||||
# CPPFLAGS CXXDEFINES _CXXINCFLAGS _CXXDEFFLAGS
|
||||
v['CXXFLAGS_DEBUG'] = ['-g']
|
||||
v['CXXFLAGS_RELEASE'] = ['-O2']
|
||||
|
||||
v['CXX_SRC_F'] = ''
|
||||
v['CXX_TGT_F'] = ['-c', '-o', ''] # shell hack for -MD
|
||||
v['CPPPATH_ST'] = '-I%s' # template for adding include paths
|
||||
|
||||
# linker
|
||||
if not v['LINK_CXX']: v['LINK_CXX'] = v['CXX']
|
||||
v['CXXLNK_SRC_F'] = ''
|
||||
v['CXXLNK_TGT_F'] = ['-o', ''] # shell hack for -MD
|
||||
|
||||
v['LIB_ST'] = '-l%s' # template for adding libs
|
||||
v['LIBPATH_ST'] = '-L%s' # template for adding libpaths
|
||||
v['STATICLIB_ST'] = '-l%s'
|
||||
v['STATICLIBPATH_ST'] = '-L%s'
|
||||
v['RPATH_ST'] = '-Wl,-rpath,%s'
|
||||
v['CXXDEFINES_ST'] = '-D%s'
|
||||
|
||||
v['SONAME_ST'] = '-Wl,-h,%s'
|
||||
v['SHLIB_MARKER'] = '-Wl,-Bdynamic'
|
||||
v['STATICLIB_MARKER'] = '-Wl,-Bstatic'
|
||||
v['FULLSTATIC_MARKER'] = '-static'
|
||||
|
||||
# program
|
||||
v['program_PATTERN'] = '%s'
|
||||
|
||||
# shared library
|
||||
v['shlib_CXXFLAGS'] = ['-fPIC', '-DPIC'] # avoid using -DPIC, -fPIC aleady defines the __PIC__ macro
|
||||
v['shlib_LINKFLAGS'] = ['-shared']
|
||||
v['shlib_PATTERN'] = 'lib%s.so'
|
||||
|
||||
# static lib
|
||||
v['staticlib_LINKFLAGS'] = ['-Wl,-Bstatic']
|
||||
v['staticlib_PATTERN'] = 'lib%s.a'
|
||||
|
||||
# osx stuff
|
||||
v['LINKFLAGS_MACBUNDLE'] = ['-bundle', '-undefined', 'dynamic_lookup']
|
||||
v['CCFLAGS_MACBUNDLE'] = ['-fPIC']
|
||||
v['macbundle_PATTERN'] = '%s.bundle'
|
||||
|
||||
@conftest
|
||||
def gxx_modifier_win32(conf):
|
||||
v = conf.env
|
||||
v['program_PATTERN'] = '%s.exe'
|
||||
|
||||
v['shlib_PATTERN'] = '%s.dll'
|
||||
v['implib_PATTERN'] = 'lib%s.dll.a'
|
||||
v['IMPLIB_ST'] = '-Wl,--out-implib,%s'
|
||||
|
||||
dest_arch = v['DEST_CPU']
|
||||
v['shlib_CXXFLAGS'] = []
|
||||
|
||||
v.append_value('shlib_CXXFLAGS', '-DDLL_EXPORT') # TODO adding nonstandard defines like this DLL_EXPORT is not a good idea
|
||||
|
||||
# Auto-import is enabled by default even without this option,
|
||||
# but enabling it explicitly has the nice effect of suppressing the rather boring, debug-level messages
|
||||
# that the linker emits otherwise.
|
||||
v.append_value('LINKFLAGS', '-Wl,--enable-auto-import')
|
||||
|
||||
@conftest
|
||||
def gxx_modifier_cygwin(conf):
|
||||
gxx_modifier_win32(conf)
|
||||
v = conf.env
|
||||
v['shlib_PATTERN'] = 'cyg%s.dll'
|
||||
v.append_value('shlib_LINKFLAGS', '-Wl,--enable-auto-image-base')
|
||||
|
||||
@conftest
|
||||
def gxx_modifier_darwin(conf):
|
||||
v = conf.env
|
||||
v['shlib_CXXFLAGS'] = ['-fPIC', '-compatibility_version', '1', '-current_version', '1']
|
||||
v['shlib_LINKFLAGS'] = ['-dynamiclib']
|
||||
v['shlib_PATTERN'] = 'lib%s.dylib'
|
||||
|
||||
v['staticlib_LINKFLAGS'] = []
|
||||
|
||||
v['SHLIB_MARKER'] = ''
|
||||
v['STATICLIB_MARKER'] = ''
|
||||
v['SONAME_ST'] = ''
|
||||
|
||||
@conftest
|
||||
def gxx_modifier_aix(conf):
|
||||
v = conf.env
|
||||
v['program_LINKFLAGS'] = ['-Wl,-brtl']
|
||||
|
||||
v['shlib_LINKFLAGS'] = ['-shared', '-Wl,-brtl,-bexpfull']
|
||||
|
||||
v['SHLIB_MARKER'] = ''
|
||||
|
||||
@conftest
|
||||
def gxx_modifier_openbsd(conf):
|
||||
conf.env['SONAME_ST'] = []
|
||||
|
||||
@conftest
|
||||
def gxx_modifier_platform(conf):
|
||||
# * set configurations specific for a platform.
|
||||
# * the destination platform is detected automatically by looking at the macros the compiler predefines,
|
||||
# and if it's not recognised, it fallbacks to sys.platform.
|
||||
dest_os = conf.env['DEST_OS'] or Utils.unversioned_sys_platform()
|
||||
gxx_modifier_func = globals().get('gxx_modifier_' + dest_os)
|
||||
if gxx_modifier_func:
|
||||
gxx_modifier_func(conf)
|
||||
|
||||
def detect(conf):
|
||||
conf.find_gxx()
|
||||
conf.find_cpp()
|
||||
conf.find_ar()
|
||||
conf.gxx_common_flags()
|
||||
conf.gxx_modifier_platform()
|
||||
conf.cxx_load_tools()
|
||||
conf.cxx_add_flags()
|
||||
conf.link_add_flags()
|
37
third_party/waf/wafadmin/Tools/icc.py
vendored
37
third_party/waf/wafadmin/Tools/icc.py
vendored
@ -1,37 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# Stian Selnes, 2008
|
||||
# Thomas Nagy 2009
|
||||
|
||||
import os, sys
|
||||
import Configure, Options, Utils
|
||||
import ccroot, ar, gcc
|
||||
from Configure import conftest
|
||||
|
||||
@conftest
|
||||
def find_icc(conf):
|
||||
if sys.platform == 'cygwin':
|
||||
conf.fatal('The Intel compiler does not work on Cygwin')
|
||||
|
||||
v = conf.env
|
||||
cc = None
|
||||
if v['CC']: cc = v['CC']
|
||||
elif 'CC' in conf.environ: cc = conf.environ['CC']
|
||||
if not cc: cc = conf.find_program('icc', var='CC')
|
||||
if not cc: cc = conf.find_program('ICL', var='CC')
|
||||
if not cc: conf.fatal('Intel C Compiler (icc) was not found')
|
||||
cc = conf.cmd_to_list(cc)
|
||||
|
||||
ccroot.get_cc_version(conf, cc, icc=True)
|
||||
v['CC'] = cc
|
||||
v['CC_NAME'] = 'icc'
|
||||
|
||||
detect = '''
|
||||
find_icc
|
||||
find_ar
|
||||
gcc_common_flags
|
||||
gcc_modifier_platform
|
||||
cc_load_tools
|
||||
cc_add_flags
|
||||
link_add_flags
|
||||
'''
|
35
third_party/waf/wafadmin/Tools/icpc.py
vendored
35
third_party/waf/wafadmin/Tools/icpc.py
vendored
@ -1,35 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# Thomas Nagy 2009
|
||||
|
||||
import os, sys
|
||||
import Configure, Options, Utils
|
||||
import ccroot, ar, gxx
|
||||
from Configure import conftest
|
||||
|
||||
@conftest
|
||||
def find_icpc(conf):
|
||||
if sys.platform == 'cygwin':
|
||||
conf.fatal('The Intel compiler does not work on Cygwin')
|
||||
|
||||
v = conf.env
|
||||
cxx = None
|
||||
if v['CXX']: cxx = v['CXX']
|
||||
elif 'CXX' in conf.environ: cxx = conf.environ['CXX']
|
||||
if not cxx: cxx = conf.find_program('icpc', var='CXX')
|
||||
if not cxx: conf.fatal('Intel C++ Compiler (icpc) was not found')
|
||||
cxx = conf.cmd_to_list(cxx)
|
||||
|
||||
ccroot.get_cc_version(conf, cxx, icc=True)
|
||||
v['CXX'] = cxx
|
||||
v['CXX_NAME'] = 'icc'
|
||||
|
||||
detect = '''
|
||||
find_icpc
|
||||
find_ar
|
||||
gxx_common_flags
|
||||
gxx_modifier_platform
|
||||
cxx_load_tools
|
||||
cxx_add_flags
|
||||
link_add_flags
|
||||
'''
|
138
third_party/waf/wafadmin/Tools/intltool.py
vendored
138
third_party/waf/wafadmin/Tools/intltool.py
vendored
@ -1,138 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# Thomas Nagy, 2006 (ita)
|
||||
|
||||
"intltool support"
|
||||
|
||||
import os, re
|
||||
import Configure, TaskGen, Task, Utils, Runner, Options, Build, config_c
|
||||
from TaskGen import feature, before, taskgen
|
||||
from Logs import error
|
||||
|
||||
"""
|
||||
Usage:
|
||||
|
||||
bld(features='intltool_in', source='a.po b.po', podir='po', cache='.intlcache', flags='')
|
||||
|
||||
"""
|
||||
|
||||
class intltool_in_taskgen(TaskGen.task_gen):
|
||||
"""deprecated"""
|
||||
def __init__(self, *k, **kw):
|
||||
TaskGen.task_gen.__init__(self, *k, **kw)
|
||||
|
||||
@before('apply_core')
|
||||
@feature('intltool_in')
|
||||
def iapply_intltool_in_f(self):
|
||||
try: self.meths.remove('apply_core')
|
||||
except ValueError: pass
|
||||
|
||||
for i in self.to_list(self.source):
|
||||
node = self.path.find_resource(i)
|
||||
|
||||
podir = getattr(self, 'podir', 'po')
|
||||
podirnode = self.path.find_dir(podir)
|
||||
if not podirnode:
|
||||
error("could not find the podir %r" % podir)
|
||||
continue
|
||||
|
||||
cache = getattr(self, 'intlcache', '.intlcache')
|
||||
self.env['INTLCACHE'] = os.path.join(self.path.bldpath(self.env), podir, cache)
|
||||
self.env['INTLPODIR'] = podirnode.srcpath(self.env)
|
||||
self.env['INTLFLAGS'] = getattr(self, 'flags', ['-q', '-u', '-c'])
|
||||
|
||||
task = self.create_task('intltool', node, node.change_ext(''))
|
||||
task.install_path = self.install_path
|
||||
|
||||
class intltool_po_taskgen(TaskGen.task_gen):
|
||||
"""deprecated"""
|
||||
def __init__(self, *k, **kw):
|
||||
TaskGen.task_gen.__init__(self, *k, **kw)
|
||||
|
||||
|
||||
@feature('intltool_po')
|
||||
def apply_intltool_po(self):
|
||||
try: self.meths.remove('apply_core')
|
||||
except ValueError: pass
|
||||
|
||||
self.default_install_path = '${LOCALEDIR}'
|
||||
appname = getattr(self, 'appname', 'set_your_app_name')
|
||||
podir = getattr(self, 'podir', '')
|
||||
|
||||
def install_translation(task):
|
||||
out = task.outputs[0]
|
||||
filename = out.name
|
||||
(langname, ext) = os.path.splitext(filename)
|
||||
inst_file = langname + os.sep + 'LC_MESSAGES' + os.sep + appname + '.mo'
|
||||
self.bld.install_as(os.path.join(self.install_path, inst_file), out, self.env, self.chmod)
|
||||
|
||||
linguas = self.path.find_resource(os.path.join(podir, 'LINGUAS'))
|
||||
if linguas:
|
||||
# scan LINGUAS file for locales to process
|
||||
file = open(linguas.abspath())
|
||||
langs = []
|
||||
for line in file.readlines():
|
||||
# ignore lines containing comments
|
||||
if not line.startswith('#'):
|
||||
langs += line.split()
|
||||
file.close()
|
||||
re_linguas = re.compile('[-a-zA-Z_@.]+')
|
||||
for lang in langs:
|
||||
# Make sure that we only process lines which contain locales
|
||||
if re_linguas.match(lang):
|
||||
node = self.path.find_resource(os.path.join(podir, re_linguas.match(lang).group() + '.po'))
|
||||
task = self.create_task('po')
|
||||
task.set_inputs(node)
|
||||
task.set_outputs(node.change_ext('.mo'))
|
||||
if self.bld.is_install: task.install = install_translation
|
||||
else:
|
||||
Utils.pprint('RED', "Error no LINGUAS file found in po directory")
|
||||
|
||||
Task.simple_task_type('po', '${POCOM} -o ${TGT} ${SRC}', color='BLUE', shell=False)
|
||||
Task.simple_task_type('intltool',
|
||||
'${INTLTOOL} ${INTLFLAGS} ${INTLCACHE} ${INTLPODIR} ${SRC} ${TGT}',
|
||||
color='BLUE', after="cc_link cxx_link", shell=False)
|
||||
|
||||
def detect(conf):
|
||||
pocom = conf.find_program('msgfmt')
|
||||
if not pocom:
|
||||
# if msgfmt should not be mandatory, catch the thrown exception in your wscript
|
||||
conf.fatal('The program msgfmt (gettext) is mandatory!')
|
||||
conf.env['POCOM'] = pocom
|
||||
|
||||
# NOTE: it is possible to set INTLTOOL in the environment, but it must not have spaces in it
|
||||
|
||||
intltool = conf.find_program('intltool-merge', var='INTLTOOL')
|
||||
if not intltool:
|
||||
# if intltool-merge should not be mandatory, catch the thrown exception in your wscript
|
||||
if Options.platform == 'win32':
|
||||
perl = conf.find_program('perl', var='PERL')
|
||||
if not perl:
|
||||
conf.fatal('The program perl (required by intltool) could not be found')
|
||||
|
||||
intltooldir = Configure.find_file('intltool-merge', os.environ['PATH'].split(os.pathsep))
|
||||
if not intltooldir:
|
||||
conf.fatal('The program intltool-merge (intltool, gettext-devel) is mandatory!')
|
||||
|
||||
conf.env['INTLTOOL'] = Utils.to_list(conf.env['PERL']) + [intltooldir + os.sep + 'intltool-merge']
|
||||
conf.check_message('intltool', '', True, ' '.join(conf.env['INTLTOOL']))
|
||||
else:
|
||||
conf.fatal('The program intltool-merge (intltool, gettext-devel) is mandatory!')
|
||||
|
||||
def getstr(varname):
|
||||
return getattr(Options.options, varname, '')
|
||||
|
||||
prefix = conf.env['PREFIX']
|
||||
datadir = getstr('datadir')
|
||||
if not datadir: datadir = os.path.join(prefix,'share')
|
||||
|
||||
conf.define('LOCALEDIR', os.path.join(datadir, 'locale'))
|
||||
conf.define('DATADIR', datadir)
|
||||
|
||||
if conf.env['CC'] or conf.env['CXX']:
|
||||
# Define to 1 if <locale.h> is present
|
||||
conf.check(header_name='locale.h')
|
||||
|
||||
def set_options(opt):
|
||||
opt.add_option('--want-rpath', type='int', default=1, dest='want_rpath', help='set rpath to 1 or 0 [Default 1]')
|
||||
opt.add_option('--datadir', type='string', default='', dest='datadir', help='read-only application data')
|
254
third_party/waf/wafadmin/Tools/javaw.py
vendored
254
third_party/waf/wafadmin/Tools/javaw.py
vendored
@ -1,254 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# Thomas Nagy, 2006-2008 (ita)
|
||||
|
||||
"""
|
||||
Java support
|
||||
|
||||
Javac is one of the few compilers that behaves very badly:
|
||||
* it outputs files where it wants to (-d is only for the package root)
|
||||
* it recompiles files silently behind your back
|
||||
* it outputs an undefined amount of files (inner classes)
|
||||
|
||||
Fortunately, the convention makes it possible to use the build dir without
|
||||
too many problems for the moment
|
||||
|
||||
Inner classes must be located and cleaned when a problem arise,
|
||||
for the moment waf does not track the production of inner classes.
|
||||
|
||||
Adding all the files to a task and executing it if any of the input files
|
||||
change is only annoying for the compilation times
|
||||
|
||||
Compilation can be run using Jython[1] rather than regular Python. Instead of
|
||||
running one of the following commands:
|
||||
./waf configure
|
||||
python waf configure
|
||||
You would have to run:
|
||||
java -jar /path/to/jython.jar waf configure
|
||||
|
||||
[1] http://www.jython.org/
|
||||
"""
|
||||
|
||||
import os, re
|
||||
from Configure import conf
|
||||
import TaskGen, Task, Utils, Options, Build
|
||||
from TaskGen import feature, before, taskgen
|
||||
|
||||
class_check_source = '''
|
||||
public class Test {
|
||||
public static void main(String[] argv) {
|
||||
Class lib;
|
||||
if (argv.length < 1) {
|
||||
System.err.println("Missing argument");
|
||||
System.exit(77);
|
||||
}
|
||||
try {
|
||||
lib = Class.forName(argv[0]);
|
||||
} catch (ClassNotFoundException e) {
|
||||
System.err.println("ClassNotFoundException");
|
||||
System.exit(1);
|
||||
}
|
||||
lib = null;
|
||||
System.exit(0);
|
||||
}
|
||||
}
|
||||
'''
|
||||
|
||||
@feature('jar')
|
||||
@before('apply_core')
|
||||
def jar_files(self):
|
||||
basedir = getattr(self, 'basedir', '.')
|
||||
destfile = getattr(self, 'destfile', 'test.jar')
|
||||
jaropts = getattr(self, 'jaropts', [])
|
||||
jarcreate = getattr(self, 'jarcreate', 'cf')
|
||||
|
||||
dir = self.path.find_dir(basedir)
|
||||
if not dir: raise
|
||||
|
||||
jaropts.append('-C')
|
||||
jaropts.append(dir.abspath(self.env))
|
||||
jaropts.append('.')
|
||||
|
||||
out = self.path.find_or_declare(destfile)
|
||||
|
||||
tsk = self.create_task('jar_create')
|
||||
tsk.set_outputs(out)
|
||||
tsk.inputs = [x for x in dir.find_iter(src=0, bld=1) if x.id != out.id]
|
||||
tsk.env['JAROPTS'] = jaropts
|
||||
tsk.env['JARCREATE'] = jarcreate
|
||||
|
||||
@feature('javac')
|
||||
@before('apply_core')
|
||||
def apply_java(self):
|
||||
Utils.def_attrs(self, jarname='', jaropts='', classpath='',
|
||||
sourcepath='.', srcdir='.', source_re='**/*.java',
|
||||
jar_mf_attributes={}, jar_mf_classpath=[])
|
||||
|
||||
if getattr(self, 'source_root', None):
|
||||
# old stuff
|
||||
self.srcdir = self.source_root
|
||||
|
||||
|
||||
nodes_lst = []
|
||||
|
||||
if not self.classpath:
|
||||
if not self.env['CLASSPATH']:
|
||||
self.env['CLASSPATH'] = '..' + os.pathsep + '.'
|
||||
else:
|
||||
self.env['CLASSPATH'] = self.classpath
|
||||
|
||||
srcdir_node = self.path.find_dir(self.srcdir)
|
||||
if not srcdir_node:
|
||||
raise Utils.WafError('could not find srcdir %r' % self.srcdir)
|
||||
|
||||
src_nodes = [x for x in srcdir_node.ant_glob(self.source_re, flat=False)]
|
||||
bld_nodes = [x.change_ext('.class') for x in src_nodes]
|
||||
|
||||
self.env['OUTDIR'] = [srcdir_node.bldpath(self.env)]
|
||||
|
||||
tsk = self.create_task('javac')
|
||||
tsk.set_inputs(src_nodes)
|
||||
tsk.set_outputs(bld_nodes)
|
||||
|
||||
if getattr(self, 'compat', None):
|
||||
tsk.env.append_value('JAVACFLAGS', ['-source', self.compat])
|
||||
|
||||
if hasattr(self, 'sourcepath'):
|
||||
fold = [self.path.find_dir(x) for x in self.to_list(self.sourcepath)]
|
||||
names = os.pathsep.join([x.srcpath() for x in fold])
|
||||
else:
|
||||
names = srcdir_node.srcpath()
|
||||
|
||||
if names:
|
||||
tsk.env.append_value('JAVACFLAGS', ['-sourcepath', names])
|
||||
|
||||
if self.jarname:
|
||||
jtsk = self.create_task('jar_create', bld_nodes, self.path.find_or_declare(self.jarname))
|
||||
jtsk.set_run_after(tsk)
|
||||
|
||||
if not self.env.JAROPTS:
|
||||
if self.jaropts:
|
||||
self.env.JAROPTS = self.jaropts
|
||||
else:
|
||||
dirs = '.'
|
||||
self.env.JAROPTS = ['-C', ''.join(self.env['OUTDIR']), dirs]
|
||||
|
||||
Task.simple_task_type('jar_create', '${JAR} ${JARCREATE} ${TGT} ${JAROPTS}', color='GREEN', shell=False)
|
||||
cls = Task.simple_task_type('javac', '${JAVAC} -classpath ${CLASSPATH} -d ${OUTDIR} ${JAVACFLAGS} ${SRC}', shell=False)
|
||||
cls.color = 'BLUE'
|
||||
def post_run_javac(self):
|
||||
"""this is for cleaning the folder
|
||||
javac creates single files for inner classes
|
||||
but it is not possible to know which inner classes in advance"""
|
||||
|
||||
par = {}
|
||||
for x in self.inputs:
|
||||
par[x.parent.id] = x.parent
|
||||
|
||||
inner = {}
|
||||
for k in par.values():
|
||||
path = k.abspath(self.env)
|
||||
lst = os.listdir(path)
|
||||
|
||||
for u in lst:
|
||||
if u.find('$') >= 0:
|
||||
inner_class_node = k.find_or_declare(u)
|
||||
inner[inner_class_node.id] = inner_class_node
|
||||
|
||||
to_add = set(inner.keys()) - set([x.id for x in self.outputs])
|
||||
for x in to_add:
|
||||
self.outputs.append(inner[x])
|
||||
|
||||
self.cached = True # disable the cache here - inner classes are a problem
|
||||
return Task.Task.post_run(self)
|
||||
cls.post_run = post_run_javac
|
||||
|
||||
def detect(conf):
|
||||
# If JAVA_PATH is set, we prepend it to the path list
|
||||
java_path = conf.environ['PATH'].split(os.pathsep)
|
||||
v = conf.env
|
||||
|
||||
if 'JAVA_HOME' in conf.environ:
|
||||
java_path = [os.path.join(conf.environ['JAVA_HOME'], 'bin')] + java_path
|
||||
conf.env['JAVA_HOME'] = [conf.environ['JAVA_HOME']]
|
||||
|
||||
for x in 'javac java jar'.split():
|
||||
conf.find_program(x, var=x.upper(), path_list=java_path)
|
||||
conf.env[x.upper()] = conf.cmd_to_list(conf.env[x.upper()])
|
||||
v['JAVA_EXT'] = ['.java']
|
||||
|
||||
if 'CLASSPATH' in conf.environ:
|
||||
v['CLASSPATH'] = conf.environ['CLASSPATH']
|
||||
|
||||
if not v['JAR']: conf.fatal('jar is required for making java packages')
|
||||
if not v['JAVAC']: conf.fatal('javac is required for compiling java classes')
|
||||
v['JARCREATE'] = 'cf' # can use cvf
|
||||
|
||||
@conf
|
||||
def check_java_class(self, classname, with_classpath=None):
|
||||
"""Check if the specified java class is installed"""
|
||||
|
||||
import shutil
|
||||
|
||||
javatestdir = '.waf-javatest'
|
||||
|
||||
classpath = javatestdir
|
||||
if self.env['CLASSPATH']:
|
||||
classpath += os.pathsep + self.env['CLASSPATH']
|
||||
if isinstance(with_classpath, str):
|
||||
classpath += os.pathsep + with_classpath
|
||||
|
||||
shutil.rmtree(javatestdir, True)
|
||||
os.mkdir(javatestdir)
|
||||
|
||||
java_file = open(os.path.join(javatestdir, 'Test.java'), 'w')
|
||||
java_file.write(class_check_source)
|
||||
java_file.close()
|
||||
|
||||
# Compile the source
|
||||
Utils.exec_command(self.env['JAVAC'] + [os.path.join(javatestdir, 'Test.java')], shell=False)
|
||||
|
||||
# Try to run the app
|
||||
cmd = self.env['JAVA'] + ['-cp', classpath, 'Test', classname]
|
||||
self.log.write("%s\n" % str(cmd))
|
||||
found = Utils.exec_command(cmd, shell=False, log=self.log)
|
||||
|
||||
self.check_message('Java class %s' % classname, "", not found)
|
||||
|
||||
shutil.rmtree(javatestdir, True)
|
||||
|
||||
return found
|
||||
|
||||
@conf
|
||||
def check_jni_headers(conf):
|
||||
"""
|
||||
Check for jni headers and libraries
|
||||
|
||||
On success the environment variable xxx_JAVA is added for uselib
|
||||
"""
|
||||
|
||||
if not conf.env.CC_NAME and not conf.env.CXX_NAME:
|
||||
conf.fatal('load a compiler first (gcc, g++, ..)')
|
||||
|
||||
if not conf.env.JAVA_HOME:
|
||||
conf.fatal('set JAVA_HOME in the system environment')
|
||||
|
||||
# jni requires the jvm
|
||||
javaHome = conf.env['JAVA_HOME'][0]
|
||||
|
||||
b = Build.BuildContext()
|
||||
b.load_dirs(conf.srcdir, conf.blddir)
|
||||
dir = b.root.find_dir(conf.env.JAVA_HOME[0] + '/include')
|
||||
f = dir.ant_glob('**/(jni|jni_md).h', flat=False)
|
||||
incDirs = [x.parent.abspath() for x in f]
|
||||
|
||||
dir = b.root.find_dir(conf.env.JAVA_HOME[0])
|
||||
f = dir.ant_glob('**/*jvm.(so|dll)', flat=False)
|
||||
libDirs = [x.parent.abspath() for x in f] or [javaHome]
|
||||
|
||||
for i, d in enumerate(libDirs):
|
||||
if conf.check(header_name='jni.h', define_name='HAVE_JNI_H', lib='jvm',
|
||||
libpath=d, includes=incDirs, uselib_store='JAVA', uselib='JAVA'):
|
||||
break
|
||||
else:
|
||||
conf.fatal('could not find lib jvm in %r (see config.log)' % libDirs)
|
73
third_party/waf/wafadmin/Tools/kde4.py
vendored
73
third_party/waf/wafadmin/Tools/kde4.py
vendored
@ -1,73 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# Thomas Nagy, 2006 (ita)
|
||||
|
||||
import os, sys, re
|
||||
import Options, TaskGen, Task, Utils
|
||||
from TaskGen import taskgen, feature, after
|
||||
|
||||
class msgfmt_taskgen(TaskGen.task_gen):
|
||||
def __init__(self, *k, **kw):
|
||||
TaskGen.task_gen.__init__(self, *k, **kw)
|
||||
|
||||
@feature('msgfmt')
|
||||
def init_msgfmt(self):
|
||||
#langs = '' # for example "foo/fr foo/br"
|
||||
self.default_install_path = '${KDE4_LOCALE_INSTALL_DIR}'
|
||||
|
||||
@feature('msgfmt')
|
||||
@after('init_msgfmt')
|
||||
def apply_msgfmt(self):
|
||||
for lang in self.to_list(self.langs):
|
||||
node = self.path.find_resource(lang+'.po')
|
||||
task = self.create_task('msgfmt', node, node.change_ext('.mo'))
|
||||
|
||||
if not self.bld.is_install: continue
|
||||
langname = lang.split('/')
|
||||
langname = langname[-1]
|
||||
task.install_path = self.install_path + os.sep + langname + os.sep + 'LC_MESSAGES'
|
||||
task.filename = getattr(self, 'appname', 'set_your_appname') + '.mo'
|
||||
task.chmod = self.chmod
|
||||
|
||||
def detect(conf):
|
||||
kdeconfig = conf.find_program('kde4-config')
|
||||
if not kdeconfig:
|
||||
conf.fatal('we need kde4-config')
|
||||
prefix = Utils.cmd_output('%s --prefix' % kdeconfig, silent=True).strip()
|
||||
file = '%s/share/apps/cmake/modules/KDELibsDependencies.cmake' % prefix
|
||||
try: os.stat(file)
|
||||
except OSError:
|
||||
file = '%s/share/kde4/apps/cmake/modules/KDELibsDependencies.cmake' % prefix
|
||||
try: os.stat(file)
|
||||
except OSError: conf.fatal('could not open %s' % file)
|
||||
|
||||
try:
|
||||
txt = Utils.readf(file)
|
||||
except (OSError, IOError):
|
||||
conf.fatal('could not read %s' % file)
|
||||
|
||||
txt = txt.replace('\\\n', '\n')
|
||||
fu = re.compile('#(.*)\n')
|
||||
txt = fu.sub('', txt)
|
||||
|
||||
setregexp = re.compile('([sS][eE][tT]\s*\()\s*([^\s]+)\s+\"([^"]+)\"\)')
|
||||
found = setregexp.findall(txt)
|
||||
|
||||
for (_, key, val) in found:
|
||||
#print key, val
|
||||
conf.env[key] = val
|
||||
|
||||
# well well, i could just write an interpreter for cmake files
|
||||
conf.env['LIB_KDECORE']='kdecore'
|
||||
conf.env['LIB_KDEUI'] ='kdeui'
|
||||
conf.env['LIB_KIO'] ='kio'
|
||||
conf.env['LIB_KHTML'] ='khtml'
|
||||
conf.env['LIB_KPARTS'] ='kparts'
|
||||
|
||||
conf.env['LIBPATH_KDECORE'] = conf.env['KDE4_LIB_INSTALL_DIR']
|
||||
conf.env['CPPPATH_KDECORE'] = conf.env['KDE4_INCLUDE_INSTALL_DIR']
|
||||
conf.env.append_value('CPPPATH_KDECORE', conf.env['KDE4_INCLUDE_INSTALL_DIR']+"/KDE")
|
||||
|
||||
conf.env['MSGFMT'] = conf.find_program('msgfmt')
|
||||
|
||||
Task.simple_task_type('msgfmt', '${MSGFMT} ${SRC} -o ${TGT}', color='BLUE', shell=False)
|
329
third_party/waf/wafadmin/Tools/libtool.py
vendored
329
third_party/waf/wafadmin/Tools/libtool.py
vendored
@ -1,329 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# Matthias Jahn, 2008, jahn matthias ath freenet punto de
|
||||
# Thomas Nagy, 2008 (ita)
|
||||
|
||||
import sys, re, os, optparse
|
||||
|
||||
import TaskGen, Task, Utils, preproc
|
||||
from Logs import error, debug, warn
|
||||
from TaskGen import taskgen, after, before, feature
|
||||
|
||||
REVISION="0.1.3"
|
||||
|
||||
"""
|
||||
if you want to use the code here, you must use something like this:
|
||||
obj = obj.create(...)
|
||||
obj.features.append("libtool")
|
||||
obj.vnum = "1.2.3" # optional, but versioned libraries are common
|
||||
"""
|
||||
|
||||
# fake libtool files
|
||||
fakelibtool_vardeps = ['CXX', 'PREFIX']
|
||||
def fakelibtool_build(task):
|
||||
# Writes a .la file, used by libtool
|
||||
env = task.env
|
||||
dest = open(task.outputs[0].abspath(env), 'w')
|
||||
sname = task.inputs[0].name
|
||||
fu = dest.write
|
||||
fu("# Generated by ltmain.sh - GNU libtool 1.5.18 - (pwn3d by BKsys II code name WAF)\n")
|
||||
if env['vnum']:
|
||||
nums = env['vnum'].split('.')
|
||||
libname = task.inputs[0].name
|
||||
name3 = libname+'.'+env['vnum']
|
||||
name2 = libname+'.'+nums[0]
|
||||
name1 = libname
|
||||
fu("dlname='%s'\n" % name2)
|
||||
strn = " ".join([name3, name2, name1])
|
||||
fu("library_names='%s'\n" % (strn) )
|
||||
else:
|
||||
fu("dlname='%s'\n" % sname)
|
||||
fu("library_names='%s %s %s'\n" % (sname, sname, sname) )
|
||||
fu("old_library=''\n")
|
||||
vars = ' '.join(env['libtoolvars']+env['LINKFLAGS'])
|
||||
fu("dependency_libs='%s'\n" % vars)
|
||||
fu("current=0\n")
|
||||
fu("age=0\nrevision=0\ninstalled=yes\nshouldnotlink=no\n")
|
||||
fu("dlopen=''\ndlpreopen=''\n")
|
||||
fu("libdir='%s/lib'\n" % env['PREFIX'])
|
||||
dest.close()
|
||||
return 0
|
||||
|
||||
def read_la_file(path):
|
||||
sp = re.compile(r'^([^=]+)=\'(.*)\'$')
|
||||
dc={}
|
||||
file = open(path, "r")
|
||||
for line in file.readlines():
|
||||
try:
|
||||
#print sp.split(line.strip())
|
||||
_, left, right, _ = sp.split(line.strip())
|
||||
dc[left]=right
|
||||
except ValueError:
|
||||
pass
|
||||
file.close()
|
||||
return dc
|
||||
|
||||
@feature("libtool")
|
||||
@after('apply_link')
|
||||
def apply_link_libtool(self):
|
||||
if self.type != 'program':
|
||||
linktask = self.link_task
|
||||
self.latask = self.create_task('fakelibtool', linktask.outputs, linktask.outputs[0].change_ext('.la'))
|
||||
|
||||
if self.bld.is_install:
|
||||
self.bld.install_files('${PREFIX}/lib', linktask.outputs[0], self.env)
|
||||
|
||||
@feature("libtool")
|
||||
@before('apply_core')
|
||||
def apply_libtool(self):
|
||||
self.env['vnum']=self.vnum
|
||||
|
||||
paths=[]
|
||||
libs=[]
|
||||
libtool_files=[]
|
||||
libtool_vars=[]
|
||||
|
||||
for l in self.env['LINKFLAGS']:
|
||||
if l[:2]=='-L':
|
||||
paths.append(l[2:])
|
||||
elif l[:2]=='-l':
|
||||
libs.append(l[2:])
|
||||
|
||||
for l in libs:
|
||||
for p in paths:
|
||||
dict = read_la_file(p+'/lib'+l+'.la')
|
||||
linkflags2 = dict.get('dependency_libs', '')
|
||||
for v in linkflags2.split():
|
||||
if v.endswith('.la'):
|
||||
libtool_files.append(v)
|
||||
libtool_vars.append(v)
|
||||
continue
|
||||
self.env.append_unique('LINKFLAGS', v)
|
||||
break
|
||||
|
||||
self.env['libtoolvars']=libtool_vars
|
||||
|
||||
while libtool_files:
|
||||
file = libtool_files.pop()
|
||||
dict = read_la_file(file)
|
||||
for v in dict['dependency_libs'].split():
|
||||
if v[-3:] == '.la':
|
||||
libtool_files.append(v)
|
||||
continue
|
||||
self.env.append_unique('LINKFLAGS', v)
|
||||
|
||||
Task.task_type_from_func('fakelibtool', vars=fakelibtool_vardeps, func=fakelibtool_build, color='BLUE', after="cc_link cxx_link static_link")
|
||||
|
||||
class libtool_la_file:
|
||||
def __init__ (self, la_filename):
|
||||
self.__la_filename = la_filename
|
||||
#remove path and .la suffix
|
||||
self.linkname = str(os.path.split(la_filename)[-1])[:-3]
|
||||
if self.linkname.startswith("lib"):
|
||||
self.linkname = self.linkname[3:]
|
||||
# The name that we can dlopen(3).
|
||||
self.dlname = None
|
||||
# Names of this library
|
||||
self.library_names = None
|
||||
# The name of the static archive.
|
||||
self.old_library = None
|
||||
# Libraries that this one depends upon.
|
||||
self.dependency_libs = None
|
||||
# Version information for libIlmImf.
|
||||
self.current = None
|
||||
self.age = None
|
||||
self.revision = None
|
||||
# Is this an already installed library?
|
||||
self.installed = None
|
||||
# Should we warn about portability when linking against -modules?
|
||||
self.shouldnotlink = None
|
||||
# Files to dlopen/dlpreopen
|
||||
self.dlopen = None
|
||||
self.dlpreopen = None
|
||||
# Directory that this library needs to be installed in:
|
||||
self.libdir = '/usr/lib'
|
||||
if not self.__parse():
|
||||
raise ValueError("file %s not found!!" %(la_filename))
|
||||
|
||||
def __parse(self):
|
||||
"Retrieve the variables from a file"
|
||||
if not os.path.isfile(self.__la_filename): return 0
|
||||
la_file=open(self.__la_filename, 'r')
|
||||
for line in la_file:
|
||||
ln = line.strip()
|
||||
if not ln: continue
|
||||
if ln[0]=='#': continue
|
||||
(key, value) = str(ln).split('=', 1)
|
||||
key = key.strip()
|
||||
value = value.strip()
|
||||
if value == "no": value = False
|
||||
elif value == "yes": value = True
|
||||
else:
|
||||
try: value = int(value)
|
||||
except ValueError: value = value.strip("'")
|
||||
setattr(self, key, value)
|
||||
la_file.close()
|
||||
return 1
|
||||
|
||||
def get_libs(self):
|
||||
"""return linkflags for this lib"""
|
||||
libs = []
|
||||
if self.dependency_libs:
|
||||
libs = str(self.dependency_libs).strip().split()
|
||||
if libs == None:
|
||||
libs = []
|
||||
# add la lib and libdir
|
||||
libs.insert(0, "-l%s" % self.linkname.strip())
|
||||
libs.insert(0, "-L%s" % self.libdir.strip())
|
||||
return libs
|
||||
|
||||
def __str__(self):
|
||||
return '''\
|
||||
dlname = "%(dlname)s"
|
||||
library_names = "%(library_names)s"
|
||||
old_library = "%(old_library)s"
|
||||
dependency_libs = "%(dependency_libs)s"
|
||||
version = %(current)s.%(age)s.%(revision)s
|
||||
installed = "%(installed)s"
|
||||
shouldnotlink = "%(shouldnotlink)s"
|
||||
dlopen = "%(dlopen)s"
|
||||
dlpreopen = "%(dlpreopen)s"
|
||||
libdir = "%(libdir)s"''' % self.__dict__
|
||||
|
||||
class libtool_config:
|
||||
def __init__ (self, la_filename):
|
||||
self.__libtool_la_file = libtool_la_file(la_filename)
|
||||
tmp = self.__libtool_la_file
|
||||
self.__version = [int(tmp.current), int(tmp.age), int(tmp.revision)]
|
||||
self.__sub_la_files = []
|
||||
self.__sub_la_files.append(la_filename)
|
||||
self.__libs = None
|
||||
|
||||
def __cmp__(self, other):
|
||||
"""make it compareable with X.Y.Z versions (Y and Z are optional)"""
|
||||
if not other:
|
||||
return 1
|
||||
othervers = [int(s) for s in str(other).split(".")]
|
||||
selfvers = self.__version
|
||||
return cmp(selfvers, othervers)
|
||||
|
||||
def __str__(self):
|
||||
return "\n".join([
|
||||
str(self.__libtool_la_file),
|
||||
' '.join(self.__libtool_la_file.get_libs()),
|
||||
'* New getlibs:',
|
||||
' '.join(self.get_libs())
|
||||
])
|
||||
|
||||
def __get_la_libs(self, la_filename):
|
||||
return libtool_la_file(la_filename).get_libs()
|
||||
|
||||
def get_libs(self):
|
||||
"""return the complete uniqe linkflags that do not
|
||||
contain .la files anymore"""
|
||||
libs_list = list(self.__libtool_la_file.get_libs())
|
||||
libs_map = {}
|
||||
while len(libs_list) > 0:
|
||||
entry = libs_list.pop(0)
|
||||
if entry:
|
||||
if str(entry).endswith(".la"):
|
||||
## prevents duplicate .la checks
|
||||
if entry not in self.__sub_la_files:
|
||||
self.__sub_la_files.append(entry)
|
||||
libs_list.extend(self.__get_la_libs(entry))
|
||||
else:
|
||||
libs_map[entry]=1
|
||||
self.__libs = libs_map.keys()
|
||||
return self.__libs
|
||||
|
||||
def get_libs_only_L(self):
|
||||
if not self.__libs: self.get_libs()
|
||||
libs = self.__libs
|
||||
libs = [s for s in libs if str(s).startswith('-L')]
|
||||
return libs
|
||||
|
||||
def get_libs_only_l(self):
|
||||
if not self.__libs: self.get_libs()
|
||||
libs = self.__libs
|
||||
libs = [s for s in libs if str(s).startswith('-l')]
|
||||
return libs
|
||||
|
||||
def get_libs_only_other(self):
|
||||
if not self.__libs: self.get_libs()
|
||||
libs = self.__libs
|
||||
libs = [s for s in libs if not(str(s).startswith('-L')or str(s).startswith('-l'))]
|
||||
return libs
|
||||
|
||||
def useCmdLine():
|
||||
"""parse cmdline args and control build"""
|
||||
usage = '''Usage: %prog [options] PathToFile.la
|
||||
example: %prog --atleast-version=2.0.0 /usr/lib/libIlmImf.la
|
||||
nor: %prog --libs /usr/lib/libamarok.la'''
|
||||
parser = optparse.OptionParser(usage)
|
||||
a = parser.add_option
|
||||
a("--version", dest = "versionNumber",
|
||||
action = "store_true", default = False,
|
||||
help = "output version of libtool-config"
|
||||
)
|
||||
a("--debug", dest = "debug",
|
||||
action = "store_true", default = False,
|
||||
help = "enable debug"
|
||||
)
|
||||
a("--libs", dest = "libs",
|
||||
action = "store_true", default = False,
|
||||
help = "output all linker flags"
|
||||
)
|
||||
a("--libs-only-l", dest = "libs_only_l",
|
||||
action = "store_true", default = False,
|
||||
help = "output -l flags"
|
||||
)
|
||||
a("--libs-only-L", dest = "libs_only_L",
|
||||
action = "store_true", default = False,
|
||||
help = "output -L flags"
|
||||
)
|
||||
a("--libs-only-other", dest = "libs_only_other",
|
||||
action = "store_true", default = False,
|
||||
help = "output other libs (e.g. -pthread)"
|
||||
)
|
||||
a("--atleast-version", dest = "atleast_version",
|
||||
default=None,
|
||||
help = "return 0 if the module is at least version ATLEAST_VERSION"
|
||||
)
|
||||
a("--exact-version", dest = "exact_version",
|
||||
default=None,
|
||||
help = "return 0 if the module is exactly version EXACT_VERSION"
|
||||
)
|
||||
a("--max-version", dest = "max_version",
|
||||
default=None,
|
||||
help = "return 0 if the module is at no newer than version MAX_VERSION"
|
||||
)
|
||||
|
||||
(options, args) = parser.parse_args()
|
||||
if len(args) != 1 and not options.versionNumber:
|
||||
parser.error("incorrect number of arguments")
|
||||
if options.versionNumber:
|
||||
print("libtool-config version %s" % REVISION)
|
||||
return 0
|
||||
ltf = libtool_config(args[0])
|
||||
if options.debug:
|
||||
print(ltf)
|
||||
if options.atleast_version:
|
||||
if ltf >= options.atleast_version: return 0
|
||||
sys.exit(1)
|
||||
if options.exact_version:
|
||||
if ltf == options.exact_version: return 0
|
||||
sys.exit(1)
|
||||
if options.max_version:
|
||||
if ltf <= options.max_version: return 0
|
||||
sys.exit(1)
|
||||
|
||||
def p(x):
|
||||
print(" ".join(x))
|
||||
if options.libs: p(ltf.get_libs())
|
||||
elif options.libs_only_l: p(ltf.get_libs_only_l())
|
||||
elif options.libs_only_L: p(ltf.get_libs_only_L())
|
||||
elif options.libs_only_other: p(ltf.get_libs_only_other())
|
||||
return 0
|
||||
|
||||
if __name__ == '__main__':
|
||||
useCmdLine()
|
24
third_party/waf/wafadmin/Tools/lua.py
vendored
24
third_party/waf/wafadmin/Tools/lua.py
vendored
@ -1,24 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# Sebastian Schlingmann, 2008
|
||||
# Thomas Nagy, 2008 (ita)
|
||||
|
||||
import TaskGen
|
||||
from TaskGen import taskgen, feature
|
||||
from Constants import *
|
||||
|
||||
TaskGen.declare_chain(
|
||||
name = 'luac',
|
||||
rule = '${LUAC} -s -o ${TGT} ${SRC}',
|
||||
ext_in = '.lua',
|
||||
ext_out = '.luac',
|
||||
reentrant = False,
|
||||
install = 'LUADIR', # env variable
|
||||
)
|
||||
|
||||
@feature('lua')
|
||||
def init_lua(self):
|
||||
self.default_chmod = O755
|
||||
|
||||
def detect(conf):
|
||||
conf.find_program('luac', var='LUAC', mandatory = True)
|
796
third_party/waf/wafadmin/Tools/msvc.py
vendored
796
third_party/waf/wafadmin/Tools/msvc.py
vendored
@ -1,796 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# Carlos Rafael Giani, 2006 (dv)
|
||||
# Tamas Pal, 2007 (folti)
|
||||
# Nicolas Mercier, 2009
|
||||
# Microsoft Visual C++/Intel C++ compiler support - beta, needs more testing
|
||||
|
||||
# usage:
|
||||
#
|
||||
# conf.env['MSVC_VERSIONS'] = ['msvc 9.0', 'msvc 8.0', 'wsdk 7.0', 'intel 11', 'PocketPC 9.0', 'Smartphone 8.0']
|
||||
# conf.env['MSVC_TARGETS'] = ['x64']
|
||||
# conf.check_tool('msvc')
|
||||
# OR conf.check_tool('msvc', funs='no_autodetect')
|
||||
# conf.check_lib_msvc('gdi32')
|
||||
# conf.check_libs_msvc('kernel32 user32', mandatory=true)
|
||||
# ...
|
||||
# obj.uselib = 'KERNEL32 USER32 GDI32'
|
||||
#
|
||||
# platforms and targets will be tested in the order they appear;
|
||||
# the first good configuration will be used
|
||||
# supported platforms :
|
||||
# ia64, x64, x86, x86_amd64, x86_ia64
|
||||
|
||||
# compilers supported :
|
||||
# msvc => Visual Studio, versions 7.1 (2003), 8,0 (2005), 9.0 (2008)
|
||||
# wsdk => Windows SDK, versions 6.0, 6.1, 7.0
|
||||
# icl => Intel compiler, versions 9,10,11
|
||||
# Smartphone => Compiler/SDK for Smartphone devices (armv4/v4i)
|
||||
# PocketPC => Compiler/SDK for PocketPC devices (armv4/v4i)
|
||||
|
||||
|
||||
import os, sys, re, string, optparse
|
||||
import Utils, TaskGen, Runner, Configure, Task, Options
|
||||
from Logs import debug, info, warn, error
|
||||
from TaskGen import after, before, feature
|
||||
|
||||
from Configure import conftest, conf
|
||||
import ccroot, cc, cxx, ar, winres
|
||||
from libtool import read_la_file
|
||||
|
||||
try:
|
||||
import _winreg
|
||||
except:
|
||||
import winreg as _winreg
|
||||
|
||||
pproc = Utils.pproc
|
||||
|
||||
# importlibs provided by MSVC/Platform SDK. Do NOT search them....
|
||||
g_msvc_systemlibs = """
|
||||
aclui activeds ad1 adptif adsiid advapi32 asycfilt authz bhsupp bits bufferoverflowu cabinet
|
||||
cap certadm certidl ciuuid clusapi comctl32 comdlg32 comsupp comsuppd comsuppw comsuppwd comsvcs
|
||||
credui crypt32 cryptnet cryptui d3d8thk daouuid dbgeng dbghelp dciman32 ddao35 ddao35d
|
||||
ddao35u ddao35ud delayimp dhcpcsvc dhcpsapi dlcapi dnsapi dsprop dsuiext dtchelp
|
||||
faultrep fcachdll fci fdi framedyd framedyn gdi32 gdiplus glauxglu32 gpedit gpmuuid
|
||||
gtrts32w gtrtst32hlink htmlhelp httpapi icm32 icmui imagehlp imm32 iphlpapi iprop
|
||||
kernel32 ksguid ksproxy ksuser libcmt libcmtd libcpmt libcpmtd loadperf lz32 mapi
|
||||
mapi32 mgmtapi minidump mmc mobsync mpr mprapi mqoa mqrt msacm32 mscms mscoree
|
||||
msdasc msimg32 msrating mstask msvcmrt msvcurt msvcurtd mswsock msxml2 mtx mtxdm
|
||||
netapi32 nmapinmsupp npptools ntdsapi ntdsbcli ntmsapi ntquery odbc32 odbcbcp
|
||||
odbccp32 oldnames ole32 oleacc oleaut32 oledb oledlgolepro32 opends60 opengl32
|
||||
osptk parser pdh penter pgobootrun pgort powrprof psapi ptrustm ptrustmd ptrustu
|
||||
ptrustud qosname rasapi32 rasdlg rassapi resutils riched20 rpcndr rpcns4 rpcrt4 rtm
|
||||
rtutils runtmchk scarddlg scrnsave scrnsavw secur32 sensapi setupapi sfc shell32
|
||||
shfolder shlwapi sisbkup snmpapi sporder srclient sti strsafe svcguid tapi32 thunk32
|
||||
traffic unicows url urlmon user32 userenv usp10 uuid uxtheme vcomp vcompd vdmdbg
|
||||
version vfw32 wbemuuid webpost wiaguid wininet winmm winscard winspool winstrm
|
||||
wintrust wldap32 wmiutils wow32 ws2_32 wsnmp32 wsock32 wst wtsapi32 xaswitch xolehlp
|
||||
""".split()
|
||||
|
||||
|
||||
all_msvc_platforms = [ ('x64', 'amd64'), ('x86', 'x86'), ('ia64', 'ia64'), ('x86_amd64', 'amd64'), ('x86_ia64', 'ia64') ]
|
||||
all_wince_platforms = [ ('armv4', 'arm'), ('armv4i', 'arm'), ('mipsii', 'mips'), ('mipsii_fp', 'mips'), ('mipsiv', 'mips'), ('mipsiv_fp', 'mips'), ('sh4', 'sh'), ('x86', 'cex86') ]
|
||||
all_icl_platforms = [ ('intel64', 'amd64'), ('em64t', 'amd64'), ('ia32', 'x86'), ('Itanium', 'ia64')]
|
||||
|
||||
def setup_msvc(conf, versions):
	"""Pick the first (version, target) pair matching the user's preferences.

	*versions* is a list of (version-name, targets) produced by the gather_*
	functions. MSVC_TARGETS / MSVC_VERSIONS restrict the search; otherwise
	every known platform and all detected versions (newest first) are tried.
	Returns (compiler, revision, bindirs, incdirs, libdirs) or calls conf.fatal.
	"""
	platforms = Utils.to_list(conf.env['MSVC_TARGETS']) or [i for i,j in all_msvc_platforms+all_icl_platforms+all_wince_platforms]
	desired_versions = conf.env['MSVC_VERSIONS'] or [v for v,_ in versions][::-1]
	versiondict = dict(versions)

	for version in desired_versions:
		try:
			targets = dict(versiondict [version])
			for target in platforms:
				try:
					arch,(p1,p2,p3) = targets[target]
					compiler,revision = version.split()
					return compiler,revision,p1,p2,p3
				except KeyError: continue
		except KeyError: continue
	conf.fatal('msvc: Impossible to find a valid architecture for building (in setup_msvc)')
|
||||
|
||||
@conf
def get_msvc_version(conf, compiler, version, target, vcvars):
	"""Run the given vcvars batch file and harvest PATH/INCLUDE/LIB from it.

	A throw-away .bat is written to the build dir, executed through cmd.exe,
	and its echoed environment parsed. The detected compiler is then executed
	once ('/help') to weed out installs that cannot run on this host (e.g.
	64-bit tools on a 32-bit system). Returns (MSVC_PATH, MSVC_INCDIR,
	MSVC_LIBDIR) or calls conf.fatal.
	"""
	debug('msvc: get_msvc_version: %r %r %r', compiler, version, target)
	batfile = os.path.join(conf.blddir, 'waf-print-msvc.bat')
	f = open(batfile, 'w')
	f.write("""@echo off
set INCLUDE=
set LIB=
call "%s" %s
echo PATH=%%PATH%%
echo INCLUDE=%%INCLUDE%%
echo LIB=%%LIB%%
""" % (vcvars,target))
	f.close()
	sout = Utils.cmd_output(['cmd', '/E:on', '/V:on', '/C', batfile])
	lines = sout.splitlines()

	# the first output line must be a known banner, otherwise the bat failed
	for x in ('Setting environment', 'Setting SDK environment', 'Intel(R) C++ Compiler'):
		if lines[0].find(x) != -1:
			break
	else:
		debug('msvc: get_msvc_version: %r %r %r -> not found', compiler, version, target)
		conf.fatal('msvc: Impossible to find a valid architecture for building (in get_msvc_version)')

	for line in lines[1:]:
		if line.startswith('PATH='):
			path = line[5:]
			MSVC_PATH = path.split(';')
		elif line.startswith('INCLUDE='):
			MSVC_INCDIR = [i for i in line[8:].split(';') if i]
		elif line.startswith('LIB='):
			MSVC_LIBDIR = [i for i in line[4:].split(';') if i]

	# Check if the compiler is usable at all.
	# The detection may return 64-bit versions even on 32-bit systems, and these would fail to run.
	env = {}
	env.update(os.environ)
	env.update(PATH = path)
	compiler_name, linker_name, lib_name = _get_prog_names(conf, compiler)
	cxx = conf.find_program(compiler_name, path_list=MSVC_PATH)
	# delete CL if exists. because it could contain parameters wich can change cl's behaviour rather catastrophically.
	if env.has_key('CL'):
		del(env['CL'])

	try:
		p = pproc.Popen([cxx, '/help'], env=env, stdout=pproc.PIPE, stderr=pproc.PIPE)
		out, err = p.communicate()
		if p.returncode != 0:
			raise Exception('return code: %r: %r' % (p.returncode, err))
	except Exception, e:
		debug('msvc: get_msvc_version: %r %r %r -> failure', compiler, version, target)
		debug(str(e))
		conf.fatal('msvc: cannot run the compiler (in get_msvc_version)')
	else:
		debug('msvc: get_msvc_version: %r %r %r -> OK', compiler, version, target)

	return (MSVC_PATH, MSVC_INCDIR, MSVC_LIBDIR)
|
||||
|
||||
@conf
def gather_wsdk_versions(conf, versions):
	"""Enumerate installed Windows SDKs from the registry.

	Appends ('wsdk <ver>', targets) entries to *versions* for every SDK that
	ships a bin/SetEnv.cmd; each target is validated via get_msvc_version.
	Silently returns when no SDK registry key exists.
	"""
	version_pattern = re.compile('^v..?.?\...?.?')
	try:
		all_versions = _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Wow6432node\\Microsoft\\Microsoft SDKs\\Windows')
	except WindowsError:
		try:
			all_versions = _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Microsoft\\Microsoft SDKs\\Windows')
		except WindowsError:
			return
	index = 0
	while 1:
		try:
			version = _winreg.EnumKey(all_versions, index)
		except WindowsError:
			# EnumKey raises when the subkey index is exhausted
			break
		index = index + 1
		if not version_pattern.match(version):
			continue
		try:
			msvc_version = _winreg.OpenKey(all_versions, version)
			path,type = _winreg.QueryValueEx(msvc_version,'InstallationFolder')
		except WindowsError:
			continue
		if os.path.isfile(os.path.join(path, 'bin', 'SetEnv.cmd')):
			targets = []
			for target,arch in all_msvc_platforms:
				try:
					targets.append((target, (arch, conf.get_msvc_version('wsdk', version, '/'+target, os.path.join(path, 'bin', 'SetEnv.cmd')))))
				except Configure.ConfigurationError:
					pass
			versions.append(('wsdk ' + version[1:], targets))
|
||||
|
||||
@conf
def gather_msvc_versions(conf, versions):
	"""Enumerate Visual Studio / VC Express installs and Windows CE SDKs.

	First scans the Windows CE Tools SDK registry keys to build
	supported_wince_platforms (device, [(arch, compiler, incdir, libdir)]),
	then scans VCExpress/VisualStudio keys; each install found is validated
	with get_msvc_version and appended to *versions* as
	('msvc <ver>', targets) plus ('<device> <ver>', cetargets) entries.
	"""
	# checks SmartPhones SDKs
	try:
		ce_sdk = _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Wow6432node\\Microsoft\\Windows CE Tools\\SDKs')
	except WindowsError:
		try:
			ce_sdk = _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Microsoft\\Windows CE Tools\\SDKs')
		except WindowsError:
			ce_sdk = ''
	if ce_sdk:
		supported_wince_platforms = []
		ce_index = 0
		while 1:
			try:
				sdk_device = _winreg.EnumKey(ce_sdk, ce_index)
			except WindowsError:
				break
			ce_index = ce_index + 1
			sdk = _winreg.OpenKey(ce_sdk, sdk_device)
			path,type = _winreg.QueryValueEx(sdk, 'SDKRootDir')
			path=str(path)
			path,device = os.path.split(path)
			if not device:
				# trailing separator: split again to get the device name
				path,device = os.path.split(path)
			for arch,compiler in all_wince_platforms:
				platforms = []
				if os.path.isdir(os.path.join(path, device, 'Lib', arch)):
					platforms.append((arch, compiler, os.path.join(path, device, 'Include', arch), os.path.join(path, device, 'Lib', arch)))
				if platforms:
					supported_wince_platforms.append((device, platforms))
	# checks MSVC
	version_pattern = re.compile('^..?\...?')
	for vcver,vcvar in [('VCExpress','exp'), ('VisualStudio','')]:
		try:
			all_versions = _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Wow6432node\\Microsoft\\'+vcver)
		except WindowsError:
			try:
				all_versions = _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Microsoft\\'+vcver)
			except WindowsError:
				continue
		index = 0
		while 1:
			try:
				version = _winreg.EnumKey(all_versions, index)
			except WindowsError:
				break
			index = index + 1
			if not version_pattern.match(version):
				continue
			try:
				msvc_version = _winreg.OpenKey(all_versions, version + "\\Setup\\VS")
				path,type = _winreg.QueryValueEx(msvc_version, 'ProductDir')
				path=str(path)
				targets = []
				if ce_sdk:
					for device,platforms in supported_wince_platforms:
						cetargets = []
						for platform,compiler,include,lib in platforms:
							winCEpath = os.path.join(path, 'VC', 'ce')
							if os.path.isdir(winCEpath):
								common_bindirs,_1,_2 = conf.get_msvc_version('msvc', version, 'x86', os.path.join(path, 'Common7', 'Tools', 'vsvars32.bat'))
								if os.path.isdir(os.path.join(winCEpath, 'lib', platform)):
									bindirs = [os.path.join(winCEpath, 'bin', compiler), os.path.join(winCEpath, 'bin', 'x86_'+compiler)] + common_bindirs
									incdirs = [include, os.path.join(winCEpath, 'include'), os.path.join(winCEpath, 'atlmfc', 'include')]
									libdirs = [lib, os.path.join(winCEpath, 'lib', platform), os.path.join(winCEpath, 'atlmfc', 'lib', platform)]
									cetargets.append((platform, (platform, (bindirs,incdirs,libdirs))))
						versions.append((device+' '+version, cetargets))
				if os.path.isfile(os.path.join(path, 'VC', 'vcvarsall.bat')):
					for target,realtarget in all_msvc_platforms[::-1]:
						try:
							targets.append((target, (realtarget, conf.get_msvc_version('msvc', version, target, os.path.join(path, 'VC', 'vcvarsall.bat')))))
						except:
							pass
				elif os.path.isfile(os.path.join(path, 'Common7', 'Tools', 'vsvars32.bat')):
					try:
						targets.append(('x86', ('x86', conf.get_msvc_version('msvc', version, 'x86', os.path.join(path, 'Common7', 'Tools', 'vsvars32.bat')))))
					except Configure.ConfigurationError:
						pass
				versions.append(('msvc '+version, targets))

			except WindowsError:
				continue
|
||||
|
||||
@conf
def gather_icl_versions(conf, versions):
	"""Enumerate installed Intel C++ compilers from the registry.

	Appends ('intel <major>', targets) entries to *versions*; each target
	providing a bin/iclvars.bat is validated via get_msvc_version.
	Silently returns when no Intel registry key exists.
	"""
	version_pattern = re.compile('^...?.?\....?.?')
	try:
		all_versions = _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Wow6432node\\Intel\\Compilers\\C++')
	except WindowsError:
		try:
			all_versions = _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Intel\\Compilers\\C++')
		except WindowsError:
			return
	index = 0
	while 1:
		try:
			version = _winreg.EnumKey(all_versions, index)
		except WindowsError:
			break
		index = index + 1
		if not version_pattern.match(version):
			continue
		targets = []
		for target,arch in all_icl_platforms:
			try:
				icl_version = _winreg.OpenKey(all_versions, version+'\\'+target)
				path,type = _winreg.QueryValueEx(icl_version,'ProductDir')
				if os.path.isfile(os.path.join(path, 'bin', 'iclvars.bat')):
					try:
						targets.append((target, (arch, conf.get_msvc_version('intel', version, target, os.path.join(path, 'bin', 'iclvars.bat')))))
					except Configure.ConfigurationError:
						pass
			except WindowsError:
				continue
		major = version[0:2]
		versions.append(('intel ' + major, targets))
|
||||
|
||||
@conf
def get_msvc_versions(conf):
	"""Return the list of detected compiler installs, scanning only once.

	The result is cached in conf.env.MSVC_INSTALLED_VERSIONS so the (slow)
	registry/batch-file probing runs at most once per configuration.
	"""
	if not conf.env.MSVC_INSTALLED_VERSIONS:
		lst = []
		conf.gather_msvc_versions(lst)
		conf.gather_wsdk_versions(lst)
		conf.gather_icl_versions(lst)
		conf.env.MSVC_INSTALLED_VERSIONS = lst
	return conf.env.MSVC_INSTALLED_VERSIONS
|
||||
|
||||
@conf
def print_all_msvc_detected(conf):
	"""Log every detected compiler version and its usable targets."""
	for version,targets in conf.env['MSVC_INSTALLED_VERSIONS']:
		info(version)
		for target,l in targets:
			info("\t"+target)
|
||||
|
||||
def detect_msvc(conf):
	"""Detect installed compilers and select one; see setup_msvc for the result tuple."""
	versions = get_msvc_versions(conf)
	return setup_msvc(conf, versions)
|
||||
|
||||
@conf
def find_lt_names_msvc(self, libname, is_static=False):
	"""
	Win32/MSVC specific code to glean out information from libtool la files.
	this function is not attached to the task_gen class

	Returns (directory, library-name, is-static) for the first matching
	lib<name>.la / <name>.la found on LIBPATH, or (None, None, None).
	"""
	lt_names=[
		'lib%s.la' % libname,
		'%s.la' % libname,
	]

	for path in self.env['LIBPATH']:
		for la in lt_names:
			laf=os.path.join(path,la)
			dll=None
			if os.path.exists(laf):
				ltdict=read_la_file(laf)
				lt_libdir=None
				if ltdict.get('libdir', ''):
					lt_libdir = ltdict['libdir']
				if not is_static and ltdict.get('library_names', ''):
					dllnames=ltdict['library_names'].split()
					dll=dllnames[0].lower()
					dll=re.sub('\.dll$', '', dll)
					return (lt_libdir, dll, False)
				elif ltdict.get('old_library', ''):
					olib=ltdict['old_library']
					if os.path.exists(os.path.join(path,olib)):
						return (path, olib, True)
					# bugfix: lt_libdir may still be None here; the old test
					# (lt_libdir != '') let None through and os.path.join(None, olib)
					# raised TypeError for .la files without a usable libdir
					elif lt_libdir and os.path.exists(os.path.join(lt_libdir,olib)):
						return (lt_libdir, olib, True)
					else:
						return (None, olib, True)
				else:
					raise Utils.WafError('invalid libtool object file: %s' % laf)
	return (None, None, None)
|
||||
|
||||
@conf
def libname_msvc(self, libname, is_static=False, mandatory=False):
	"""Map a generic library name to the name MSVC's linker expects.

	Known system libraries are returned as-is; 'm' (libm) maps to None since
	MSVC has no separate math library. Otherwise libtool .la files and a set
	of static/dynamic naming patterns are searched on LIBPATH; the first hit
	wins (the trailing '.lib' is stripped). With mandatory=True a miss is fatal.
	"""
	lib = libname.lower()
	lib = re.sub('\.lib$','',lib)

	if lib in g_msvc_systemlibs:
		return lib

	lib=re.sub('^lib','',lib)

	if lib == 'm':
		return None

	(lt_path, lt_libname, lt_static) = self.find_lt_names_msvc(lib, is_static)

	if lt_path != None and lt_libname != None:
		if lt_static == True:
			# file existence check has been made by find_lt_names
			return os.path.join(lt_path,lt_libname)

	if lt_path != None:
		_libpaths=[lt_path] + self.env['LIBPATH']
	else:
		_libpaths=self.env['LIBPATH']

	static_libs=[
		'lib%ss.lib' % lib,
		'lib%s.lib' % lib,
		'%ss.lib' % lib,
		'%s.lib' %lib,
		]

	dynamic_libs=[
		'lib%s.dll.lib' % lib,
		'lib%s.dll.a' % lib,
		'%s.dll.lib' % lib,
		'%s.dll.a' % lib,
		'lib%s_d.lib' % lib,
		'%s_d.lib' % lib,
		'%s.lib' %lib,
		]

	libnames=static_libs
	if not is_static:
		libnames=dynamic_libs + static_libs

	for path in _libpaths:
		for libn in libnames:
			if os.path.exists(os.path.join(path, libn)):
				debug('msvc: lib found: %s', os.path.join(path,libn))
				return re.sub('\.lib$', '',libn)

	#if no lib can be found, just return the libname as msvc expects it
	if mandatory:
		self.fatal("The library %r could not be found" % libname)
	return re.sub('\.lib$', '', libname)
|
||||
|
||||
@conf
def check_lib_msvc(self, libname, is_static=False, uselib_store=None, mandatory=False):
	"This is the api to use"
	# resolve the MSVC name and record it under LIB_<USELIB> for later linking
	libn = self.libname_msvc(libname, is_static, mandatory)

	if not uselib_store:
		uselib_store = libname.upper()

	# Note: ideally we should be able to place the lib in the right env var, either STATICLIB or LIB,
	# but we don't distinguish static libs from shared libs.
	# This is ok since msvc doesn't have any special linker flag to select static libs (no env['STATICLIB_MARKER'])
	if False and is_static: # disabled
		self.env['STATICLIB_' + uselib_store] = [libn]
	else:
		self.env['LIB_' + uselib_store] = [libn]
|
||||
|
||||
@conf
def check_libs_msvc(self, libnames, is_static=False, mandatory=False):
	"""Convenience wrapper: run check_lib_msvc over a space-separated list or list of names."""
	for libname in Utils.to_list(libnames):
		self.check_lib_msvc(libname, is_static, mandatory=mandatory)
|
||||
|
||||
@conftest
def no_autodetect(conf):
	"""Alternate entry point: run the configuration steps without 'autodetect'."""
	conf.eval_rules(detect.replace('autodetect', ''))
|
||||
|
||||
|
||||
detect = '''
|
||||
autodetect
|
||||
find_msvc
|
||||
msvc_common_flags
|
||||
cc_load_tools
|
||||
cxx_load_tools
|
||||
cc_add_flags
|
||||
cxx_add_flags
|
||||
link_add_flags
|
||||
'''
|
||||
|
||||
@conftest
def autodetect(conf):
	"""Store the detected compiler's paths in the environment before find_msvc runs."""
	v = conf.env
	compiler, version, path, includes, libdirs = detect_msvc(conf)
	v['PATH'] = path
	v['CPPPATH'] = includes
	v['LIBPATH'] = libdirs
	v['MSVC_COMPILER'] = compiler
|
||||
|
||||
def _get_prog_names(conf, compiler):
|
||||
if compiler=='intel':
|
||||
compiler_name = 'ICL'
|
||||
linker_name = 'XILINK'
|
||||
lib_name = 'XILIB'
|
||||
else:
|
||||
# assumes CL.exe
|
||||
compiler_name = 'CL'
|
||||
linker_name = 'LINK'
|
||||
lib_name = 'LIB'
|
||||
return compiler_name, linker_name, lib_name
|
||||
|
||||
@conftest
def find_msvc(conf):
	"""Locate the compiler, linker, archiver and manifest tool and store them.

	Runs the detected cl.exe once as a sanity check before committing any
	state, so a failure leaves the environment untouched.
	"""
	# due to path format limitations, limit operation only to native Win32. Yeah it sucks.
	if sys.platform != 'win32':
		conf.fatal('MSVC module only works under native Win32 Python! cygwin is not supported yet')

	v = conf.env

	compiler, version, path, includes, libdirs = detect_msvc(conf)

	compiler_name, linker_name, lib_name = _get_prog_names(conf, compiler)
	# manifest embedding required from VS 2005 / WSDK 6 / ICL 11 onwards
	has_msvc_manifest = (compiler == 'msvc' and float(version) >= 8) or (compiler == 'wsdk' and float(version) >= 6) or (compiler == 'intel' and float(version) >= 11)

	# compiler
	cxx = None
	if v.CXX: cxx = v.CXX
	elif 'CXX' in conf.environ: cxx = conf.environ['CXX']
	if not cxx: cxx = conf.find_program(compiler_name, var='CXX', path_list=path, mandatory=True)
	cxx = conf.cmd_to_list(cxx)

	# before setting anything, check if the compiler is really msvc
	env = dict(conf.environ)
	env.update(PATH = ';'.join(path))
	if not Utils.cmd_output([cxx, '/nologo', '/?'], silent=True, env=env):
		conf.fatal('the msvc compiler could not be identified')

	link = v.LINK_CXX
	if not link:
		link = conf.find_program(linker_name, path_list=path, mandatory=True)
	ar = v.AR
	if not ar:
		ar = conf.find_program(lib_name, path_list=path, mandatory=True)

	# manifest tool. Not required for VS 2003 and below. Must have for VS 2005 and later
	mt = v.MT
	if has_msvc_manifest:
		mt = conf.find_program('MT', path_list=path, mandatory=True)

	# no more possibility of failure means the data state will be consistent
	# we may store the data safely now

	v.MSVC_MANIFEST = has_msvc_manifest
	v.PATH = path
	v.CPPPATH = includes
	v.LIBPATH = libdirs

	# c/c++ compiler
	v.CC = v.CXX = cxx
	v.CC_NAME = v.CXX_NAME = 'msvc'

	v.LINK = v.LINK_CXX = link
	if not v.LINK_CC:
		v.LINK_CC = v.LINK_CXX

	v.AR = ar
	v.MT = mt
	v.MTFLAGS = v.ARFLAGS = ['/NOLOGO']


	conf.check_tool('winres')

	if not conf.env.WINRC:
		warn('Resource compiler not found. Compiling resource file is disabled')

	# environment flags
	try: v.prepend_value('CPPPATH', conf.environ['INCLUDE'])
	except KeyError: pass
	try: v.prepend_value('LIBPATH', conf.environ['LIB'])
	except KeyError: pass
|
||||
|
||||
@conftest
def msvc_common_flags(conf):
	"""Populate the environment with the MSVC flag templates and defaults
	(defines, include/lib path templates, subsystem/CRT variants, debug
	levels, and the shlib/staticlib/program patterns)."""
	v = conf.env

	v['CPPFLAGS']     = ['/W3', '/nologo']

	v['CCDEFINES_ST']     = '/D%s'
	v['CXXDEFINES_ST']    = '/D%s'

	# TODO just use _WIN32, which defined by the compiler itself!
	v['CCDEFINES']    = ['WIN32'] # avoid using this, any compiler predefines the _WIN32 marcro anyway
	v['CXXDEFINES']   = ['WIN32'] # avoid using this, any compiler predefines the _WIN32 marcro anyway

	v['_CCINCFLAGS']  = []
	v['_CCDEFFLAGS']  = []
	v['_CXXINCFLAGS'] = []
	v['_CXXDEFFLAGS'] = []

	v['CC_SRC_F']     = ''
	v['CC_TGT_F']     = ['/c', '/Fo']
	v['CXX_SRC_F']    = ''
	v['CXX_TGT_F']    = ['/c', '/Fo']

	v['CPPPATH_ST']   = '/I%s' # template for adding include paths

	v['AR_TGT_F'] = v['CCLNK_TGT_F'] = v['CXXLNK_TGT_F'] = '/OUT:'

	# Subsystem specific flags
	v['CPPFLAGS_CONSOLE']   = ['/SUBSYSTEM:CONSOLE']
	v['CPPFLAGS_NATIVE']    = ['/SUBSYSTEM:NATIVE']
	v['CPPFLAGS_POSIX']     = ['/SUBSYSTEM:POSIX']
	v['CPPFLAGS_WINDOWS']   = ['/SUBSYSTEM:WINDOWS']
	v['CPPFLAGS_WINDOWSCE'] = ['/SUBSYSTEM:WINDOWSCE']

	# CRT specific flags
	v['CPPFLAGS_CRT_MULTITHREADED'] = ['/MT']
	v['CPPFLAGS_CRT_MULTITHREADED_DLL'] = ['/MD']

	# TODO these are defined by the compiler itself!
	v['CPPDEFINES_CRT_MULTITHREADED'] = ['_MT'] # this is defined by the compiler itself!
	v['CPPDEFINES_CRT_MULTITHREADED_DLL'] = ['_MT', '_DLL'] # these are defined by the compiler itself!

	v['CPPFLAGS_CRT_MULTITHREADED_DBG'] = ['/MTd']
	v['CPPFLAGS_CRT_MULTITHREADED_DLL_DBG'] = ['/MDd']

	# TODO these are defined by the compiler itself!
	v['CPPDEFINES_CRT_MULTITHREADED_DBG'] = ['_DEBUG', '_MT'] # these are defined by the compiler itself!
	v['CPPDEFINES_CRT_MULTITHREADED_DLL_DBG'] = ['_DEBUG', '_MT', '_DLL'] # these are defined by the compiler itself!

	# compiler debug levels
	v['CCFLAGS'] = ['/TC']
	v['CCFLAGS_OPTIMIZED'] = ['/O2', '/DNDEBUG']
	v['CCFLAGS_RELEASE'] = ['/O2', '/DNDEBUG']
	v['CCFLAGS_DEBUG'] = ['/Od', '/RTC1', '/ZI']
	v['CCFLAGS_ULTRADEBUG'] = ['/Od', '/RTC1', '/ZI']

	v['CXXFLAGS'] = ['/TP', '/EHsc']
	v['CXXFLAGS_OPTIMIZED'] = ['/O2', '/DNDEBUG']
	v['CXXFLAGS_RELEASE'] = ['/O2', '/DNDEBUG']

	v['CXXFLAGS_DEBUG'] = ['/Od', '/RTC1', '/ZI']
	v['CXXFLAGS_ULTRADEBUG'] = ['/Od', '/RTC1', '/ZI']

	# linker
	v['LIB'] = []

	v['LIB_ST']            = '%s.lib' # template for adding libs
	v['LIBPATH_ST']        = '/LIBPATH:%s' # template for adding libpaths
	v['STATICLIB_ST']      = 'lib%s.lib' # Note: to be able to distinguish between a static lib and a dll import lib, it's a good pratice to name the static lib 'lib%s.lib' and the dll import lib '%s.lib'
	v['STATICLIBPATH_ST']  = '/LIBPATH:%s'

	v['LINKFLAGS'] = ['/NOLOGO']
	if v['MSVC_MANIFEST']:
		v.append_value('LINKFLAGS', '/MANIFEST')
	v['LINKFLAGS_DEBUG']      = ['/DEBUG']
	v['LINKFLAGS_ULTRADEBUG'] = ['/DEBUG']

	# shared library
	v['shlib_CCFLAGS']  = ['']
	v['shlib_CXXFLAGS'] = ['']
	v['shlib_LINKFLAGS']= ['/DLL']
	v['shlib_PATTERN']  = '%s.dll'
	v['implib_PATTERN'] = '%s.lib'
	v['IMPLIB_ST']      = '/IMPLIB:%s'

	# static library
	v['staticlib_LINKFLAGS'] = ['']
	v['staticlib_PATTERN']   = 'lib%s.lib' # Note: to be able to distinguish between a static lib and a dll import lib, it's a good pratice to name the static lib 'lib%s.lib' and the dll import lib '%s.lib'

	# program
	v['program_PATTERN']     = '%s.exe'
|
||||
|
||||
|
||||
#######################################################################################################
|
||||
##### conf above, build below
|
||||
|
||||
@after('apply_link')
@feature('c', 'cc', 'cxx')
def apply_flags_msvc(self):
	"""Add /subsystem flags and, for debug links, register the .pdb output
	so it is tracked and installed alongside the binary."""
	if self.env.CC_NAME != 'msvc' or not self.link_task:
		return

	subsystem = getattr(self, 'subsystem', '')
	if subsystem:
		subsystem = '/subsystem:%s' % subsystem
		# static libs get the flag on the archiver, everything else on the linker
		flags = 'cstaticlib' in self.features and 'ARFLAGS' or 'LINKFLAGS'
		self.env.append_value(flags, subsystem)

	if getattr(self, 'link_task', None) and not 'cstaticlib' in self.features:
		for f in self.env.LINKFLAGS:
			d = f.lower()
			if d[1:] == 'debug':
				pdbnode = self.link_task.outputs[0].change_ext('.pdb')
				pdbfile = pdbnode.bldpath(self.env)
				self.link_task.outputs.append(pdbnode)
				self.bld.install_files(self.install_path, [pdbnode], env=self.env)
				break
|
||||
|
||||
@feature('cprogram', 'cshlib', 'cstaticlib')
@after('apply_lib_vars')
@before('apply_obj_vars')
def apply_obj_vars_msvc(self):
	"""MSVC replacement for the generic apply_obj_vars: translate LIBPATH /
	LIB / STATICLIB env values into /LIBPATH: and *.lib linker flags."""
	if self.env['CC_NAME'] != 'msvc':
		return

	# suppress the generic gcc-style method; this one takes over
	try:
		self.meths.remove('apply_obj_vars')
	except ValueError:
		pass

	libpaths = getattr(self, 'libpaths', [])
	if not libpaths: self.libpaths = libpaths

	env = self.env
	app = env.append_unique

	cpppath_st       = env['CPPPATH_ST']
	lib_st           = env['LIB_ST']
	staticlib_st     = env['STATICLIB_ST']
	libpath_st       = env['LIBPATH_ST']
	staticlibpath_st = env['STATICLIBPATH_ST']

	for i in env['LIBPATH']:
		app('LINKFLAGS', libpath_st % i)
		if not libpaths.count(i):
			libpaths.append(i)

	for i in env['LIBPATH']:
		app('LINKFLAGS', staticlibpath_st % i)
		if not libpaths.count(i):
			libpaths.append(i)

	# i doubt that anyone will make a fully static binary anyway
	if not env['FULLSTATIC']:
		if env['STATICLIB'] or env['LIB']:
			app('LINKFLAGS', env['SHLIB_MARKER']) # TODO does SHLIB_MARKER work?

	for i in env['STATICLIB']:
		app('LINKFLAGS', staticlib_st % i)

	for i in env['LIB']:
		app('LINKFLAGS', lib_st % i)
|
||||
|
||||
# split the manifest file processing from the link task, like for the rc processing
|
||||
|
||||
# split the manifest file processing from the link task, like for the rc processing

@feature('cprogram', 'cshlib')
@after('apply_link')
def apply_manifest(self):
	"""Special linker for MSVC with support for embedding manifests into DLL's
	and executables compiled by Visual Studio 2005 or probably later. Without
	the manifest file, the binaries are unusable.
	See: http://msdn2.microsoft.com/en-us/library/ms235542(VS.80).aspx"""

	if self.env.CC_NAME == 'msvc' and self.env.MSVC_MANIFEST:
		out_node = self.link_task.outputs[0]
		man_node = out_node.parent.find_or_declare(out_node.name + '.manifest')
		self.link_task.outputs.append(man_node)
		# flag picked up by exec_command_msvc after a successful link
		self.link_task.do_manifest = True
|
||||
|
||||
def exec_mf(self):
	"""Embed the generated .manifest into the freshly linked binary with mt.exe.

	Called on a link task after a successful link; returns the mt.exe exit
	status (0 when no manifest tool or no manifest output is present).
	"""
	env = self.env
	mtool = env['MT']
	if not mtool:
		return 0

	self.do_manifest = False

	outfile = self.outputs[0].bldpath(env)

	manifest = None
	for out_node in self.outputs:
		if out_node.name.endswith('.manifest'):
			manifest = out_node.bldpath(env)
			break
	if manifest is None:
		# Should never get here.  If we do, it means the manifest file was
		# never added to the outputs list, thus we don't have a manifest file
		# to embed, so we just return.
		return 0

	# embedding mode. Different for EXE's and DLL's.
	# see: http://msdn2.microsoft.com/en-us/library/ms235591(VS.80).aspx
	mode = ''
	if 'cprogram' in self.generator.features:
		mode = '1'
	elif 'cshlib' in self.generator.features:
		mode = '2'

	debug('msvc: embedding manifest')
	#flags = ' '.join(env['MTFLAGS'] or [])

	lst = []
	lst.extend([env['MT']])
	lst.extend(Utils.to_list(env['MTFLAGS']))
	lst.extend(Utils.to_list("-manifest"))
	lst.extend(Utils.to_list(manifest))
	lst.extend(Utils.to_list("-outputresource:%s;%s" % (outfile, mode)))

	#cmd='%s %s -manifest "%s" -outputresource:"%s";#%s' % (mtool, flags,
	#	manifest, outfile, mode)
	lst = [lst]
	return self.exec_command(*lst)
|
||||
|
||||
########## stupid evil command modification: concatenate the tokens /Fx, /doc, and /x: with the next token
|
||||
|
||||
########## stupid evil command modification: concatenate the tokens /Fx, /doc, and /x: with the next token

def exec_command_msvc(self, *k, **kw):
	"instead of quoting all the paths and keep using the shell, we can just join the options msvc is interested in"
	if self.env['CC_NAME'] == 'msvc':
		if isinstance(k[0], list):
			lst = []
			carry = ''
			for a in k[0]:
				# /F<x>, /doc and any flag ending in ':' take their argument
				# in the next token; glue the two together for cl.exe
				if len(a) == 3 and a.startswith('/F') or a == '/doc' or a[-1] == ':':
					carry = a
				else:
					lst.append(carry + a)
					carry = ''
			k = [lst]

		env = dict(os.environ)
		env.update(PATH = ';'.join(self.env['PATH']))
		kw['env'] = env

	ret = self.generator.bld.exec_command(*k, **kw)
	if ret: return ret
	# link tasks flagged by apply_manifest still need the manifest embedded
	if getattr(self, 'do_manifest', None):
		ret = exec_mf(self)
	return ret
|
||||
|
||||
# Monkey-patch every compile/link task class so that its commands go through
# exec_command_msvc (argument gluing + PATH setup + manifest embedding).
for k in 'cc cxx winrc cc_link cxx_link static_link qxx'.split():
	cls = Task.TaskBase.classes.get(k, None)
	if cls:
		cls.exec_command = exec_command_msvc
|
48
third_party/waf/wafadmin/Tools/nasm.py
vendored
48
third_party/waf/wafadmin/Tools/nasm.py
vendored
@ -1,48 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# Thomas Nagy, 2008
|
||||
|
||||
"""
|
||||
Nasm processing
|
||||
"""
|
||||
|
||||
import os
|
||||
import TaskGen, Task, Utils
|
||||
from TaskGen import taskgen, before, extension
|
||||
|
||||
nasm_str = '${NASM} ${NASM_FLAGS} ${NASM_INCLUDES} ${SRC} -o ${TGT}'
|
||||
|
||||
EXT_NASM = ['.s', '.S', '.asm', '.ASM', '.spp', '.SPP']
|
||||
|
||||
@before('apply_link')
def apply_nasm_vars(self):
	"""Transfer per-task-generator nasm_flags and include paths into the env."""

	# flags
	if hasattr(self, 'nasm_flags'):
		for flag in self.to_list(self.nasm_flags):
			self.env.append_value('NASM_FLAGS', flag)

	# includes - well, if we suppose it works with c processing
	if hasattr(self, 'includes'):
		for inc in self.to_list(self.includes):
			node = self.path.find_dir(inc)
			if not node:
				raise Utils.WafError('cannot find the dir' + inc)
			# add both the source and the build variants of the directory
			self.env.append_value('NASM_INCLUDES', '-I%s' % node.srcpath(self.env))
			self.env.append_value('NASM_INCLUDES', '-I%s' % node.bldpath(self.env))
|
||||
|
||||
@extension(EXT_NASM)
def nasm_file(self, node):
	"""Create a 'nasm' task for each assembly source file found."""
	try: obj_ext = self.obj_ext
	except AttributeError: obj_ext = '_%d.o' % self.idx

	task = self.create_task('nasm', node, node.change_ext(obj_ext))
	self.compiled_tasks.append(task)

	# make sure the env vars are populated before linking
	self.meths.append('apply_nasm_vars')
|
||||
|
||||
# create our action here
|
||||
Task.simple_task_type('nasm', nasm_str, color='BLUE', ext_out='.o', shell=False)
|
||||
|
||||
def detect(conf):
|
||||
nasm = conf.find_program(['nasm', 'yasm'], var='NASM', mandatory=True)
|
297
third_party/waf/wafadmin/Tools/ocaml.py
vendored
297
third_party/waf/wafadmin/Tools/ocaml.py
vendored
@ -1,297 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# Thomas Nagy, 2006 (ita)
|
||||
|
||||
"ocaml support"
|
||||
|
||||
import os, re
|
||||
import TaskGen, Utils, Task, Build
|
||||
from Logs import error
|
||||
from TaskGen import taskgen, feature, before, after, extension
|
||||
|
||||
EXT_MLL = ['.mll']
|
||||
EXT_MLY = ['.mly']
|
||||
EXT_MLI = ['.mli']
|
||||
EXT_MLC = ['.c']
|
||||
EXT_ML = ['.ml']
|
||||
|
||||
open_re = re.compile('^\s*open\s+([a-zA-Z]+)(;;){0,1}$', re.M)
|
||||
foo = re.compile(r"""(\(\*)|(\*\))|("(\\.|[^"\\])*"|'(\\.|[^'\\])*'|.[^()*"'\\]*)""", re.M)
|
||||
def filter_comments(txt):
|
||||
meh = [0]
|
||||
def repl(m):
|
||||
if m.group(1): meh[0] += 1
|
||||
elif m.group(2): meh[0] -= 1
|
||||
elif not meh[0]: return m.group(0)
|
||||
return ''
|
||||
return foo.sub(repl, txt)
|
||||
|
||||
def scan(self):
|
||||
node = self.inputs[0]
|
||||
code = filter_comments(node.read(self.env))
|
||||
|
||||
global open_re
|
||||
names = []
|
||||
import_iterator = open_re.finditer(code)
|
||||
if import_iterator:
|
||||
for import_match in import_iterator:
|
||||
names.append(import_match.group(1))
|
||||
found_lst = []
|
||||
raw_lst = []
|
||||
for name in names:
|
||||
nd = None
|
||||
for x in self.incpaths:
|
||||
nd = x.find_resource(name.lower()+'.ml')
|
||||
if not nd: nd = x.find_resource(name+'.ml')
|
||||
if nd:
|
||||
found_lst.append(nd)
|
||||
break
|
||||
else:
|
||||
raw_lst.append(name)
|
||||
|
||||
return (found_lst, raw_lst)
|
||||
|
||||
native_lst=['native', 'all', 'c_object']
|
||||
bytecode_lst=['bytecode', 'all']
|
||||
class ocaml_taskgen(TaskGen.task_gen):
|
||||
def __init__(self, *k, **kw):
|
||||
TaskGen.task_gen.__init__(self, *k, **kw)
|
||||
|
||||
@feature('ocaml')
|
||||
def init_ml(self):
|
||||
Utils.def_attrs(self,
|
||||
type = 'all',
|
||||
incpaths_lst = [],
|
||||
bld_incpaths_lst = [],
|
||||
mlltasks = [],
|
||||
mlytasks = [],
|
||||
mlitasks = [],
|
||||
native_tasks = [],
|
||||
bytecode_tasks = [],
|
||||
linktasks = [],
|
||||
bytecode_env = None,
|
||||
native_env = None,
|
||||
compiled_tasks = [],
|
||||
includes = '',
|
||||
uselib = '',
|
||||
are_deps_set = 0)
|
||||
|
||||
@feature('ocaml')
|
||||
@after('init_ml')
|
||||
def init_envs_ml(self):
|
||||
|
||||
self.islibrary = getattr(self, 'islibrary', False)
|
||||
|
||||
global native_lst, bytecode_lst
|
||||
self.native_env = None
|
||||
if self.type in native_lst:
|
||||
self.native_env = self.env.copy()
|
||||
if self.islibrary: self.native_env['OCALINKFLAGS'] = '-a'
|
||||
|
||||
self.bytecode_env = None
|
||||
if self.type in bytecode_lst:
|
||||
self.bytecode_env = self.env.copy()
|
||||
if self.islibrary: self.bytecode_env['OCALINKFLAGS'] = '-a'
|
||||
|
||||
if self.type == 'c_object':
|
||||
self.native_env.append_unique('OCALINKFLAGS_OPT', '-output-obj')
|
||||
|
||||
@feature('ocaml')
|
||||
@before('apply_vars_ml')
|
||||
@after('init_envs_ml')
|
||||
def apply_incpaths_ml(self):
|
||||
inc_lst = self.includes.split()
|
||||
lst = self.incpaths_lst
|
||||
for dir in inc_lst:
|
||||
node = self.path.find_dir(dir)
|
||||
if not node:
|
||||
error("node not found: " + str(dir))
|
||||
continue
|
||||
self.bld.rescan(node)
|
||||
if not node in lst: lst.append(node)
|
||||
self.bld_incpaths_lst.append(node)
|
||||
# now the nodes are added to self.incpaths_lst
|
||||
|
||||
@feature('ocaml')
|
||||
@before('apply_core')
|
||||
def apply_vars_ml(self):
|
||||
for i in self.incpaths_lst:
|
||||
if self.bytecode_env:
|
||||
app = self.bytecode_env.append_value
|
||||
app('OCAMLPATH', '-I')
|
||||
app('OCAMLPATH', i.srcpath(self.env))
|
||||
app('OCAMLPATH', '-I')
|
||||
app('OCAMLPATH', i.bldpath(self.env))
|
||||
|
||||
if self.native_env:
|
||||
app = self.native_env.append_value
|
||||
app('OCAMLPATH', '-I')
|
||||
app('OCAMLPATH', i.bldpath(self.env))
|
||||
app('OCAMLPATH', '-I')
|
||||
app('OCAMLPATH', i.srcpath(self.env))
|
||||
|
||||
varnames = ['INCLUDES', 'OCAMLFLAGS', 'OCALINKFLAGS', 'OCALINKFLAGS_OPT']
|
||||
for name in self.uselib.split():
|
||||
for vname in varnames:
|
||||
cnt = self.env[vname+'_'+name]
|
||||
if cnt:
|
||||
if self.bytecode_env: self.bytecode_env.append_value(vname, cnt)
|
||||
if self.native_env: self.native_env.append_value(vname, cnt)
|
||||
|
||||
@feature('ocaml')
|
||||
@after('apply_core')
|
||||
def apply_link_ml(self):
|
||||
|
||||
if self.bytecode_env:
|
||||
ext = self.islibrary and '.cma' or '.run'
|
||||
|
||||
linktask = self.create_task('ocalink')
|
||||
linktask.bytecode = 1
|
||||
linktask.set_outputs(self.path.find_or_declare(self.target + ext))
|
||||
linktask.obj = self
|
||||
linktask.env = self.bytecode_env
|
||||
self.linktasks.append(linktask)
|
||||
|
||||
if self.native_env:
|
||||
if self.type == 'c_object': ext = '.o'
|
||||
elif self.islibrary: ext = '.cmxa'
|
||||
else: ext = ''
|
||||
|
||||
linktask = self.create_task('ocalinkx')
|
||||
linktask.set_outputs(self.path.find_or_declare(self.target + ext))
|
||||
linktask.obj = self
|
||||
linktask.env = self.native_env
|
||||
self.linktasks.append(linktask)
|
||||
|
||||
# we produce a .o file to be used by gcc
|
||||
self.compiled_tasks.append(linktask)
|
||||
|
||||
@extension(EXT_MLL)
|
||||
def mll_hook(self, node):
|
||||
mll_task = self.create_task('ocamllex', node, node.change_ext('.ml'), env=self.native_env)
|
||||
self.mlltasks.append(mll_task)
|
||||
|
||||
self.allnodes.append(mll_task.outputs[0])
|
||||
|
||||
@extension(EXT_MLY)
|
||||
def mly_hook(self, node):
|
||||
mly_task = self.create_task('ocamlyacc', node, [node.change_ext('.ml'), node.change_ext('.mli')], env=self.native_env)
|
||||
self.mlytasks.append(mly_task)
|
||||
self.allnodes.append(mly_task.outputs[0])
|
||||
|
||||
task = self.create_task('ocamlcmi', mly_task.outputs[1], mly_task.outputs[1].change_ext('.cmi'), env=self.native_env)
|
||||
|
||||
@extension(EXT_MLI)
|
||||
def mli_hook(self, node):
|
||||
task = self.create_task('ocamlcmi', node, node.change_ext('.cmi'), env=self.native_env)
|
||||
self.mlitasks.append(task)
|
||||
|
||||
@extension(EXT_MLC)
|
||||
def mlc_hook(self, node):
|
||||
task = self.create_task('ocamlcc', node, node.change_ext('.o'), env=self.native_env)
|
||||
self.compiled_tasks.append(task)
|
||||
|
||||
@extension(EXT_ML)
|
||||
def ml_hook(self, node):
|
||||
if self.native_env:
|
||||
task = self.create_task('ocamlx', node, node.change_ext('.cmx'), env=self.native_env)
|
||||
task.obj = self
|
||||
task.incpaths = self.bld_incpaths_lst
|
||||
self.native_tasks.append(task)
|
||||
|
||||
if self.bytecode_env:
|
||||
task = self.create_task('ocaml', node, node.change_ext('.cmo'), env=self.bytecode_env)
|
||||
task.obj = self
|
||||
task.bytecode = 1
|
||||
task.incpaths = self.bld_incpaths_lst
|
||||
self.bytecode_tasks.append(task)
|
||||
|
||||
def compile_may_start(self):
|
||||
if not getattr(self, 'flag_deps', ''):
|
||||
self.flag_deps = 1
|
||||
|
||||
# the evil part is that we can only compute the dependencies after the
|
||||
# source files can be read (this means actually producing the source files)
|
||||
if getattr(self, 'bytecode', ''): alltasks = self.obj.bytecode_tasks
|
||||
else: alltasks = self.obj.native_tasks
|
||||
|
||||
self.signature() # ensure that files are scanned - unfortunately
|
||||
tree = self.generator.bld
|
||||
env = self.env
|
||||
for node in self.inputs:
|
||||
lst = tree.node_deps[self.unique_id()]
|
||||
for depnode in lst:
|
||||
for t in alltasks:
|
||||
if t == self: continue
|
||||
if depnode in t.inputs:
|
||||
self.set_run_after(t)
|
||||
|
||||
# TODO necessary to get the signature right - for now
|
||||
delattr(self, 'cache_sig')
|
||||
self.signature()
|
||||
|
||||
return Task.Task.runnable_status(self)
|
||||
|
||||
b = Task.simple_task_type
|
||||
cls = b('ocamlx', '${OCAMLOPT} ${OCAMLPATH} ${OCAMLFLAGS} ${INCLUDES} -c -o ${TGT} ${SRC}', color='GREEN', shell=False)
|
||||
cls.runnable_status = compile_may_start
|
||||
cls.scan = scan
|
||||
|
||||
b = Task.simple_task_type
|
||||
cls = b('ocaml', '${OCAMLC} ${OCAMLPATH} ${OCAMLFLAGS} ${INCLUDES} -c -o ${TGT} ${SRC}', color='GREEN', shell=False)
|
||||
cls.runnable_status = compile_may_start
|
||||
cls.scan = scan
|
||||
|
||||
|
||||
b('ocamlcmi', '${OCAMLC} ${OCAMLPATH} ${INCLUDES} -o ${TGT} -c ${SRC}', color='BLUE', before="ocaml ocamlcc ocamlx")
|
||||
b('ocamlcc', 'cd ${TGT[0].bld_dir(env)} && ${OCAMLOPT} ${OCAMLFLAGS} ${OCAMLPATH} ${INCLUDES} -c ${SRC[0].abspath(env)}', color='GREEN')
|
||||
|
||||
b('ocamllex', '${OCAMLLEX} ${SRC} -o ${TGT}', color='BLUE', before="ocamlcmi ocaml ocamlcc")
|
||||
b('ocamlyacc', '${OCAMLYACC} -b ${TGT[0].bld_base(env)} ${SRC}', color='BLUE', before="ocamlcmi ocaml ocamlcc")
|
||||
|
||||
|
||||
def link_may_start(self):
|
||||
if not getattr(self, 'order', ''):
|
||||
|
||||
# now reorder the inputs given the task dependencies
|
||||
if getattr(self, 'bytecode', 0): alltasks = self.obj.bytecode_tasks
|
||||
else: alltasks = self.obj.native_tasks
|
||||
|
||||
# this part is difficult, we do not have a total order on the tasks
|
||||
# if the dependencies are wrong, this may not stop
|
||||
seen = []
|
||||
pendant = []+alltasks
|
||||
while pendant:
|
||||
task = pendant.pop(0)
|
||||
if task in seen: continue
|
||||
for x in task.run_after:
|
||||
if not x in seen:
|
||||
pendant.append(task)
|
||||
break
|
||||
else:
|
||||
seen.append(task)
|
||||
self.inputs = [x.outputs[0] for x in seen]
|
||||
self.order = 1
|
||||
return Task.Task.runnable_status(self)
|
||||
|
||||
act = b('ocalink', '${OCAMLC} -o ${TGT} ${INCLUDES} ${OCALINKFLAGS} ${SRC}', color='YELLOW', after="ocaml ocamlcc")
|
||||
act.runnable_status = link_may_start
|
||||
act = b('ocalinkx', '${OCAMLOPT} -o ${TGT} ${INCLUDES} ${OCALINKFLAGS_OPT} ${SRC}', color='YELLOW', after="ocamlx ocamlcc")
|
||||
act.runnable_status = link_may_start
|
||||
|
||||
def detect(conf):
|
||||
opt = conf.find_program('ocamlopt', var='OCAMLOPT')
|
||||
occ = conf.find_program('ocamlc', var='OCAMLC')
|
||||
if (not opt) or (not occ):
|
||||
conf.fatal('The objective caml compiler was not found:\ninstall it or make it available in your PATH')
|
||||
|
||||
v = conf.env
|
||||
v['OCAMLC'] = occ
|
||||
v['OCAMLOPT'] = opt
|
||||
v['OCAMLLEX'] = conf.find_program('ocamllex', var='OCAMLLEX')
|
||||
v['OCAMLYACC'] = conf.find_program('ocamlyacc', var='OCAMLYACC')
|
||||
v['OCAMLFLAGS'] = ''
|
||||
v['OCAMLLIB'] = Utils.cmd_output(conf.env['OCAMLC']+' -where').strip()+os.sep
|
||||
v['LIBPATH_OCAML'] = Utils.cmd_output(conf.env['OCAMLC']+' -where').strip()+os.sep
|
||||
v['CPPPATH_OCAML'] = Utils.cmd_output(conf.env['OCAMLC']+' -where').strip()+os.sep
|
||||
v['LIB_OCAML'] = 'camlrun'
|
187
third_party/waf/wafadmin/Tools/osx.py
vendored
187
third_party/waf/wafadmin/Tools/osx.py
vendored
@ -1,187 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# Thomas Nagy 2008
|
||||
|
||||
"""MacOSX related tools
|
||||
|
||||
To compile an executable into a Mac application bundle (a .app), set its 'mac_app' attribute
|
||||
obj.mac_app = True
|
||||
|
||||
To make a bundled shared library (a .bundle), set the 'mac_bundle' attribute:
|
||||
obj.mac_bundle = True
|
||||
"""
|
||||
|
||||
import os, shutil, sys, platform
|
||||
import TaskGen, Task, Build, Options, Utils
|
||||
from TaskGen import taskgen, feature, after, before
|
||||
from Logs import error, debug
|
||||
|
||||
# plist template
|
||||
app_info = '''
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<!DOCTYPE plist SYSTEM "file://localhost/System/Library/DTDs/PropertyList.dtd">
|
||||
<plist version="0.9">
|
||||
<dict>
|
||||
<key>CFBundlePackageType</key>
|
||||
<string>APPL</string>
|
||||
<key>CFBundleGetInfoString</key>
|
||||
<string>Created by Waf</string>
|
||||
<key>CFBundleSignature</key>
|
||||
<string>????</string>
|
||||
<key>NOTE</key>
|
||||
<string>THIS IS A GENERATED FILE, DO NOT MODIFY</string>
|
||||
<key>CFBundleExecutable</key>
|
||||
<string>%s</string>
|
||||
</dict>
|
||||
</plist>
|
||||
'''
|
||||
|
||||
# see WAF issue 285
|
||||
# and also http://trac.macports.org/ticket/17059
|
||||
@feature('c', 'cc', 'cxx')
|
||||
@before('apply_lib_vars')
|
||||
def set_macosx_deployment_target(self):
|
||||
if self.env['MACOSX_DEPLOYMENT_TARGET']:
|
||||
os.environ['MACOSX_DEPLOYMENT_TARGET'] = self.env['MACOSX_DEPLOYMENT_TARGET']
|
||||
elif 'MACOSX_DEPLOYMENT_TARGET' not in os.environ:
|
||||
if sys.platform == 'darwin':
|
||||
os.environ['MACOSX_DEPLOYMENT_TARGET'] = '.'.join(platform.mac_ver()[0].split('.')[:2])
|
||||
|
||||
@feature('c', 'cc', 'cxx')
|
||||
@after('apply_lib_vars')
|
||||
def apply_framework(self):
|
||||
for x in self.to_list(self.env['FRAMEWORKPATH']):
|
||||
frameworkpath_st = '-F%s'
|
||||
self.env.append_unique('CXXFLAGS', frameworkpath_st % x)
|
||||
self.env.append_unique('CCFLAGS', frameworkpath_st % x)
|
||||
self.env.append_unique('LINKFLAGS', frameworkpath_st % x)
|
||||
|
||||
for x in self.to_list(self.env['FRAMEWORK']):
|
||||
self.env.append_value('LINKFLAGS', ['-framework', x])
|
||||
|
||||
@taskgen
|
||||
def create_bundle_dirs(self, name, out):
|
||||
bld = self.bld
|
||||
dir = out.parent.get_dir(name)
|
||||
|
||||
if not dir:
|
||||
dir = out.__class__(name, out.parent, 1)
|
||||
bld.rescan(dir)
|
||||
contents = out.__class__('Contents', dir, 1)
|
||||
bld.rescan(contents)
|
||||
macos = out.__class__('MacOS', contents, 1)
|
||||
bld.rescan(macos)
|
||||
return dir
|
||||
|
||||
def bundle_name_for_output(out):
|
||||
name = out.name
|
||||
k = name.rfind('.')
|
||||
if k >= 0:
|
||||
name = name[:k] + '.app'
|
||||
else:
|
||||
name = name + '.app'
|
||||
return name
|
||||
|
||||
@taskgen
|
||||
@after('apply_link')
|
||||
@feature('cprogram')
|
||||
def create_task_macapp(self):
|
||||
"""Use env['MACAPP'] to force *all* executables to be transformed into Mac applications
|
||||
or use obj.mac_app = True to build specific targets as Mac apps"""
|
||||
if self.env['MACAPP'] or getattr(self, 'mac_app', False):
|
||||
apptask = self.create_task('macapp')
|
||||
apptask.set_inputs(self.link_task.outputs)
|
||||
|
||||
out = self.link_task.outputs[0]
|
||||
|
||||
name = bundle_name_for_output(out)
|
||||
dir = self.create_bundle_dirs(name, out)
|
||||
|
||||
n1 = dir.find_or_declare(['Contents', 'MacOS', out.name])
|
||||
|
||||
apptask.set_outputs([n1])
|
||||
apptask.chmod = 0755
|
||||
apptask.install_path = os.path.join(self.install_path, name, 'Contents', 'MacOS')
|
||||
self.apptask = apptask
|
||||
|
||||
@after('apply_link')
|
||||
@feature('cprogram')
|
||||
def create_task_macplist(self):
|
||||
"""Use env['MACAPP'] to force *all* executables to be transformed into Mac applications
|
||||
or use obj.mac_app = True to build specific targets as Mac apps"""
|
||||
if self.env['MACAPP'] or getattr(self, 'mac_app', False):
|
||||
# check if the user specified a plist before using our template
|
||||
if not getattr(self, 'mac_plist', False):
|
||||
self.mac_plist = app_info
|
||||
|
||||
plisttask = self.create_task('macplist')
|
||||
plisttask.set_inputs(self.link_task.outputs)
|
||||
|
||||
out = self.link_task.outputs[0]
|
||||
self.mac_plist = self.mac_plist % (out.name)
|
||||
|
||||
name = bundle_name_for_output(out)
|
||||
dir = self.create_bundle_dirs(name, out)
|
||||
|
||||
n1 = dir.find_or_declare(['Contents', 'Info.plist'])
|
||||
|
||||
plisttask.set_outputs([n1])
|
||||
plisttask.mac_plist = self.mac_plist
|
||||
plisttask.install_path = os.path.join(self.install_path, name, 'Contents')
|
||||
self.plisttask = plisttask
|
||||
|
||||
@after('apply_link')
|
||||
@feature('cshlib')
|
||||
def apply_link_osx(self):
|
||||
name = self.link_task.outputs[0].name
|
||||
if not self.install_path:
|
||||
return
|
||||
if getattr(self, 'vnum', None):
|
||||
name = name.replace('.dylib', '.%s.dylib' % self.vnum)
|
||||
|
||||
path = os.path.join(Utils.subst_vars(self.install_path, self.env), name)
|
||||
if '-dynamiclib' in self.env['LINKFLAGS']:
|
||||
self.env.append_value('LINKFLAGS', '-install_name')
|
||||
self.env.append_value('LINKFLAGS', path)
|
||||
|
||||
@before('apply_link', 'apply_lib_vars')
|
||||
@feature('c', 'cc', 'cxx')
|
||||
def apply_bundle(self):
|
||||
"""use env['MACBUNDLE'] to force all shlibs into mac bundles
|
||||
or use obj.mac_bundle = True for specific targets only"""
|
||||
if not ('cshlib' in self.features or 'shlib' in self.features): return
|
||||
if self.env['MACBUNDLE'] or getattr(self, 'mac_bundle', False):
|
||||
self.env['shlib_PATTERN'] = self.env['macbundle_PATTERN']
|
||||
uselib = self.uselib = self.to_list(self.uselib)
|
||||
if not 'MACBUNDLE' in uselib: uselib.append('MACBUNDLE')
|
||||
|
||||
@after('apply_link')
|
||||
@feature('cshlib')
|
||||
def apply_bundle_remove_dynamiclib(self):
|
||||
if self.env['MACBUNDLE'] or getattr(self, 'mac_bundle', False):
|
||||
if not getattr(self, 'vnum', None):
|
||||
try:
|
||||
self.env['LINKFLAGS'].remove('-dynamiclib')
|
||||
self.env['LINKFLAGS'].remove('-single_module')
|
||||
except ValueError:
|
||||
pass
|
||||
|
||||
# TODO REMOVE IN 1.6 (global variable)
|
||||
app_dirs = ['Contents', 'Contents/MacOS', 'Contents/Resources']
|
||||
|
||||
def app_build(task):
|
||||
env = task.env
|
||||
shutil.copy2(task.inputs[0].srcpath(env), task.outputs[0].abspath(env))
|
||||
|
||||
return 0
|
||||
|
||||
def plist_build(task):
|
||||
env = task.env
|
||||
f = open(task.outputs[0].abspath(env), "w")
|
||||
f.write(task.mac_plist)
|
||||
f.close()
|
||||
|
||||
return 0
|
||||
|
||||
Task.task_type_from_func('macapp', vars=[], func=app_build, after="cxx_link cc_link static_link")
|
||||
Task.task_type_from_func('macplist', vars=[], func=plist_build, after="cxx_link cc_link static_link")
|
108
third_party/waf/wafadmin/Tools/perl.py
vendored
108
third_party/waf/wafadmin/Tools/perl.py
vendored
@ -1,108 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# andersg at 0x63.nu 2007
|
||||
|
||||
import os
|
||||
import Task, Options, Utils
|
||||
from Configure import conf
|
||||
from TaskGen import extension, taskgen, feature, before
|
||||
|
||||
xsubpp_str = '${PERL} ${XSUBPP} -noprototypes -typemap ${EXTUTILS_TYPEMAP} ${SRC} > ${TGT}'
|
||||
EXT_XS = ['.xs']
|
||||
|
||||
@before('apply_incpaths', 'apply_type_vars', 'apply_lib_vars')
|
||||
@feature('perlext')
|
||||
def init_perlext(self):
|
||||
self.uselib = self.to_list(getattr(self, 'uselib', ''))
|
||||
if not 'PERL' in self.uselib: self.uselib.append('PERL')
|
||||
if not 'PERLEXT' in self.uselib: self.uselib.append('PERLEXT')
|
||||
self.env['shlib_PATTERN'] = self.env['perlext_PATTERN']
|
||||
|
||||
@extension(EXT_XS)
|
||||
def xsubpp_file(self, node):
|
||||
outnode = node.change_ext('.c')
|
||||
self.create_task('xsubpp', node, outnode)
|
||||
self.allnodes.append(outnode)
|
||||
|
||||
Task.simple_task_type('xsubpp', xsubpp_str, color='BLUE', before='cc cxx', shell=False)
|
||||
|
||||
@conf
|
||||
def check_perl_version(conf, minver=None):
|
||||
"""
|
||||
Checks if perl is installed.
|
||||
|
||||
If installed the variable PERL will be set in environment.
|
||||
|
||||
Perl binary can be overridden by --with-perl-binary config variable
|
||||
|
||||
"""
|
||||
|
||||
if getattr(Options.options, 'perlbinary', None):
|
||||
conf.env.PERL = Options.options.perlbinary
|
||||
else:
|
||||
conf.find_program('perl', var='PERL', mandatory=True)
|
||||
|
||||
try:
|
||||
version = Utils.cmd_output([conf.env.PERL, '-e', 'printf "%vd",$^V'])
|
||||
except:
|
||||
conf.fatal('could not determine the perl version')
|
||||
|
||||
conf.env.PERL_VERSION = version
|
||||
cver = ''
|
||||
if minver:
|
||||
try:
|
||||
ver = tuple(map(int, version.split('.')))
|
||||
except:
|
||||
conf.fatal('unsupported perl version %r' % version)
|
||||
if ver < minver:
|
||||
conf.fatal('perl is too old')
|
||||
|
||||
cver = '.'.join(map(str,minver))
|
||||
conf.check_message('perl', cver, True, version)
|
||||
|
||||
@conf
|
||||
def check_perl_module(conf, module):
|
||||
"""
|
||||
Check if specified perlmodule is installed.
|
||||
|
||||
Minimum version can be specified by specifying it after modulename
|
||||
like this:
|
||||
|
||||
conf.check_perl_module("Some::Module 2.92")
|
||||
"""
|
||||
cmd = [conf.env['PERL'], '-e', 'use %s' % module]
|
||||
r = Utils.pproc.call(cmd, stdout=Utils.pproc.PIPE, stderr=Utils.pproc.PIPE) == 0
|
||||
conf.check_message("perl module %s" % module, "", r)
|
||||
return r
|
||||
|
||||
@conf
|
||||
def check_perl_ext_devel(conf):
|
||||
"""
|
||||
Check for configuration needed to build perl extensions.
|
||||
|
||||
Sets different xxx_PERLEXT variables in the environment.
|
||||
|
||||
Also sets the ARCHDIR_PERL variable useful as installation path,
|
||||
which can be overridden by --with-perl-archdir
|
||||
"""
|
||||
if not conf.env.PERL:
|
||||
conf.fatal('perl detection is required first')
|
||||
|
||||
def read_out(cmd):
|
||||
return Utils.to_list(Utils.cmd_output([conf.env.PERL, '-MConfig', '-e', cmd]))
|
||||
|
||||
conf.env.LINKFLAGS_PERLEXT = read_out('print $Config{lddlflags}')
|
||||
conf.env.CPPPATH_PERLEXT = read_out('print "$Config{archlib}/CORE"')
|
||||
conf.env.CCFLAGS_PERLEXT = read_out('print "$Config{ccflags} $Config{cccdlflags}"')
|
||||
conf.env.XSUBPP = read_out('print "$Config{privlib}/ExtUtils/xsubpp$Config{exe_ext}"')
|
||||
conf.env.EXTUTILS_TYPEMAP = read_out('print "$Config{privlib}/ExtUtils/typemap"')
|
||||
conf.env.perlext_PATTERN = '%s.' + read_out('print $Config{dlext}')[0]
|
||||
|
||||
if getattr(Options.options, 'perlarchdir', None):
|
||||
conf.env.ARCHDIR_PERL = Options.options.perlarchdir
|
||||
else:
|
||||
conf.env.ARCHDIR_PERL = read_out('print $Config{sitearch}')[0]
|
||||
|
||||
def set_options(opt):
|
||||
opt.add_option("--with-perl-binary", type="string", dest="perlbinary", help = 'Specify alternate perl binary', default=None)
|
||||
opt.add_option("--with-perl-archdir", type="string", dest="perlarchdir", help = 'Specify directory where to install arch specific files', default=None)
|
837
third_party/waf/wafadmin/Tools/preproc.py
vendored
837
third_party/waf/wafadmin/Tools/preproc.py
vendored
@ -1,837 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# Thomas Nagy, 2006-2009 (ita)
|
||||
|
||||
"""
|
||||
C/C++ preprocessor for finding dependencies
|
||||
|
||||
Reasons for using the Waf preprocessor by default
|
||||
1. Some c/c++ extensions (Qt) require a custom preprocessor for obtaining the dependencies (.moc files)
|
||||
2. Not all compilers provide .d files for obtaining the dependencies (portability)
|
||||
3. A naive file scanner will not catch the constructs such as "#include foo()"
|
||||
4. A naive file scanner will catch unnecessary dependencies (change an unused header -> recompile everything)
|
||||
|
||||
Regarding the speed concerns:
|
||||
a. the preprocessing is performed only when files must be compiled
|
||||
b. the macros are evaluated only for #if/#elif/#include
|
||||
c. the time penalty is about 10%
|
||||
d. system headers are not scanned
|
||||
|
||||
Now if you do not want the Waf preprocessor, the tool "gccdeps" uses the .d files produced
|
||||
during the compilation to track the dependencies (useful when used with the boost libraries).
|
||||
It only works with gcc though, and it cannot be used with Qt builds. A dumb
|
||||
file scanner will be added in the future, so we will have most bahaviours.
|
||||
"""
|
||||
# TODO: more varargs, pragma once
|
||||
# TODO: dumb file scanner tracking all includes
|
||||
|
||||
import re, sys, os, string
|
||||
import Logs, Build, Utils
|
||||
from Logs import debug, error
|
||||
import traceback
|
||||
|
||||
class PreprocError(Utils.WafError):
|
||||
pass
|
||||
|
||||
POPFILE = '-'
|
||||
|
||||
|
||||
recursion_limit = 5000
|
||||
"do not loop too much on header inclusion"
|
||||
|
||||
go_absolute = 0
|
||||
"set to 1 to track headers on files in /usr/include - else absolute paths are ignored"
|
||||
|
||||
standard_includes = ['/usr/include']
|
||||
if sys.platform == "win32":
|
||||
standard_includes = []
|
||||
|
||||
use_trigraphs = 0
|
||||
'apply the trigraph rules first'
|
||||
|
||||
strict_quotes = 0
|
||||
"Keep <> for system includes (do not search for those includes)"
|
||||
|
||||
g_optrans = {
|
||||
'not':'!',
|
||||
'and':'&&',
|
||||
'bitand':'&',
|
||||
'and_eq':'&=',
|
||||
'or':'||',
|
||||
'bitor':'|',
|
||||
'or_eq':'|=',
|
||||
'xor':'^',
|
||||
'xor_eq':'^=',
|
||||
'compl':'~',
|
||||
}
|
||||
"these ops are for c++, to reset, set an empty dict"
|
||||
|
||||
# ignore #warning and #error
|
||||
re_lines = re.compile(\
|
||||
'^[ \t]*(#|%:)[ \t]*(ifdef|ifndef|if|else|elif|endif|include|import|define|undef|pragma)[ \t]*(.*)\r*$',
|
||||
re.IGNORECASE | re.MULTILINE)
|
||||
|
||||
re_mac = re.compile("^[a-zA-Z_]\w*")
|
||||
re_fun = re.compile('^[a-zA-Z_][a-zA-Z0-9_]*[(]')
|
||||
re_pragma_once = re.compile('^\s*once\s*', re.IGNORECASE)
|
||||
re_nl = re.compile('\\\\\r*\n', re.MULTILINE)
|
||||
re_cpp = re.compile(
|
||||
r"""(/\*[^*]*\*+(?:[^/*][^*]*\*+)*/)|//[^\n]*|("(?:\\.|[^"\\])*"|'(?:\\.|[^'\\])*'|.[^/"'\\]*)""",
|
||||
re.MULTILINE)
|
||||
trig_def = [('??'+a, b) for a, b in zip("=-/!'()<>", r'#~\|^[]{}')]
|
||||
chr_esc = {'0':0, 'a':7, 'b':8, 't':9, 'n':10, 'f':11, 'v':12, 'r':13, '\\':92, "'":39}
|
||||
|
||||
NUM = 'i'
|
||||
OP = 'O'
|
||||
IDENT = 'T'
|
||||
STR = 's'
|
||||
CHAR = 'c'
|
||||
|
||||
tok_types = [NUM, STR, IDENT, OP]
|
||||
exp_types = [
|
||||
r"""0[xX](?P<hex>[a-fA-F0-9]+)(?P<qual1>[uUlL]*)|L*?'(?P<char>(\\.|[^\\'])+)'|(?P<n1>\d+)[Ee](?P<exp0>[+-]*?\d+)(?P<float0>[fFlL]*)|(?P<n2>\d*\.\d+)([Ee](?P<exp1>[+-]*?\d+))?(?P<float1>[fFlL]*)|(?P<n4>\d+\.\d*)([Ee](?P<exp2>[+-]*?\d+))?(?P<float2>[fFlL]*)|(?P<oct>0*)(?P<n0>\d+)(?P<qual2>[uUlL]*)""",
|
||||
r'L?"([^"\\]|\\.)*"',
|
||||
r'[a-zA-Z_]\w*',
|
||||
r'%:%:|<<=|>>=|\.\.\.|<<|<%|<:|<=|>>|>=|\+\+|\+=|--|->|-=|\*=|/=|%:|%=|%>|==|&&|&=|\|\||\|=|\^=|:>|!=|##|[\(\)\{\}\[\]<>\?\|\^\*\+&=:!#;,%/\-\?\~\.]',
|
||||
]
|
||||
re_clexer = re.compile('|'.join(["(?P<%s>%s)" % (name, part) for name, part in zip(tok_types, exp_types)]), re.M)
|
||||
|
||||
accepted = 'a'
|
||||
ignored = 'i'
|
||||
undefined = 'u'
|
||||
skipped = 's'
|
||||
|
||||
def repl(m):
|
||||
if m.group(1):
|
||||
return ' '
|
||||
s = m.group(2)
|
||||
if s is None:
|
||||
return ''
|
||||
return s
|
||||
|
||||
def filter_comments(filename):
|
||||
# return a list of tuples : keyword, line
|
||||
code = Utils.readf(filename)
|
||||
if use_trigraphs:
|
||||
for (a, b) in trig_def: code = code.split(a).join(b)
|
||||
code = re_nl.sub('', code)
|
||||
code = re_cpp.sub(repl, code)
|
||||
return [(m.group(2), m.group(3)) for m in re.finditer(re_lines, code)]
|
||||
|
||||
prec = {}
|
||||
# op -> number, needed for such expressions: #if 1 && 2 != 0
|
||||
ops = ['* / %', '+ -', '<< >>', '< <= >= >', '== !=', '& | ^', '&& ||', ',']
|
||||
for x in range(len(ops)):
|
||||
syms = ops[x]
|
||||
for u in syms.split():
|
||||
prec[u] = x
|
||||
|
||||
def reduce_nums(val_1, val_2, val_op):
|
||||
"""apply arithmetic rules and try to return an integer result"""
|
||||
#print val_1, val_2, val_op
|
||||
|
||||
# now perform the operation, make certain a and b are numeric
|
||||
try: a = 0 + val_1
|
||||
except TypeError: a = int(val_1)
|
||||
try: b = 0 + val_2
|
||||
except TypeError: b = int(val_2)
|
||||
|
||||
d = val_op
|
||||
if d == '%': c = a%b
|
||||
elif d=='+': c = a+b
|
||||
elif d=='-': c = a-b
|
||||
elif d=='*': c = a*b
|
||||
elif d=='/': c = a/b
|
||||
elif d=='^': c = a^b
|
||||
elif d=='|': c = a|b
|
||||
elif d=='||': c = int(a or b)
|
||||
elif d=='&': c = a&b
|
||||
elif d=='&&': c = int(a and b)
|
||||
elif d=='==': c = int(a == b)
|
||||
elif d=='!=': c = int(a != b)
|
||||
elif d=='<=': c = int(a <= b)
|
||||
elif d=='<': c = int(a < b)
|
||||
elif d=='>': c = int(a > b)
|
||||
elif d=='>=': c = int(a >= b)
|
||||
elif d=='^': c = int(a^b)
|
||||
elif d=='<<': c = a<<b
|
||||
elif d=='>>': c = a>>b
|
||||
else: c = 0
|
||||
return c
|
||||
|
||||
def get_num(lst):
|
||||
if not lst: raise PreprocError("empty list for get_num")
|
||||
(p, v) = lst[0]
|
||||
if p == OP:
|
||||
if v == '(':
|
||||
count_par = 1
|
||||
i = 1
|
||||
while i < len(lst):
|
||||
(p, v) = lst[i]
|
||||
|
||||
if p == OP:
|
||||
if v == ')':
|
||||
count_par -= 1
|
||||
if count_par == 0:
|
||||
break
|
||||
elif v == '(':
|
||||
count_par += 1
|
||||
i += 1
|
||||
else:
|
||||
raise PreprocError("rparen expected %r" % lst)
|
||||
|
||||
(num, _) = get_term(lst[1:i])
|
||||
return (num, lst[i+1:])
|
||||
|
||||
elif v == '+':
|
||||
return get_num(lst[1:])
|
||||
elif v == '-':
|
||||
num, lst = get_num(lst[1:])
|
||||
return (reduce_nums('-1', num, '*'), lst)
|
||||
elif v == '!':
|
||||
num, lst = get_num(lst[1:])
|
||||
return (int(not int(num)), lst)
|
||||
elif v == '~':
|
||||
return (~ int(num), lst)
|
||||
else:
|
||||
raise PreprocError("invalid op token %r for get_num" % lst)
|
||||
elif p == NUM:
|
||||
return v, lst[1:]
|
||||
elif p == IDENT:
|
||||
# all macros should have been replaced, remaining identifiers eval to 0
|
||||
return 0, lst[1:]
|
||||
else:
|
||||
raise PreprocError("invalid token %r for get_num" % lst)
|
||||
|
||||
def get_term(lst):
|
||||
if not lst: raise PreprocError("empty list for get_term")
|
||||
num, lst = get_num(lst)
|
||||
if not lst:
|
||||
return (num, [])
|
||||
(p, v) = lst[0]
|
||||
if p == OP:
|
||||
if v == '&&' and not num:
|
||||
return (num, [])
|
||||
elif v == '||' and num:
|
||||
return (num, [])
|
||||
elif v == ',':
|
||||
# skip
|
||||
return get_term(lst[1:])
|
||||
elif v == '?':
|
||||
count_par = 0
|
||||
i = 1
|
||||
while i < len(lst):
|
||||
(p, v) = lst[i]
|
||||
|
||||
if p == OP:
|
||||
if v == ')':
|
||||
count_par -= 1
|
||||
elif v == '(':
|
||||
count_par += 1
|
||||
elif v == ':':
|
||||
if count_par == 0:
|
||||
break
|
||||
i += 1
|
||||
else:
|
||||
raise PreprocError("rparen expected %r" % lst)
|
||||
|
||||
if int(num):
|
||||
return get_term(lst[1:i])
|
||||
else:
|
||||
return get_term(lst[i+1:])
|
||||
|
||||
else:
|
||||
num2, lst = get_num(lst[1:])
|
||||
|
||||
if not lst:
|
||||
# no more tokens to process
|
||||
num2 = reduce_nums(num, num2, v)
|
||||
return get_term([(NUM, num2)] + lst)
|
||||
|
||||
# operator precedence
|
||||
p2, v2 = lst[0]
|
||||
if p2 != OP:
|
||||
raise PreprocError("op expected %r" % lst)
|
||||
|
||||
if prec[v2] >= prec[v]:
|
||||
num2 = reduce_nums(num, num2, v)
|
||||
return get_term([(NUM, num2)] + lst)
|
||||
else:
|
||||
num3, lst = get_num(lst[1:])
|
||||
num3 = reduce_nums(num2, num3, v2)
|
||||
return get_term([(NUM, num), (p, v), (NUM, num3)] + lst)
|
||||
|
||||
|
||||
raise PreprocError("cannot reduce %r" % lst)
|
||||
|
||||
def reduce_eval(lst):
|
||||
"""take a list of tokens and output true or false (#if/#elif conditions)"""
|
||||
num, lst = get_term(lst)
|
||||
return (NUM, num)
|
||||
|
||||
def stringize(lst):
|
||||
"""use for converting a list of tokens to a string"""
|
||||
lst = [str(v2) for (p2, v2) in lst]
|
||||
return "".join(lst)
|
||||
|
||||
def paste_tokens(t1, t2):
|
||||
"""
|
||||
here is what we can paste:
|
||||
a ## b -> ab
|
||||
> ## = -> >=
|
||||
a ## 2 -> a2
|
||||
"""
|
||||
p1 = None
|
||||
if t1[0] == OP and t2[0] == OP:
|
||||
p1 = OP
|
||||
elif t1[0] == IDENT and (t2[0] == IDENT or t2[0] == NUM):
|
||||
p1 = IDENT
|
||||
elif t1[0] == NUM and t2[0] == NUM:
|
||||
p1 = NUM
|
||||
if not p1:
|
||||
raise PreprocError('tokens do not make a valid paste %r and %r' % (t1, t2))
|
||||
return (p1, t1[1] + t2[1])
|
||||
|
||||
def reduce_tokens(lst, defs, ban=[]):
|
||||
"""replace the tokens in lst, using the macros provided in defs, and a list of macros that cannot be re-applied"""
|
||||
i = 0
|
||||
|
||||
while i < len(lst):
|
||||
(p, v) = lst[i]
|
||||
|
||||
if p == IDENT and v == "defined":
|
||||
del lst[i]
|
||||
if i < len(lst):
|
||||
(p2, v2) = lst[i]
|
||||
if p2 == IDENT:
|
||||
if v2 in defs:
|
||||
lst[i] = (NUM, 1)
|
||||
else:
|
||||
lst[i] = (NUM, 0)
|
||||
elif p2 == OP and v2 == '(':
|
||||
del lst[i]
|
||||
(p2, v2) = lst[i]
|
||||
del lst[i] # remove the ident, and change the ) for the value
|
||||
if v2 in defs:
|
||||
lst[i] = (NUM, 1)
|
||||
else:
|
||||
lst[i] = (NUM, 0)
|
||||
else:
|
||||
raise PreprocError("invalid define expression %r" % lst)
|
||||
|
||||
elif p == IDENT and v in defs:
|
||||
|
||||
if isinstance(defs[v], str):
|
||||
a, b = extract_macro(defs[v])
|
||||
defs[v] = b
|
||||
macro_def = defs[v]
|
||||
to_add = macro_def[1]
|
||||
|
||||
if isinstance(macro_def[0], list):
|
||||
# macro without arguments
|
||||
del lst[i]
|
||||
for x in xrange(len(to_add)):
|
||||
lst.insert(i, to_add[x])
|
||||
i += 1
|
||||
else:
|
||||
# collect the arguments for the funcall
|
||||
|
||||
args = []
|
||||
del lst[i]
|
||||
|
||||
if i >= len(lst):
|
||||
raise PreprocError("expected '(' after %r (got nothing)" % v)
|
||||
|
||||
(p2, v2) = lst[i]
|
||||
if p2 != OP or v2 != '(':
|
||||
raise PreprocError("expected '(' after %r" % v)
|
||||
|
||||
del lst[i]
|
||||
|
||||
one_param = []
|
||||
count_paren = 0
|
||||
while i < len(lst):
|
||||
p2, v2 = lst[i]
|
||||
|
||||
del lst[i]
|
||||
if p2 == OP and count_paren == 0:
|
||||
if v2 == '(':
|
||||
one_param.append((p2, v2))
|
||||
count_paren += 1
|
||||
elif v2 == ')':
|
||||
if one_param: args.append(one_param)
|
||||
break
|
||||
elif v2 == ',':
|
||||
if not one_param: raise PreprocError("empty param in funcall %s" % p)
|
||||
args.append(one_param)
|
||||
one_param = []
|
||||
else:
|
||||
one_param.append((p2, v2))
|
||||
else:
|
||||
one_param.append((p2, v2))
|
||||
if v2 == '(': count_paren += 1
|
||||
elif v2 == ')': count_paren -= 1
|
||||
else:
|
||||
raise PreprocError('malformed macro')
|
||||
|
||||
# substitute the arguments within the define expression
|
||||
accu = []
|
||||
arg_table = macro_def[0]
|
||||
j = 0
|
||||
while j < len(to_add):
|
||||
(p2, v2) = to_add[j]
|
||||
|
||||
if p2 == OP and v2 == '#':
|
||||
# stringize is for arguments only
|
||||
if j+1 < len(to_add) and to_add[j+1][0] == IDENT and to_add[j+1][1] in arg_table:
|
||||
toks = args[arg_table[to_add[j+1][1]]]
|
||||
accu.append((STR, stringize(toks)))
|
||||
j += 1
|
||||
else:
|
||||
accu.append((p2, v2))
|
||||
elif p2 == OP and v2 == '##':
|
||||
# token pasting, how can man invent such a complicated system?
|
||||
if accu and j+1 < len(to_add):
|
||||
# we have at least two tokens
|
||||
|
||||
t1 = accu[-1]
|
||||
|
||||
if to_add[j+1][0] == IDENT and to_add[j+1][1] in arg_table:
|
||||
toks = args[arg_table[to_add[j+1][1]]]
|
||||
|
||||
if toks:
|
||||
accu[-1] = paste_tokens(t1, toks[0]) #(IDENT, accu[-1][1] + toks[0][1])
|
||||
accu.extend(toks[1:])
|
||||
else:
|
||||
# error, case "a##"
|
||||
accu.append((p2, v2))
|
||||
accu.extend(toks)
|
||||
elif to_add[j+1][0] == IDENT and to_add[j+1][1] == '__VA_ARGS__':
|
||||
# TODO not sure
|
||||
# first collect the tokens
|
||||
va_toks = []
|
||||
st = len(macro_def[0])
|
||||
pt = len(args)
|
||||
for x in args[pt-st+1:]:
|
||||
va_toks.extend(x)
|
||||
va_toks.append((OP, ','))
|
||||
if va_toks: va_toks.pop() # extra comma
|
||||
if len(accu)>1:
|
||||
(p3, v3) = accu[-1]
|
||||
(p4, v4) = accu[-2]
|
||||
if v3 == '##':
|
||||
# remove the token paste
|
||||
accu.pop()
|
||||
if v4 == ',' and pt < st:
|
||||
# remove the comma
|
||||
accu.pop()
|
||||
accu += va_toks
|
||||
else:
|
||||
accu[-1] = paste_tokens(t1, to_add[j+1])
|
||||
|
||||
j += 1
|
||||
else:
|
||||
# invalid paste, case "##a" or "b##"
|
||||
accu.append((p2, v2))
|
||||
|
||||
elif p2 == IDENT and v2 in arg_table:
|
||||
toks = args[arg_table[v2]]
|
||||
reduce_tokens(toks, defs, ban+[v])
|
||||
accu.extend(toks)
|
||||
else:
|
||||
accu.append((p2, v2))
|
||||
|
||||
j += 1
|
||||
|
||||
|
||||
reduce_tokens(accu, defs, ban+[v])
|
||||
|
||||
for x in xrange(len(accu)-1, -1, -1):
|
||||
lst.insert(i, accu[x])
|
||||
|
||||
i += 1
|
||||
|
||||
|
||||
def eval_macro(lst, adefs):
|
||||
"""reduce the tokens from the list lst, and try to return a 0/1 result"""
|
||||
reduce_tokens(lst, adefs, [])
|
||||
if not lst: raise PreprocError("missing tokens to evaluate")
|
||||
(p, v) = reduce_eval(lst)
|
||||
return int(v) != 0
|
||||
|
||||
def extract_macro(txt):
|
||||
"""process a macro definition from "#define f(x, y) x * y" into a function or a simple macro without arguments"""
|
||||
t = tokenize(txt)
|
||||
if re_fun.search(txt):
|
||||
p, name = t[0]
|
||||
|
||||
p, v = t[1]
|
||||
if p != OP: raise PreprocError("expected open parenthesis")
|
||||
|
||||
i = 1
|
||||
pindex = 0
|
||||
params = {}
|
||||
prev = '('
|
||||
|
||||
while 1:
|
||||
i += 1
|
||||
p, v = t[i]
|
||||
|
||||
if prev == '(':
|
||||
if p == IDENT:
|
||||
params[v] = pindex
|
||||
pindex += 1
|
||||
prev = p
|
||||
elif p == OP and v == ')':
|
||||
break
|
||||
else:
|
||||
raise PreprocError("unexpected token (3)")
|
||||
elif prev == IDENT:
|
||||
if p == OP and v == ',':
|
||||
prev = v
|
||||
elif p == OP and v == ')':
|
||||
break
|
||||
else:
|
||||
raise PreprocError("comma or ... expected")
|
||||
elif prev == ',':
|
||||
if p == IDENT:
|
||||
params[v] = pindex
|
||||
pindex += 1
|
||||
prev = p
|
||||
elif p == OP and v == '...':
|
||||
raise PreprocError("not implemented (1)")
|
||||
else:
|
||||
raise PreprocError("comma or ... expected (2)")
|
||||
elif prev == '...':
|
||||
raise PreprocError("not implemented (2)")
|
||||
else:
|
||||
raise PreprocError("unexpected else")
|
||||
|
||||
#~ print (name, [params, t[i+1:]])
|
||||
return (name, [params, t[i+1:]])
|
||||
else:
|
||||
(p, v) = t[0]
|
||||
return (v, [[], t[1:]])
|
||||
|
||||
re_include = re.compile('^\s*(<(?P<a>.*)>|"(?P<b>.*)")')
|
||||
def extract_include(txt, defs):
|
||||
"""process a line in the form "#include foo" to return a string representing the file"""
|
||||
m = re_include.search(txt)
|
||||
if m:
|
||||
if m.group('a'): return '<', m.group('a')
|
||||
if m.group('b'): return '"', m.group('b')
|
||||
|
||||
# perform preprocessing and look at the result, it must match an include
|
||||
toks = tokenize(txt)
|
||||
reduce_tokens(toks, defs, ['waf_include'])
|
||||
|
||||
if not toks:
|
||||
raise PreprocError("could not parse include %s" % txt)
|
||||
|
||||
if len(toks) == 1:
|
||||
if toks[0][0] == STR:
|
||||
return '"', toks[0][1]
|
||||
else:
|
||||
if toks[0][1] == '<' and toks[-1][1] == '>':
|
||||
return stringize(toks).lstrip('<').rstrip('>')
|
||||
|
||||
raise PreprocError("could not parse include %s." % txt)
|
||||
|
||||
def parse_char(txt):
|
||||
if not txt: raise PreprocError("attempted to parse a null char")
|
||||
if txt[0] != '\\':
|
||||
return ord(txt)
|
||||
c = txt[1]
|
||||
if c == 'x':
|
||||
if len(txt) == 4 and txt[3] in string.hexdigits: return int(txt[2:], 16)
|
||||
return int(txt[2:], 16)
|
||||
elif c.isdigit():
|
||||
if c == '0' and len(txt)==2: return 0
|
||||
for i in 3, 2, 1:
|
||||
if len(txt) > i and txt[1:1+i].isdigit():
|
||||
return (1+i, int(txt[1:1+i], 8))
|
||||
else:
|
||||
try: return chr_esc[c]
|
||||
except KeyError: raise PreprocError("could not parse char literal '%s'" % txt)
|
||||
|
||||
@Utils.run_once
|
||||
def tokenize_private(s):
|
||||
ret = []
|
||||
for match in re_clexer.finditer(s):
|
||||
m = match.group
|
||||
for name in tok_types:
|
||||
v = m(name)
|
||||
if v:
|
||||
if name == IDENT:
|
||||
try: v = g_optrans[v]; name = OP
|
||||
except KeyError:
|
||||
# c++ specific
|
||||
if v.lower() == "true":
|
||||
v = 1
|
||||
name = NUM
|
||||
elif v.lower() == "false":
|
||||
v = 0
|
||||
name = NUM
|
||||
elif name == NUM:
|
||||
if m('oct'): v = int(v, 8)
|
||||
elif m('hex'): v = int(m('hex'), 16)
|
||||
elif m('n0'): v = m('n0')
|
||||
else:
|
||||
v = m('char')
|
||||
if v: v = parse_char(v)
|
||||
else: v = m('n2') or m('n4')
|
||||
elif name == OP:
|
||||
if v == '%:': v = '#'
|
||||
elif v == '%:%:': v = '##'
|
||||
elif name == STR:
|
||||
# remove the quotes around the string
|
||||
v = v[1:-1]
|
||||
ret.append((name, v))
|
||||
break
|
||||
return ret
|
||||
|
||||
def tokenize(s):
|
||||
"""convert a string into a list of tokens (shlex.split does not apply to c/c++/d)"""
|
||||
return tokenize_private(s)[:]
|
||||
|
||||
@Utils.run_once
|
||||
def define_name(line):
|
||||
return re_mac.match(line).group(0)
|
||||
|
||||
class c_parser(object):
|
||||
def __init__(self, nodepaths=None, defines=None):
|
||||
#self.lines = txt.split('\n')
|
||||
self.lines = []
|
||||
|
||||
if defines is None:
|
||||
self.defs = {}
|
||||
else:
|
||||
self.defs = dict(defines) # make a copy
|
||||
self.state = []
|
||||
|
||||
self.env = None # needed for the variant when searching for files
|
||||
|
||||
self.count_files = 0
|
||||
self.currentnode_stack = []
|
||||
|
||||
self.nodepaths = nodepaths or []
|
||||
|
||||
self.nodes = []
|
||||
self.names = []
|
||||
|
||||
# file added
|
||||
self.curfile = ''
|
||||
self.ban_includes = set([])
|
||||
|
||||
def cached_find_resource(self, node, filename):
|
||||
try:
|
||||
nd = node.bld.cache_nd
|
||||
except:
|
||||
nd = node.bld.cache_nd = {}
|
||||
|
||||
tup = (node.id, filename)
|
||||
try:
|
||||
return nd[tup]
|
||||
except KeyError:
|
||||
ret = node.find_resource(filename)
|
||||
nd[tup] = ret
|
||||
return ret
|
||||
|
||||
def tryfind(self, filename):
|
||||
self.curfile = filename
|
||||
|
||||
# for msvc it should be a for loop on the whole stack
|
||||
found = self.cached_find_resource(self.currentnode_stack[-1], filename)
|
||||
|
||||
for n in self.nodepaths:
|
||||
if found:
|
||||
break
|
||||
found = self.cached_find_resource(n, filename)
|
||||
|
||||
if found:
|
||||
self.nodes.append(found)
|
||||
if filename[-4:] != '.moc':
|
||||
self.addlines(found)
|
||||
else:
|
||||
if not filename in self.names:
|
||||
self.names.append(filename)
|
||||
return found
|
||||
|
||||
def addlines(self, node):
|
||||
|
||||
self.currentnode_stack.append(node.parent)
|
||||
filepath = node.abspath(self.env)
|
||||
|
||||
self.count_files += 1
|
||||
if self.count_files > recursion_limit: raise PreprocError("recursion limit exceeded")
|
||||
pc = self.parse_cache
|
||||
debug('preproc: reading file %r', filepath)
|
||||
try:
|
||||
lns = pc[filepath]
|
||||
except KeyError:
|
||||
pass
|
||||
else:
|
||||
self.lines.extend(lns)
|
||||
return
|
||||
|
||||
try:
|
||||
lines = filter_comments(filepath)
|
||||
lines.append((POPFILE, ''))
|
||||
lines.reverse()
|
||||
pc[filepath] = lines # cache the lines filtered
|
||||
self.lines.extend(lines)
|
||||
except IOError:
|
||||
raise PreprocError("could not read the file %s" % filepath)
|
||||
except Exception:
|
||||
if Logs.verbose > 0:
|
||||
error("parsing %s failed" % filepath)
|
||||
traceback.print_exc()
|
||||
|
||||
def start(self, node, env):
|
||||
debug('preproc: scanning %s (in %s)', node.name, node.parent.name)
|
||||
|
||||
self.env = env
|
||||
variant = node.variant(env)
|
||||
bld = node.__class__.bld
|
||||
try:
|
||||
self.parse_cache = bld.parse_cache
|
||||
except AttributeError:
|
||||
bld.parse_cache = {}
|
||||
self.parse_cache = bld.parse_cache
|
||||
|
||||
self.addlines(node)
|
||||
if env['DEFLINES']:
|
||||
lst = [('define', x) for x in env['DEFLINES']]
|
||||
lst.reverse()
|
||||
self.lines.extend(lst)
|
||||
|
||||
while self.lines:
|
||||
(kind, line) = self.lines.pop()
|
||||
if kind == POPFILE:
|
||||
self.currentnode_stack.pop()
|
||||
continue
|
||||
try:
|
||||
self.process_line(kind, line)
|
||||
except Exception, e:
|
||||
if Logs.verbose:
|
||||
debug('preproc: line parsing failed (%s): %s %s', e, line, Utils.ex_stack())
|
||||
|
||||
def process_line(self, token, line):
|
||||
"""
|
||||
WARNING: a new state must be added for if* because the endif
|
||||
"""
|
||||
ve = Logs.verbose
|
||||
if ve: debug('preproc: line is %s - %s state is %s', token, line, self.state)
|
||||
state = self.state
|
||||
|
||||
# make certain we define the state if we are about to enter in an if block
|
||||
if token in ['ifdef', 'ifndef', 'if']:
|
||||
state.append(undefined)
|
||||
elif token == 'endif':
|
||||
state.pop()
|
||||
|
||||
# skip lines when in a dead 'if' branch, wait for the endif
|
||||
if not token in ['else', 'elif', 'endif']:
|
||||
if skipped in self.state or ignored in self.state:
|
||||
return
|
||||
|
||||
if token == 'if':
|
||||
ret = eval_macro(tokenize(line), self.defs)
|
||||
if ret: state[-1] = accepted
|
||||
else: state[-1] = ignored
|
||||
elif token == 'ifdef':
|
||||
m = re_mac.match(line)
|
||||
if m and m.group(0) in self.defs: state[-1] = accepted
|
||||
else: state[-1] = ignored
|
||||
elif token == 'ifndef':
|
||||
m = re_mac.match(line)
|
||||
if m and m.group(0) in self.defs: state[-1] = ignored
|
||||
else: state[-1] = accepted
|
||||
elif token == 'include' or token == 'import':
|
||||
(kind, inc) = extract_include(line, self.defs)
|
||||
if inc in self.ban_includes: return
|
||||
if token == 'import': self.ban_includes.add(inc)
|
||||
if ve: debug('preproc: include found %s (%s) ', inc, kind)
|
||||
if kind == '"' or not strict_quotes:
|
||||
self.tryfind(inc)
|
||||
elif token == 'elif':
|
||||
if state[-1] == accepted:
|
||||
state[-1] = skipped
|
||||
elif state[-1] == ignored:
|
||||
if eval_macro(tokenize(line), self.defs):
|
||||
state[-1] = accepted
|
||||
elif token == 'else':
|
||||
if state[-1] == accepted: state[-1] = skipped
|
||||
elif state[-1] == ignored: state[-1] = accepted
|
||||
elif token == 'define':
|
||||
try:
|
||||
self.defs[define_name(line)] = line
|
||||
except:
|
||||
raise PreprocError("invalid define line %s" % line)
|
||||
elif token == 'undef':
|
||||
m = re_mac.match(line)
|
||||
if m and m.group(0) in self.defs:
|
||||
self.defs.__delitem__(m.group(0))
|
||||
#print "undef %s" % name
|
||||
elif token == 'pragma':
|
||||
if re_pragma_once.match(line.lower()):
|
||||
self.ban_includes.add(self.curfile)
|
||||
|
||||
def get_deps(node, env, nodepaths=[]):
|
||||
"""
|
||||
Get the dependencies using a c/c++ preprocessor, this is required for finding dependencies of the kind
|
||||
#include some_macro()
|
||||
"""
|
||||
|
||||
gruik = c_parser(nodepaths)
|
||||
gruik.start(node, env)
|
||||
return (gruik.nodes, gruik.names)
|
||||
|
||||
#################### dumb dependency scanner
|
||||
|
||||
re_inc = re.compile(\
|
||||
'^[ \t]*(#|%:)[ \t]*(include)[ \t]*(.*)\r*$',
|
||||
re.IGNORECASE | re.MULTILINE)
|
||||
|
||||
def lines_includes(filename):
|
||||
code = Utils.readf(filename)
|
||||
if use_trigraphs:
|
||||
for (a, b) in trig_def: code = code.split(a).join(b)
|
||||
code = re_nl.sub('', code)
|
||||
code = re_cpp.sub(repl, code)
|
||||
return [(m.group(2), m.group(3)) for m in re.finditer(re_inc, code)]
|
||||
|
||||
def get_deps_simple(node, env, nodepaths=[], defines={}):
|
||||
"""
|
||||
Get the dependencies by just looking recursively at the #include statements
|
||||
"""
|
||||
|
||||
nodes = []
|
||||
names = []
|
||||
|
||||
def find_deps(node):
|
||||
lst = lines_includes(node.abspath(env))
|
||||
|
||||
for (_, line) in lst:
|
||||
(t, filename) = extract_include(line, defines)
|
||||
if filename in names:
|
||||
continue
|
||||
|
||||
if filename.endswith('.moc'):
|
||||
names.append(filename)
|
||||
|
||||
found = None
|
||||
for n in nodepaths:
|
||||
if found:
|
||||
break
|
||||
found = n.find_resource(filename)
|
||||
|
||||
if not found:
|
||||
if not filename in names:
|
||||
names.append(filename)
|
||||
elif not found in nodes:
|
||||
nodes.append(found)
|
||||
find_deps(node)
|
||||
|
||||
find_deps(node)
|
||||
return (nodes, names)
|
432
third_party/waf/wafadmin/Tools/python.py
vendored
432
third_party/waf/wafadmin/Tools/python.py
vendored
@ -1,432 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# Thomas Nagy, 2007 (ita)
|
||||
# Gustavo Carneiro (gjc), 2007
|
||||
|
||||
"Python support"
|
||||
|
||||
import os, sys
|
||||
import TaskGen, Utils, Options
|
||||
from Logs import debug, warn, info
|
||||
from TaskGen import extension, before, after, feature
|
||||
from Configure import conf
|
||||
from config_c import parse_flags
|
||||
|
||||
EXT_PY = ['.py']
|
||||
FRAG_2 = '''
|
||||
#include "Python.h"
|
||||
#ifdef __cplusplus
|
||||
extern "C" {
|
||||
#endif
|
||||
void Py_Initialize(void);
|
||||
void Py_Finalize(void);
|
||||
#ifdef __cplusplus
|
||||
}
|
||||
#endif
|
||||
int main()
|
||||
{
|
||||
Py_Initialize();
|
||||
Py_Finalize();
|
||||
return 0;
|
||||
}
|
||||
'''
|
||||
|
||||
@feature('pyext')
|
||||
@before('apply_incpaths', 'apply_lib_vars', 'apply_type_vars', 'apply_bundle')
|
||||
@after('vars_target_cshlib')
|
||||
def init_pyext(self):
|
||||
self.default_install_path = '${PYTHONARCHDIR}'
|
||||
self.uselib = self.to_list(getattr(self, 'uselib', ''))
|
||||
if not 'PYEXT' in self.uselib:
|
||||
self.uselib.append('PYEXT')
|
||||
self.env['MACBUNDLE'] = True
|
||||
|
||||
@before('apply_link', 'apply_lib_vars', 'apply_type_vars')
|
||||
@after('apply_bundle')
|
||||
@feature('pyext')
|
||||
def pyext_shlib_ext(self):
|
||||
# override shlib_PATTERN set by the osx module
|
||||
self.env['shlib_PATTERN'] = self.env['pyext_PATTERN']
|
||||
|
||||
@before('apply_incpaths', 'apply_lib_vars', 'apply_type_vars')
|
||||
@feature('pyembed')
|
||||
def init_pyembed(self):
|
||||
self.uselib = self.to_list(getattr(self, 'uselib', ''))
|
||||
if not 'PYEMBED' in self.uselib:
|
||||
self.uselib.append('PYEMBED')
|
||||
|
||||
@extension(EXT_PY)
|
||||
def process_py(self, node):
|
||||
if not (self.bld.is_install and self.install_path):
|
||||
return
|
||||
def inst_py(ctx):
|
||||
install_pyfile(self, node)
|
||||
self.bld.add_post_fun(inst_py)
|
||||
|
||||
def install_pyfile(self, node):
|
||||
path = self.bld.get_install_path(self.install_path + os.sep + node.name, self.env)
|
||||
|
||||
self.bld.install_files(self.install_path, [node], self.env, self.chmod, postpone=False)
|
||||
if self.bld.is_install < 0:
|
||||
info("* removing byte compiled python files")
|
||||
for x in 'co':
|
||||
try:
|
||||
os.remove(path + x)
|
||||
except OSError:
|
||||
pass
|
||||
|
||||
if self.bld.is_install > 0:
|
||||
if self.env['PYC'] or self.env['PYO']:
|
||||
info("* byte compiling %r" % path)
|
||||
|
||||
if self.env['PYC']:
|
||||
program = ("""
|
||||
import sys, py_compile
|
||||
for pyfile in sys.argv[1:]:
|
||||
py_compile.compile(pyfile, pyfile + 'c')
|
||||
""")
|
||||
argv = [self.env['PYTHON'], '-c', program, path]
|
||||
ret = Utils.pproc.Popen(argv).wait()
|
||||
if ret:
|
||||
raise Utils.WafError('bytecode compilation failed %r' % path)
|
||||
|
||||
if self.env['PYO']:
|
||||
program = ("""
|
||||
import sys, py_compile
|
||||
for pyfile in sys.argv[1:]:
|
||||
py_compile.compile(pyfile, pyfile + 'o')
|
||||
""")
|
||||
argv = [self.env['PYTHON'], self.env['PYFLAGS_OPT'], '-c', program, path]
|
||||
ret = Utils.pproc.Popen(argv).wait()
|
||||
if ret:
|
||||
raise Utils.WafError('bytecode compilation failed %r' % path)
|
||||
|
||||
# COMPAT
|
||||
class py_taskgen(TaskGen.task_gen):
|
||||
def __init__(self, *k, **kw):
|
||||
TaskGen.task_gen.__init__(self, *k, **kw)
|
||||
|
||||
@before('apply_core')
|
||||
@after('vars_target_cprogram', 'vars_target_cshlib')
|
||||
@feature('py')
|
||||
def init_py(self):
|
||||
self.default_install_path = '${PYTHONDIR}'
|
||||
|
||||
def _get_python_variables(python_exe, variables, imports=['import sys']):
|
||||
"""Run a python interpreter and print some variables"""
|
||||
program = list(imports)
|
||||
program.append('')
|
||||
for v in variables:
|
||||
program.append("print(repr(%s))" % v)
|
||||
os_env = dict(os.environ)
|
||||
try:
|
||||
del os_env['MACOSX_DEPLOYMENT_TARGET'] # see comments in the OSX tool
|
||||
except KeyError:
|
||||
pass
|
||||
proc = Utils.pproc.Popen([python_exe, "-c", '\n'.join(program)], stdout=Utils.pproc.PIPE, env=os_env)
|
||||
output = proc.communicate()[0].split("\n") # do not touch, python3
|
||||
if proc.returncode:
|
||||
if Options.options.verbose:
|
||||
warn("Python program to extract python configuration variables failed:\n%s"
|
||||
% '\n'.join(["line %03i: %s" % (lineno+1, line) for lineno, line in enumerate(program)]))
|
||||
raise RuntimeError
|
||||
return_values = []
|
||||
for s in output:
|
||||
s = s.strip()
|
||||
if not s:
|
||||
continue
|
||||
if s == 'None':
|
||||
return_values.append(None)
|
||||
elif (s[0] == "'" and s[-1] == "'") or (s[0] == '"' and s[-1] == '"'):
|
||||
return_values.append(eval(s))
|
||||
elif s[0].isdigit():
|
||||
return_values.append(int(s))
|
||||
else: break
|
||||
return return_values
|
||||
|
||||
@conf
|
||||
def check_python_headers(conf, mandatory=True):
|
||||
"""Check for headers and libraries necessary to extend or embed python.
|
||||
|
||||
On success the environment variables xxx_PYEXT and xxx_PYEMBED are added for uselib
|
||||
|
||||
PYEXT: for compiling python extensions
|
||||
PYEMBED: for embedding a python interpreter"""
|
||||
|
||||
if not conf.env['CC_NAME'] and not conf.env['CXX_NAME']:
|
||||
conf.fatal('load a compiler first (gcc, g++, ..)')
|
||||
|
||||
if not conf.env['PYTHON_VERSION']:
|
||||
conf.check_python_version()
|
||||
|
||||
env = conf.env
|
||||
python = env['PYTHON']
|
||||
if not python:
|
||||
conf.fatal('could not find the python executable')
|
||||
|
||||
## On Mac OSX we need to use mac bundles for python plugins
|
||||
if Options.platform == 'darwin':
|
||||
conf.check_tool('osx')
|
||||
|
||||
try:
|
||||
# Get some python configuration variables using distutils
|
||||
v = 'prefix SO SYSLIBS LDFLAGS SHLIBS LIBDIR LIBPL INCLUDEPY Py_ENABLE_SHARED MACOSX_DEPLOYMENT_TARGET LDVERSION'.split()
|
||||
(python_prefix, python_SO, python_SYSLIBS, python_LDFLAGS, python_SHLIBS,
|
||||
python_LIBDIR, python_LIBPL, INCLUDEPY, Py_ENABLE_SHARED,
|
||||
python_MACOSX_DEPLOYMENT_TARGET, python_LDVERSION) = \
|
||||
_get_python_variables(python, ["get_config_var('%s') or ''" % x for x in v],
|
||||
['from distutils.sysconfig import get_config_var'])
|
||||
except RuntimeError:
|
||||
conf.fatal("Python development headers not found (-v for details).")
|
||||
|
||||
conf.log.write("""Configuration returned from %r:
|
||||
python_prefix = %r
|
||||
python_SO = %r
|
||||
python_SYSLIBS = %r
|
||||
python_LDFLAGS = %r
|
||||
python_SHLIBS = %r
|
||||
python_LIBDIR = %r
|
||||
python_LIBPL = %r
|
||||
INCLUDEPY = %r
|
||||
Py_ENABLE_SHARED = %r
|
||||
MACOSX_DEPLOYMENT_TARGET = %r
|
||||
LDVERSION = %r
|
||||
""" % (python, python_prefix, python_SO, python_SYSLIBS, python_LDFLAGS, python_SHLIBS,
|
||||
python_LIBDIR, python_LIBPL, INCLUDEPY, Py_ENABLE_SHARED, python_MACOSX_DEPLOYMENT_TARGET,
|
||||
python_LDVERSION))
|
||||
|
||||
# Allow some python overrides from env vars for cross-compiling
|
||||
os_env = dict(os.environ)
|
||||
|
||||
override_python_LDFLAGS = os_env.get('python_LDFLAGS', None)
|
||||
if override_python_LDFLAGS is not None:
|
||||
conf.log.write("python_LDFLAGS override from environment = %r\n" % (override_python_LDFLAGS))
|
||||
python_LDFLAGS = override_python_LDFLAGS
|
||||
|
||||
override_python_LIBDIR = os_env.get('python_LIBDIR', None)
|
||||
if override_python_LIBDIR is not None:
|
||||
conf.log.write("python_LIBDIR override from environment = %r\n" % (override_python_LIBDIR))
|
||||
python_LIBDIR = override_python_LIBDIR
|
||||
|
||||
if python_MACOSX_DEPLOYMENT_TARGET:
|
||||
conf.env['MACOSX_DEPLOYMENT_TARGET'] = python_MACOSX_DEPLOYMENT_TARGET
|
||||
conf.environ['MACOSX_DEPLOYMENT_TARGET'] = python_MACOSX_DEPLOYMENT_TARGET
|
||||
|
||||
env['pyext_PATTERN'] = '%s'+python_SO
|
||||
|
||||
# Check for python libraries for embedding
|
||||
if python_SYSLIBS is not None:
|
||||
for lib in python_SYSLIBS.split():
|
||||
if lib.startswith('-l'):
|
||||
lib = lib[2:] # strip '-l'
|
||||
env.append_value('LIB_PYEMBED', lib)
|
||||
|
||||
if python_SHLIBS is not None:
|
||||
for lib in python_SHLIBS.split():
|
||||
if lib.startswith('-l'):
|
||||
env.append_value('LIB_PYEMBED', lib[2:]) # strip '-l'
|
||||
else:
|
||||
env.append_value('LINKFLAGS_PYEMBED', lib)
|
||||
|
||||
if Options.platform != 'darwin' and python_LDFLAGS:
|
||||
parse_flags(python_LDFLAGS, 'PYEMBED', env)
|
||||
|
||||
result = False
|
||||
if not python_LDVERSION:
|
||||
python_LDVERSION = env['PYTHON_VERSION']
|
||||
name = 'python' + python_LDVERSION
|
||||
|
||||
if python_LIBDIR is not None:
|
||||
path = [python_LIBDIR]
|
||||
conf.log.write("\n\n# Trying LIBDIR: %r\n" % path)
|
||||
result = conf.check(lib=name, uselib='PYEMBED', libpath=path)
|
||||
|
||||
if not result and python_LIBPL is not None:
|
||||
conf.log.write("\n\n# try again with -L$python_LIBPL (some systems don't install the python library in $prefix/lib)\n")
|
||||
path = [python_LIBPL]
|
||||
result = conf.check(lib=name, uselib='PYEMBED', libpath=path)
|
||||
|
||||
if not result:
|
||||
conf.log.write("\n\n# try again with -L$prefix/libs, and pythonXY name rather than pythonX.Y (win32)\n")
|
||||
path = [os.path.join(python_prefix, "libs")]
|
||||
name = 'python' + python_LDVERSION.replace('.', '')
|
||||
result = conf.check(lib=name, uselib='PYEMBED', libpath=path)
|
||||
|
||||
if result:
|
||||
env['LIBPATH_PYEMBED'] = path
|
||||
env.append_value('LIB_PYEMBED', name)
|
||||
else:
|
||||
conf.log.write("\n\n### LIB NOT FOUND\n")
|
||||
|
||||
# under certain conditions, python extensions must link to
|
||||
# python libraries, not just python embedding programs.
|
||||
if (sys.platform == 'win32' or sys.platform.startswith('os2')
|
||||
or sys.platform == 'darwin' or Py_ENABLE_SHARED):
|
||||
env['LIBPATH_PYEXT'] = env['LIBPATH_PYEMBED']
|
||||
env['LIB_PYEXT'] = env['LIB_PYEMBED']
|
||||
|
||||
# We check that pythonX.Y-config exists, and if it exists we
|
||||
# use it to get only the includes, else fall back to distutils.
|
||||
python_config = conf.find_program(
|
||||
'python%s-config' % ('.'.join(env['PYTHON_VERSION'].split('.')[:2])),
|
||||
var='PYTHON_CONFIG')
|
||||
if not python_config:
|
||||
python_config = conf.find_program(
|
||||
'python-config-%s' % ('.'.join(env['PYTHON_VERSION'].split('.')[:2])),
|
||||
var='PYTHON_CONFIG')
|
||||
|
||||
includes = []
|
||||
if python_config:
|
||||
for incstr in Utils.cmd_output("%s --includes" % (python_config,)).strip().split():
|
||||
# strip the -I or /I
|
||||
if (incstr.startswith('-I')
|
||||
or incstr.startswith('/I')):
|
||||
incstr = incstr[2:]
|
||||
# append include path, unless already given
|
||||
if incstr not in includes:
|
||||
includes.append(incstr)
|
||||
conf.log.write("Include path for Python extensions "
|
||||
"(found via python-config --includes): %r\n" % (includes,))
|
||||
env['CPPPATH_PYEXT'] = includes
|
||||
env['CPPPATH_PYEMBED'] = includes
|
||||
else:
|
||||
conf.log.write("Include path for Python extensions "
|
||||
"(found via distutils module): %r\n" % (INCLUDEPY,))
|
||||
env['CPPPATH_PYEXT'] = [INCLUDEPY]
|
||||
env['CPPPATH_PYEMBED'] = [INCLUDEPY]
|
||||
|
||||
# Code using the Python API needs to be compiled with -fno-strict-aliasing
|
||||
if env['CC_NAME'] == 'gcc':
|
||||
env.append_value('CCFLAGS_PYEMBED', '-fno-strict-aliasing')
|
||||
env.append_value('CCFLAGS_PYEXT', '-fno-strict-aliasing')
|
||||
if env['CXX_NAME'] == 'gcc':
|
||||
env.append_value('CXXFLAGS_PYEMBED', '-fno-strict-aliasing')
|
||||
env.append_value('CXXFLAGS_PYEXT', '-fno-strict-aliasing')
|
||||
|
||||
# See if it compiles
|
||||
conf.check(define_name='HAVE_PYTHON_H',
|
||||
uselib='PYEMBED', fragment=FRAG_2,
|
||||
errmsg='Could not find the python development headers', mandatory=mandatory)
|
||||
|
||||
@conf
|
||||
def check_python_version(conf, minver=None):
|
||||
"""
|
||||
Check if the python interpreter is found matching a given minimum version.
|
||||
minver should be a tuple, eg. to check for python >= 2.4.2 pass (2,4,2) as minver.
|
||||
|
||||
If successful, PYTHON_VERSION is defined as 'MAJOR.MINOR'
|
||||
(eg. '2.4') of the actual python version found, and PYTHONDIR is
|
||||
defined, pointing to the site-packages directory appropriate for
|
||||
this python version, where modules/packages/extensions should be
|
||||
installed.
|
||||
"""
|
||||
assert minver is None or isinstance(minver, tuple)
|
||||
python = conf.env['PYTHON']
|
||||
if not python:
|
||||
conf.fatal('could not find the python executable')
|
||||
|
||||
# Get python version string
|
||||
cmd = [python, "-c", "import sys\nfor x in sys.version_info: print(str(x))"]
|
||||
debug('python: Running python command %r' % cmd)
|
||||
proc = Utils.pproc.Popen(cmd, stdout=Utils.pproc.PIPE, shell=False)
|
||||
lines = proc.communicate()[0].split()
|
||||
assert len(lines) == 5, "found %i lines, expected 5: %r" % (len(lines), lines)
|
||||
pyver_tuple = (int(lines[0]), int(lines[1]), int(lines[2]), lines[3], int(lines[4]))
|
||||
|
||||
# compare python version with the minimum required
|
||||
result = (minver is None) or (pyver_tuple >= minver)
|
||||
|
||||
if result:
|
||||
# define useful environment variables
|
||||
pyver = '.'.join([str(x) for x in pyver_tuple[:2]])
|
||||
conf.env['PYTHON_VERSION'] = pyver
|
||||
|
||||
if 'PYTHONDIR' in conf.environ:
|
||||
pydir = conf.environ['PYTHONDIR']
|
||||
else:
|
||||
if sys.platform == 'win32':
|
||||
(python_LIBDEST, pydir) = \
|
||||
_get_python_variables(python,
|
||||
["get_config_var('LIBDEST') or ''",
|
||||
"get_python_lib(standard_lib=0, prefix=%r) or ''" % conf.env['PREFIX']],
|
||||
['from distutils.sysconfig import get_config_var, get_python_lib'])
|
||||
else:
|
||||
python_LIBDEST = None
|
||||
(pydir,) = \
|
||||
_get_python_variables(python,
|
||||
["get_python_lib(standard_lib=0, prefix=%r) or ''" % conf.env['PREFIX']],
|
||||
['from distutils.sysconfig import get_config_var, get_python_lib'])
|
||||
if python_LIBDEST is None:
|
||||
if conf.env['LIBDIR']:
|
||||
python_LIBDEST = os.path.join(conf.env['LIBDIR'], "python" + pyver)
|
||||
else:
|
||||
python_LIBDEST = os.path.join(conf.env['PREFIX'], "lib", "python" + pyver)
|
||||
|
||||
if 'PYTHONARCHDIR' in conf.environ:
|
||||
pyarchdir = conf.environ['PYTHONARCHDIR']
|
||||
else:
|
||||
(pyarchdir,) = _get_python_variables(python,
|
||||
["get_python_lib(plat_specific=1, standard_lib=0, prefix=%r) or ''" % conf.env['PREFIX']],
|
||||
['from distutils.sysconfig import get_config_var, get_python_lib'])
|
||||
if not pyarchdir:
|
||||
pyarchdir = pydir
|
||||
|
||||
if hasattr(conf, 'define'): # conf.define is added by the C tool, so may not exist
|
||||
conf.define('PYTHONDIR', pydir)
|
||||
conf.define('PYTHONARCHDIR', pyarchdir)
|
||||
|
||||
conf.env['PYTHONDIR'] = pydir
|
||||
|
||||
# Feedback
|
||||
pyver_full = '.'.join(map(str, pyver_tuple[:3]))
|
||||
if minver is None:
|
||||
conf.check_message_custom('Python version', '', pyver_full)
|
||||
else:
|
||||
minver_str = '.'.join(map(str, minver))
|
||||
conf.check_message('Python version', ">= %s" % minver_str, result, option=pyver_full)
|
||||
|
||||
if not result:
|
||||
conf.fatal('The python version is too old (%r)' % pyver_full)
|
||||
|
||||
@conf
def check_python_module(conf, module_name):
	"""Verify that the configured python interpreter can import *module_name*.

	Reports the result through the configuration log and aborts the
	configuration when the import fails.
	"""
	# Spawn the selected interpreter and let it attempt the import;
	# an exit status of zero means the module is importable.
	proc = Utils.pproc.Popen(
		[conf.env['PYTHON'], "-c", "import %s" % module_name],
		stderr=Utils.pproc.PIPE,
		stdout=Utils.pproc.PIPE)
	ok = proc.wait() == 0
	conf.check_message('Python module', module_name, ok)
	if not ok:
		conf.fatal('Could not find the python module %r' % module_name)
|
||||
|
||||
def detect(conf):
	"""Configure the python tool: locate the interpreter and record the
	byte-compilation defaults in the environment."""
	# default to the interpreter that is running waf
	if not conf.env.PYTHON:
		conf.env.PYTHON = sys.executable

	python = conf.find_program('python', var='PYTHON')
	if not python:
		conf.fatal('Could not find the path of the python executable')

	if conf.env.PYTHON != sys.executable:
		warn("python executable '%s' different from sys.executable '%s'" % (conf.env.PYTHON, sys.executable))

	env = conf.env
	# one-liner used to byte-compile installed .py files
	env['PYCMD'] = '"import sys, py_compile;py_compile.compile(sys.argv[1], sys.argv[2])"'
	env['PYFLAGS'] = ''
	env['PYFLAGS_OPT'] = '-O'
	# honour --nopyc/--nopyo when the option parser provided them
	env['PYC'] = getattr(Options.options, 'pyc', 1)
	env['PYO'] = getattr(Options.options, 'pyo', 1)
|
||||
|
||||
def set_options(opt):
	"""Register the --nopyc/--nopyo installation options."""
	for flag, dest, text in (
		('--nopyc', 'pyc', 'Do not install bytecode compiled .pyc files (configuration) [Default:install]'),
		('--nopyo', 'pyo', 'Do not install optimised compiled .pyo files (configuration) [Default:install]'),
	):
		opt.add_option(flag, action='store_false', default=1, help=text, dest=dest)
|
504
third_party/waf/wafadmin/Tools/qt4.py
vendored
504
third_party/waf/wafadmin/Tools/qt4.py
vendored
@ -1,504 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# Thomas Nagy, 2006 (ita)
|
||||
|
||||
"""
|
||||
Qt4 support
|
||||
|
||||
If QT4_ROOT is given (absolute path), the configuration will look in it first
|
||||
|
||||
This module also demonstrates how to add tasks dynamically (when the build has started)
|
||||
"""
|
||||
|
||||
try:
|
||||
from xml.sax import make_parser
|
||||
from xml.sax.handler import ContentHandler
|
||||
except ImportError:
|
||||
has_xml = False
|
||||
ContentHandler = object
|
||||
else:
|
||||
has_xml = True
|
||||
|
||||
import os, sys
|
||||
import ccroot, cxx
|
||||
import TaskGen, Task, Utils, Runner, Options, Node, Configure
|
||||
from TaskGen import taskgen, feature, after, extension
|
||||
from Logs import error
|
||||
from Constants import *
|
||||
|
||||
MOC_H = ['.h', '.hpp', '.hxx', '.hh']
|
||||
EXT_RCC = ['.qrc']
|
||||
EXT_UI = ['.ui']
|
||||
EXT_QT4 = ['.cpp', '.cc', '.cxx', '.C']
|
||||
|
||||
class qxx_task(Task.Task):
	"A cpp task that may create a moc task dynamically"

	# run before the link steps so the generated moc output exists in time
	before = ['cxx_link', 'static_link']

	def __init__(self, *k, **kw):
		Task.Task.__init__(self, *k, **kw)
		# flipped to 1 once add_moc_tasks() has created the moc tasks
		self.moc_done = 0

	def scan(self):
		"""Delegate to the c/c++ scanner, but move .moc entries from the
		node list to the 'names' (unresolved) list."""
		(nodes, names) = ccroot.scan(self)
		# for some reasons (variants) the moc node may end in the list of node deps
		for x in nodes:
			if x.name.endswith('.moc'):
				nodes.remove(x)
				names.append(x.relpath_gen(self.inputs[0].parent))
		return (nodes, names)

	def runnable_status(self):
		"""Delay execution until the dynamically-created moc tasks ran."""
		if self.moc_done:
			# if there is a moc task, delay the computation of the file signature
			for t in self.run_after:
				if not t.hasrun:
					return ASK_LATER
			# the moc file enters in the dependency calculation
			# so we need to recompute the signature when the moc file is present
			self.signature()
			return Task.Task.runnable_status(self)
		else:
			# yes, really, there are people who generate cxx files
			for t in self.run_after:
				if not t.hasrun:
					return ASK_LATER
			self.add_moc_tasks()
			return ASK_LATER

	def add_moc_tasks(self):
		"""Create one moc task per .moc dependency of the input file and
		schedule this task to run after all of them."""

		node = self.inputs[0]
		tree = node.__class__.bld

		try:
			# compute the signature once to know if there is a moc file to create
			self.signature()
		except KeyError:
			# the moc file may be referenced somewhere else
			pass
		else:
			# remove the signature, it must be recomputed with the moc task
			delattr(self, 'cache_sig')

		moctasks=[]
		mocfiles=[]
		variant = node.variant(self.env)
		try:
			tmp_lst = tree.raw_deps[self.unique_id()]
			tree.raw_deps[self.unique_id()] = []
		except KeyError:
			tmp_lst = []
		for d in tmp_lst:
			if not d.endswith('.moc'): continue
			# paranoid check
			if d in mocfiles:
				error("paranoia owns")
				continue

			# process that base.moc only once
			mocfiles.append(d)

			# find the extension (performed only when the .cpp has changes)
			base2 = d[:-4]
			for path in [node.parent] + self.generator.env['INC_PATHS']:
				tree.rescan(path)
				vals = getattr(Options.options, 'qt_header_ext', '') or MOC_H
				for ex in vals:
					h_node = path.find_resource(base2 + ex)
					if h_node:
						break
				else:
					continue
				break
			else:
				raise Utils.WafError("no header found for %s which is a moc file" % str(d))

			m_node = h_node.change_ext('.moc')
			tree.node_deps[(self.inputs[0].parent.id, self.env.variant(), m_node.name)] = h_node

			# create the task
			task = Task.TaskBase.classes['moc'](self.env, normal=0)
			task.set_inputs(h_node)
			task.set_outputs(m_node)

			# inject the new task into the scheduler by hand
			generator = tree.generator
			generator.outstanding.insert(0, task)
			generator.total += 1

			moctasks.append(task)

		# remove raw deps except the moc files to save space (optimization)
		tmp_lst = tree.raw_deps[self.unique_id()] = mocfiles

		# look at the file inputs, it is set right above
		lst = tree.node_deps.get(self.unique_id(), ())
		for d in lst:
			name = d.name
			if name.endswith('.moc'):
				task = Task.TaskBase.classes['moc'](self.env, normal=0)
				task.set_inputs(tree.node_deps[(self.inputs[0].parent.id, self.env.variant(), name)]) # 1st element in a tuple
				task.set_outputs(d)

				generator = tree.generator
				generator.outstanding.insert(0, task)
				generator.total += 1

				moctasks.append(task)

		# simple scheduler dependency: run the moc task before others
		self.run_after = moctasks
		self.moc_done = 1

	# reuse the plain cxx run method unchanged
	run = Task.TaskBase.classes['cxx'].__dict__['run']
|
||||
|
||||
def translation_update(task):
	"""Run lupdate over every input source file, refreshing all output
	.ts translation files at once."""
	targets = " ".join(a.abspath(task.env) for a in task.outputs)
	lupdate = task.env['QT_LUPDATE']

	for src in task.inputs:
		cmd = "%s %s -ts %s" % (lupdate, src.abspath(task.env), targets)
		Utils.pprint('BLUE', cmd)
		task.generator.bld.exec_command(cmd)
|
||||
|
||||
class XMLHandler(ContentHandler):
|
||||
def __init__(self):
|
||||
self.buf = []
|
||||
self.files = []
|
||||
def startElement(self, name, attrs):
|
||||
if name == 'file':
|
||||
self.buf = []
|
||||
def endElement(self, name):
|
||||
if name == 'file':
|
||||
self.files.append(''.join(self.buf))
|
||||
def characters(self, cars):
|
||||
self.buf.append(cars)
|
||||
|
||||
def scan(self):
	"add the dependency on the files referenced in the qrc"
	# Parse the .qrc XML document and collect every <file> entry.
	parser = make_parser()
	curHandler = XMLHandler()
	parser.setContentHandler(curHandler)
	fi = open(self.inputs[0].abspath(self.env))
	# close the handle even when the XML is malformed and parse() raises
	# (the original leaked the file object in that case)
	try:
		parser.parse(fi)
	finally:
		fi.close()

	# Resolve each referenced file against the qrc's directory: known
	# nodes become dependencies, the rest are reported as raw names.
	nodes = []
	names = []
	root = self.inputs[0].parent
	for x in curHandler.files:
		nd = root.find_resource(x)
		if nd: nodes.append(nd)
		else: names.append(x)

	return (nodes, names)
|
||||
|
||||
@extension(EXT_RCC)
def create_rcc_task(self, node):
	"hook for rcc files"
	# compile .qrc -> _rc.cpp -> .o; the rcc task's output feeds the cxx task
	generated = node.change_ext('_rc.cpp')
	self.create_task('rcc', node, generated)
	compile_task = self.create_task('cxx', generated, generated.change_ext('.o'))
	self.compiled_tasks.append(compile_task)
	return compile_task
|
||||
|
||||
@extension(EXT_UI)
def create_uic_task(self, node):
	"hook for uic tasks"
	task = self.create_task('ui4', node)
	# the .ui suffix (3 chars) is stripped before applying the ui_PATTERN
	header = self.path.find_or_declare(self.env['ui_PATTERN'] % node.name[:-3])
	task.outputs = [header]
	return task
|
||||
|
||||
class qt4_taskgen(cxx.cxx_taskgen):
	# a cxx task generator that always carries the 'qt4' feature,
	# so apply_qt4() below runs for it
	def __init__(self, *k, **kw):
		cxx.cxx_taskgen.__init__(self, *k, **kw)
		self.features.append('qt4')
|
||||
|
||||
@extension('.ts')
def add_lang(self, node):
	"""add all the .ts file into self.lang"""
	existing = self.to_list(getattr(self, 'lang', []))
	self.lang = existing + [node]
|
||||
|
||||
@feature('qt4')
@after('apply_link')
def apply_qt4(self):
	"""Create translation (.ts -> .qm) and resource (.qrc) tasks for a
	qt4 task generator, then propagate the c++ flags to moc."""
	if getattr(self, 'lang', None):
		update = getattr(self, 'update', None)
		lst=[]
		trans=[]
		for l in self.to_list(self.lang):

			# accept either Node objects or plain names without extension
			if not isinstance(l, Node.Node):
				l = self.path.find_resource(l+'.ts')

			t = self.create_task('ts2qm', l, l.change_ext('.qm'))
			lst.append(t.outputs[0])

			if update:
				trans.append(t.inputs[0])

		trans_qt4 = getattr(Options.options, 'trans_qt4', False)
		if update and trans_qt4:
			# we need the cpp files given, except the rcc task we create after
			# FIXME may be broken
			u = Task.TaskCmd(translation_update, self.env, 2)
			u.inputs = [a.inputs[0] for a in self.compiled_tasks]
			u.outputs = trans

		if getattr(self, 'langname', None):
			# bundle all .qm files into <langname>.qrc and link the result in
			t = Task.TaskBase.classes['qm2rcc'](self.env)
			t.set_inputs(lst)
			t.set_outputs(self.path.find_or_declare(self.langname+'.qrc'))
			t.path = self.path
			k = create_rcc_task(self, t.outputs[0])
			self.link_task.inputs.append(k.outputs[0])

	# moc needs the same defines and include paths as the c++ compiler
	self.env.append_value('MOC_FLAGS', self.env._CXXDEFFLAGS)
	self.env.append_value('MOC_FLAGS', self.env._CXXINCFLAGS)
|
||||
|
||||
@extension(EXT_QT4)
def cxx_hook(self, node):
	"""Compile a c++ source through the 'qxx' task so moc tasks can be
	spawned dynamically."""
	if hasattr(self, 'obj_ext'):
		obj_ext = self.obj_ext
	else:
		obj_ext = '_%d.o' % self.idx

	task = self.create_task('qxx', node, node.change_ext(obj_ext))
	self.compiled_tasks.append(task)
	return task
|
||||
|
||||
def process_qm2rcc(task):
	"""Write a .qrc resource document listing every input node of *task*.

	The output path is the task's single output node; each input is
	referenced relative to task.path.
	"""
	outfile = task.outputs[0].abspath(task.env)
	# 'with' guarantees the handle is closed even when a write fails
	# part-way (the original leaked the file object in that case)
	with open(outfile, 'w') as f:
		f.write('<!DOCTYPE RCC><RCC version="1.0">\n<qresource>\n')
		for k in task.inputs:
			f.write(' <file>')
			#f.write(k.name)
			f.write(k.path_to_parent(task.path))
			f.write('</file>\n')
		f.write('</qresource>\n</RCC>')
|
||||
|
||||
# Register the qt4 task classes and their command templates.
b = Task.simple_task_type
b('moc', '${QT_MOC} ${MOC_FLAGS} ${SRC} ${MOC_ST} ${TGT}', color='BLUE', vars=['QT_MOC', 'MOC_FLAGS'], shell=False)
# rcc gets the qrc scanner defined above to track referenced files
cls = b('rcc', '${QT_RCC} -name ${SRC[0].name} ${SRC[0].abspath(env)} ${RCC_ST} -o ${TGT}', color='BLUE', before='cxx moc qxx_task', after="qm2rcc", shell=False)
cls.scan = scan
b('ui4', '${QT_UIC} ${SRC} -o ${TGT}', color='BLUE', before='cxx moc qxx_task', shell=False)
b('ts2qm', '${QT_LRELEASE} ${QT_LRELEASE_FLAGS} ${SRC} -qm ${TGT}', color='BLUE', before='qm2rcc', shell=False)

# qm2rcc is a python function task: runs after ts2qm, before rcc
Task.task_type_from_func('qm2rcc', vars=[], func=process_qm2rcc, color='BLUE', before='rcc', after='ts2qm')
|
||||
|
||||
def detect_qt4(conf):
	"""Locate qmake and the qt4 helper programs (uic, moc, rcc, lrelease,
	lupdate), then populate the library/include/rpath variables either
	through pkg-config or by probing the filesystem."""
	env = conf.env
	opt = Options.options

	qtdir = getattr(opt, 'qtdir', '')
	qtbin = getattr(opt, 'qtbin', '')
	qtlibs = getattr(opt, 'qtlibs', '')
	useframework = getattr(opt, 'use_qt4_osxframework', True)

	paths = []

	# the path to qmake has been given explicitely
	if qtbin:
		paths = [qtbin]

	# the qt directory has been given - we deduce the qt binary path
	if not qtdir:
		qtdir = conf.environ.get('QT4_ROOT', '')
		qtbin = os.path.join(qtdir, 'bin')
		paths = [qtbin]

	# no qtdir, look in the path and in /usr/local/Trolltech
	if not qtdir:
		paths = os.environ.get('PATH', '').split(os.pathsep)
		paths.append('/usr/share/qt4/bin/')
		try:
			lst = os.listdir('/usr/local/Trolltech/')
		except OSError:
			pass
		else:
			if lst:
				lst.sort()
				lst.reverse()

				# keep the highest version
				qtdir = '/usr/local/Trolltech/%s/' % lst[0]
				qtbin = os.path.join(qtdir, 'bin')
				paths.append(qtbin)

	# at the end, try to find qmake in the paths given
	# keep the one with the highest version
	cand = None
	prev_ver = ['4', '0', '0']
	for qmk in ['qmake-qt4', 'qmake4', 'qmake']:
		qmake = conf.find_program(qmk, path_list=paths)
		if qmake:
			try:
				version = Utils.cmd_output([qmake, '-query', 'QT_VERSION']).strip()
			except ValueError:
				pass
			else:
				if version:
					# NOTE(review): lexicographic list comparison, not numeric
					new_ver = version.split('.')
					if new_ver > prev_ver:
						cand = qmake
						prev_ver = new_ver
	if cand:
		qmake = cand
	else:
		conf.fatal('could not find qmake for qt4')

	# query the actual install layout from the chosen qmake
	conf.env.QMAKE = qmake
	qtincludes = Utils.cmd_output([qmake, '-query', 'QT_INSTALL_HEADERS']).strip()
	qtdir = Utils.cmd_output([qmake, '-query', 'QT_INSTALL_PREFIX']).strip() + os.sep
	qtbin = Utils.cmd_output([qmake, '-query', 'QT_INSTALL_BINS']).strip() + os.sep

	if not qtlibs:
		try:
			qtlibs = Utils.cmd_output([qmake, '-query', 'QT_INSTALL_LIBS']).strip() + os.sep
		except ValueError:
			qtlibs = os.path.join(qtdir, 'lib')

	def find_bin(lst, var):
		# store the first program of lst found in paths into env[var]
		for f in lst:
			ret = conf.find_program(f, path_list=paths)
			if ret:
				env[var]=ret
				break

	vars = "QtCore QtGui QtUiTools QtNetwork QtOpenGL QtSql QtSvg QtTest QtXml QtWebKit Qt3Support".split()

	find_bin(['uic-qt3', 'uic3'], 'QT_UIC3')
	find_bin(['uic-qt4', 'uic'], 'QT_UIC')
	if not env['QT_UIC']:
		conf.fatal('cannot find the uic compiler for qt4')

	try:
		version = Utils.cmd_output(env['QT_UIC'] + " -version 2>&1").strip()
	except ValueError:
		conf.fatal('your uic compiler is for qt3, add uic for qt4 to your path')

	# strip the program banner to keep only the version number
	version = version.replace('Qt User Interface Compiler ','')
	version = version.replace('User Interface Compiler for Qt', '')
	if version.find(" 3.") != -1:
		conf.check_message('uic version', '(too old)', 0, option='(%s)'%version)
		sys.exit(1)
	conf.check_message('uic version', '', 1, option='(%s)'%version)

	find_bin(['moc-qt4', 'moc'], 'QT_MOC')
	find_bin(['rcc'], 'QT_RCC')
	find_bin(['lrelease-qt4', 'lrelease'], 'QT_LRELEASE')
	find_bin(['lupdate-qt4', 'lupdate'], 'QT_LUPDATE')

	env['UIC3_ST']= '%s -o %s'
	env['UIC_ST'] = '%s -o %s'
	env['MOC_ST'] = '-o'
	env['ui_PATTERN'] = 'ui_%s.h'
	env['QT_LRELEASE_FLAGS'] = ['-silent']

	vars_debug = [a+'_debug' for a in vars]

	try:
		conf.find_program('pkg-config', var='pkgconfig', path_list=paths, mandatory=True)

	except Configure.ConfigurationError:

		# no pkg-config: probe the library files directly
		for lib in vars_debug+vars:
			uselib = lib.upper()

			d = (lib.find('_debug') > 0) and 'd' or ''

			# original author seems to prefer static to shared libraries
			for (pat, kind) in ((conf.env.staticlib_PATTERN, 'STATIC'), (conf.env.shlib_PATTERN, '')):

				conf.check_message_1('Checking for %s %s' % (lib, kind))

				for ext in ['', '4']:
					path = os.path.join(qtlibs, pat % (lib + d + ext))
					if os.path.exists(path):
						env.append_unique(kind + 'LIB_' + uselib, lib + d + ext)
						conf.check_message_2('ok ' + path, 'GREEN')
						break
					path = os.path.join(qtbin, pat % (lib + d + ext))
					if os.path.exists(path):
						env.append_unique(kind + 'LIB_' + uselib, lib + d + ext)
						conf.check_message_2('ok ' + path, 'GREEN')
						break
				else:
					conf.check_message_2('not found', 'YELLOW')
					continue
				break

			env.append_unique('LIBPATH_' + uselib, qtlibs)
			env.append_unique('CPPPATH_' + uselib, qtincludes)
			env.append_unique('CPPPATH_' + uselib, qtincludes + os.sep + lib)
	else:
		# pkg-config available: let it fill the flags for each module
		for i in vars_debug+vars:
			try:
				conf.check_cfg(package=i, args='--cflags --libs --silence-errors', path=conf.env.pkgconfig)
			except ValueError:
				pass

	# the libpaths are set nicely, unfortunately they make really long command-lines
	# remove the qtcore ones from qtgui, etc
	def process_lib(vars_, coreval):
		for d in vars_:
			var = d.upper()
			if var == 'QTCORE': continue

			value = env['LIBPATH_'+var]
			if value:
				core = env[coreval]
				accu = []
				for lib in value:
					if lib in core: continue
					accu.append(lib)
				env['LIBPATH_'+var] = accu

	process_lib(vars, 'LIBPATH_QTCORE')
	process_lib(vars_debug, 'LIBPATH_QTCORE_DEBUG')

	# rpath if wanted
	want_rpath = getattr(Options.options, 'want_rpath', 1)
	if want_rpath:
		def process_rpath(vars_, coreval):
			# mirror process_lib, but emit -Wl,--rpath entries
			for d in vars_:
				var = d.upper()
				value = env['LIBPATH_'+var]
				if value:
					core = env[coreval]
					accu = []
					for lib in value:
						if var != 'QTCORE':
							if lib in core:
								continue
						accu.append('-Wl,--rpath='+lib)
					env['RPATH_'+var] = accu
		process_rpath(vars, 'LIBPATH_QTCORE')
		process_rpath(vars_debug, 'LIBPATH_QTCORE_DEBUG')

	env['QTLOCALE'] = str(env['PREFIX'])+'/share/locale'
|
||||
|
||||
def detect(conf):
	# standard waf tool entry point: delegates to the qt4 detection above
	detect_qt4(conf)
|
||||
|
||||
def set_options(opt):
	"""Register the qt4-related command-line options."""
	opt.add_option('--want-rpath', type='int', default=1, dest='want_rpath', help='set rpath to 1 or 0 [Default 1]')

	opt.add_option('--header-ext',
		type='string',
		default='',
		help='header extension for moc files',
		dest='qt_header_ext')

	# plain string options sharing the same shape
	for name in ('qtdir', 'qtbin', 'qtlibs'):
		opt.add_option('--'+name, type='string', default='', dest=name)

	if sys.platform == "darwin":
		opt.add_option('--no-qt4-framework', action="store_false", help='do not use the framework version of Qt4 in OS X', dest='use_qt4_osxframework',default=True)

	opt.add_option('--translate', action="store_true", help="collect translation strings", dest="trans_qt4", default=False)
|
119
third_party/waf/wafadmin/Tools/ruby.py
vendored
119
third_party/waf/wafadmin/Tools/ruby.py
vendored
@ -1,119 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# daniel.svensson at purplescout.se 2008
|
||||
|
||||
import os
|
||||
import Task, Options, Utils
|
||||
from TaskGen import before, feature, after
|
||||
from Configure import conf
|
||||
|
||||
@feature('rubyext')
@before('apply_incpaths', 'apply_type_vars', 'apply_lib_vars', 'apply_bundle')
@after('default_cc', 'vars_target_cshlib')
def init_rubyext(self):
	"""Prepare a ruby extension target: default install path and the
	RUBY/RUBYEXT uselib entries."""
	self.default_install_path = '${ARCHDIR_RUBY}'
	self.uselib = self.to_list(getattr(self, 'uselib', ''))
	for name in ('RUBY', 'RUBYEXT'):
		if name not in self.uselib:
			self.uselib.append(name)
|
||||
|
||||
@feature('rubyext')
@before('apply_link')
def apply_ruby_so_name(self):
	# ruby extensions use the platform's DLEXT naming (set by
	# check_ruby_ext_devel) instead of the default shared-lib pattern
	self.env['shlib_PATTERN'] = self.env['rubyext_PATTERN']
|
||||
|
||||
@conf
def check_ruby_version(conf, minver=()):
	"""
	Checks if ruby is installed.
	If installed the variable RUBY will be set in environment.
	Ruby binary can be overridden by --with-ruby-binary config variable

	:param minver: minimum version required, as a tuple of ints
	"""

	if Options.options.rubybinary:
		conf.env.RUBY = Options.options.rubybinary
	else:
		conf.find_program("ruby", var="RUBY", mandatory=True)

	ruby = conf.env.RUBY

	# ask the interpreter itself for its version string
	# (narrowed from a bare 'except:' which also swallowed
	# KeyboardInterrupt/SystemExit)
	try:
		version = Utils.cmd_output([ruby, '-e', 'puts defined?(VERSION) ? VERSION : RUBY_VERSION']).strip()
	except Exception:
		conf.fatal('could not determine ruby version')
	conf.env.RUBY_VERSION = version

	# int() raises ValueError on non-numeric components
	try:
		ver = tuple(map(int, version.split(".")))
	except ValueError:
		conf.fatal('unsupported ruby version %r' % version)

	cver = ''
	if minver:
		if ver < minver:
			conf.fatal('ruby is too old')
		cver = ".".join([str(x) for x in minver])

	conf.check_message('ruby', cver, True, version)
|
||||
|
||||
@conf
def check_ruby_ext_devel(conf):
	"""Configure everything needed to build native ruby extensions:
	header paths, link flags and the install directories.  Requires
	check_ruby_version() and a c/c++ compiler to have run first."""
	if not conf.env.RUBY:
		conf.fatal('ruby detection is required first')

	if not conf.env.CC_NAME and not conf.env.CXX_NAME:
		conf.fatal('load a c/c++ compiler first')

	version = tuple(map(int, conf.env.RUBY_VERSION.split(".")))

	def read_out(cmd):
		# run a one-liner in the ruby interpreter and split its output
		return Utils.to_list(Utils.cmd_output([conf.env.RUBY, '-rrbconfig', '-e', cmd]))

	def read_config(key):
		return read_out('puts Config::CONFIG[%r]' % key)

	ruby = conf.env['RUBY']
	archdir = read_config('archdir')
	cpppath = archdir
	# ruby >= 1.9 moved the headers to rubyhdrdir/<arch>
	if version >= (1, 9, 0):
		ruby_hdrdir = read_config('rubyhdrdir')
		cpppath += ruby_hdrdir
		cpppath += [os.path.join(ruby_hdrdir[0], read_config('arch')[0])]

	conf.check(header_name='ruby.h', includes=cpppath, mandatory=True, errmsg='could not find ruby header file')

	conf.env.LIBPATH_RUBYEXT = read_config('libdir')
	conf.env.LIBPATH_RUBYEXT += archdir
	conf.env.CPPPATH_RUBYEXT = cpppath
	conf.env.CCFLAGS_RUBYEXT = read_config("CCDLFLAGS")
	# shared-object naming used by apply_ruby_so_name()
	conf.env.rubyext_PATTERN = '%s.' + read_config('DLEXT')[0]

	# ok this is really stupid, but the command and flags are combined.
	# so we try to find the first argument...
	flags = read_config('LDSHARED')
	while flags and flags[0][0] != '-':
		flags = flags[1:]

	# we also want to strip out the deprecated ppc flags
	if len(flags) > 1 and flags[1] == "ppc":
		flags = flags[2:]

	conf.env.LINKFLAGS_RUBYEXT = flags
	conf.env.LINKFLAGS_RUBYEXT += read_config("LIBS")
	conf.env.LINKFLAGS_RUBYEXT += read_config("LIBRUBYARG_SHARED")

	# install directories, overridable from the command line
	if Options.options.rubyarchdir:
		conf.env.ARCHDIR_RUBY = Options.options.rubyarchdir
	else:
		conf.env.ARCHDIR_RUBY = read_config('sitearchdir')[0]

	if Options.options.rubylibdir:
		conf.env.LIBDIR_RUBY = Options.options.rubylibdir
	else:
		conf.env.LIBDIR_RUBY = read_config('sitelibdir')[0]
|
||||
|
||||
def set_options(opt):
	"""Register the ruby-related configuration options."""
	for flag, dest, text in (
		('--with-ruby-archdir', 'rubyarchdir', 'Specify directory where to install arch specific files'),
		('--with-ruby-libdir', 'rubylibdir', 'Specify alternate ruby library path'),
		('--with-ruby-binary', 'rubybinary', 'Specify alternate ruby binary'),
	):
		opt.add_option(flag, type='string', dest=dest, help=text)
|
76
third_party/waf/wafadmin/Tools/suncc.py
vendored
76
third_party/waf/wafadmin/Tools/suncc.py
vendored
@ -1,76 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# Thomas Nagy, 2006 (ita)
|
||||
# Ralf Habacker, 2006 (rh)
|
||||
|
||||
import os, optparse
|
||||
import Utils, Options, Configure
|
||||
import ccroot, ar
|
||||
from Configure import conftest
|
||||
|
||||
@conftest
def find_scc(conf):
	"""Locate the Sun C compiler and record it as CC/CC_NAME."""
	env = conf.env

	# honour an explicit CC (env var or wscript) before searching PATH
	cc = env['CC'] or conf.environ.get('CC')
	#if not cc: cc = conf.find_program('gcc', var='CC')
	if not cc:
		cc = conf.find_program('cc', var='CC')
	if not cc:
		conf.fatal('suncc was not found')
	cc = conf.cmd_to_list(cc)

	# 'cc -flags' prints the option summary; no output or a failure to
	# execute means this is not the Sun compiler
	try:
		flags_output = Utils.cmd_output(cc + ['-flags'])
	except ValueError:
		conf.fatal('suncc -flags could not be executed')
	else:
		if not flags_output:
			conf.fatal('suncc %r was not found' % cc)

	env['CC'] = cc
	env['CC_NAME'] = 'sun'
|
||||
|
||||
@conftest
def scc_common_flags(conf):
	"""Define the command-line templates and file patterns used when
	compiling and linking C code with the Sun compiler."""
	v = conf.env

	# CPPFLAGS CCDEFINES _CCINCFLAGS _CCDEFFLAGS

	v['CC_SRC_F'] = ''
	v['CC_TGT_F'] = ['-c', '-o', '']
	v['CPPPATH_ST'] = '-I%s' # template for adding include paths

	# linker
	if not v['LINK_CC']: v['LINK_CC'] = v['CC']
	v['CCLNK_SRC_F'] = ''
	v['CCLNK_TGT_F'] = ['-o', ''] # solaris hack, separate the -o from the target

	v['LIB_ST'] = '-l%s' # template for adding libs
	v['LIBPATH_ST'] = '-L%s' # template for adding libpaths
	v['STATICLIB_ST'] = '-l%s'
	v['STATICLIBPATH_ST'] = '-L%s'
	v['CCDEFINES_ST'] = '-D%s'

	v['SONAME_ST'] = '-Wl,-h -Wl,%s'
	v['SHLIB_MARKER'] = '-Bdynamic'
	v['STATICLIB_MARKER'] = '-Bstatic'

	# program
	v['program_PATTERN'] = '%s'

	# shared library
	v['shlib_CCFLAGS'] = ['-Kpic', '-DPIC']
	v['shlib_LINKFLAGS'] = ['-G']
	v['shlib_PATTERN'] = 'lib%s.so'

	# static lib
	v['staticlib_LINKFLAGS'] = ['-Bstatic']
	v['staticlib_PATTERN'] = 'lib%s.a'
|
||||
|
||||
detect = '''
|
||||
find_scc
|
||||
find_cpp
|
||||
find_ar
|
||||
scc_common_flags
|
||||
cc_load_tools
|
||||
cc_add_flags
|
||||
link_add_flags
|
||||
'''
|
75
third_party/waf/wafadmin/Tools/suncxx.py
vendored
75
third_party/waf/wafadmin/Tools/suncxx.py
vendored
@ -1,75 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# Thomas Nagy, 2006 (ita)
|
||||
# Ralf Habacker, 2006 (rh)
|
||||
|
||||
import os, optparse
|
||||
import Utils, Options, Configure
|
||||
import ccroot, ar
|
||||
from Configure import conftest
|
||||
|
||||
@conftest
def find_sxx(conf):
	"""Locate the Sun C++ compiler and record it as CXX/CXX_NAME."""
	env = conf.env

	# honour an explicit CXX (env var or wscript) before searching PATH
	cc = env['CXX'] or conf.environ.get('CXX')
	if not cc:
		cc = conf.find_program('c++', var='CXX')
	if not cc:
		conf.fatal('sunc++ was not found')
	cc = conf.cmd_to_list(cc)

	# 'c++ -flags' prints the option summary; no output or a failure to
	# execute means this is not the Sun compiler
	try:
		flags_output = Utils.cmd_output(cc + ['-flags'])
	except ValueError:
		conf.fatal('sunc++ -flags could not be executed')
	else:
		if not flags_output:
			conf.fatal('sunc++ %r was not found' % cc)

	env['CXX'] = cc
	env['CXX_NAME'] = 'sun'
|
||||
|
||||
@conftest
def sxx_common_flags(conf):
	"""Define the command-line templates and file patterns used when
	compiling and linking C++ code with the Sun compiler."""
	v = conf.env

	# CPPFLAGS CXXDEFINES _CXXINCFLAGS _CXXDEFFLAGS

	v['CXX_SRC_F'] = ''
	v['CXX_TGT_F'] = ['-c', '-o', '']
	v['CPPPATH_ST'] = '-I%s' # template for adding include paths

	# linker
	if not v['LINK_CXX']: v['LINK_CXX'] = v['CXX']
	v['CXXLNK_SRC_F'] = ''
	v['CXXLNK_TGT_F'] = ['-o', ''] # solaris hack, separate the -o from the target

	v['LIB_ST'] = '-l%s' # template for adding libs
	v['LIBPATH_ST'] = '-L%s' # template for adding libpaths
	v['STATICLIB_ST'] = '-l%s'
	v['STATICLIBPATH_ST'] = '-L%s'
	v['CXXDEFINES_ST'] = '-D%s'

	v['SONAME_ST'] = '-Wl,-h -Wl,%s'
	v['SHLIB_MARKER'] = '-Bdynamic'
	v['STATICLIB_MARKER'] = '-Bstatic'

	# program
	v['program_PATTERN'] = '%s'

	# shared library
	v['shlib_CXXFLAGS'] = ['-Kpic', '-DPIC']
	v['shlib_LINKFLAGS'] = ['-G']
	v['shlib_PATTERN'] = 'lib%s.so'

	# static lib
	v['staticlib_LINKFLAGS'] = ['-Bstatic']
	v['staticlib_PATTERN'] = 'lib%s.a'
|
||||
|
||||
detect = '''
|
||||
find_sxx
|
||||
find_cpp
|
||||
find_ar
|
||||
sxx_common_flags
|
||||
cxx_load_tools
|
||||
cxx_add_flags
|
||||
link_add_flags
|
||||
'''
|
250
third_party/waf/wafadmin/Tools/tex.py
vendored
250
third_party/waf/wafadmin/Tools/tex.py
vendored
@ -1,250 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# Thomas Nagy, 2006 (ita)
|
||||
|
||||
"TeX/LaTeX/PDFLaTeX support"
|
||||
|
||||
import os, re
|
||||
import Utils, TaskGen, Task, Runner, Build
|
||||
from TaskGen import feature, before
|
||||
from Logs import error, warn, debug
|
||||
|
||||
# matches \include{...}, \input{...}, etc. to discover tex dependencies
re_tex = re.compile(r'\\(?P<type>include|input|import|bringin|lstinputlisting){(?P<file>[^{}]*)}', re.M)
def scan(self):
	"""Scan a tex source for the files it references and return them as
	(nodes, names): resolved nodes and unresolved raw names."""
	node = self.inputs[0]
	env = self.env

	nodes = []
	names = []
	if not node: return (nodes, names)

	code = Utils.readf(node.abspath(env))

	curdirnode = self.curdirnode
	# NOTE(review): 'abs' shadows the builtin of the same name
	abs = curdirnode.abspath()
	for match in re_tex.finditer(code):
		path = match.group('file')
		if path:
			# try the bare name first, then the usual tex extensions
			for k in ['', '.tex', '.ltx']:
				# add another loop for the tex include paths?
				debug('tex: trying %s%s' % (path, k))
				try:
					os.stat(abs+os.sep+path+k)
				except OSError:
					continue
				found = path+k
				node = curdirnode.find_resource(found)
				if node:
					nodes.append(node)
				else:
					# on disk but not in the node tree
					debug('tex: could not find %s' % path)
					names.append(path)

	debug("tex: found the following : %s and names %s" % (nodes, names))
	return (nodes, names)
|
||||
|
||||
# pre-compiled command functions used by tex_build() below
latex_fun, _ = Task.compile_fun('latex', '${LATEX} ${LATEXFLAGS} ${SRCFILE}', shell=False)
pdflatex_fun, _ = Task.compile_fun('pdflatex', '${PDFLATEX} ${PDFLATEXFLAGS} ${SRCFILE}', shell=False)
bibtex_fun, _ = Task.compile_fun('bibtex', '${BIBTEX} ${BIBTEXFLAGS} ${SRCFILE}', shell=False)
# NOTE(review): registered under the name 'bibtex' although it runs
# makeindex — looks like a copy-paste slip; verify before renaming
makeindex_fun, _ = Task.compile_fun('bibtex', '${MAKEINDEX} ${MAKEINDEXFLAGS} ${SRCFILE}', shell=False)
|
||||
|
||||
# detects a \bibdata entry in the generated .aux file
g_bibtex_re = re.compile('bibdata', re.M)
def tex_build(task, command='LATEX'):
	"""Drive a full (pdf)latex build: first pass, optional bibtex and
	makeindex runs, then repeated passes until the .aux file converges
	(capped at 10 iterations)."""
	env = task.env
	bld = task.generator.bld

	if not env['PROMPT_LATEX']:
		env.append_value('LATEXFLAGS', '-interaction=batchmode')
		env.append_value('PDFLATEXFLAGS', '-interaction=batchmode')

	fun = latex_fun
	if command == 'PDFLATEX':
		fun = pdflatex_fun

	node = task.inputs[0]
	reldir = node.bld_dir(env)

	#lst = []
	#for c in Utils.split_path(reldir):
	#	if c: lst.append('..')
	#srcfile = os.path.join(*(lst + [node.srcpath(env)]))
	#sr2 = os.path.join(*(lst + [node.parent.srcpath(env)]))
	srcfile = node.abspath(env)
	# search path handed to tex via TEXINPUTS: source dir and build dir
	sr2 = node.parent.abspath() + os.pathsep + node.parent.abspath(env) + os.pathsep

	aux_node = node.change_ext('.aux')
	idx_node = node.change_ext('.idx')

	nm = aux_node.name
	docuname = nm[ : len(nm) - 4 ] # 4 is the size of ".aux"

	# important, set the cwd for everybody
	task.cwd = task.inputs[0].parent.abspath(task.env)


	warn('first pass on %s' % command)

	task.env.env = {'TEXINPUTS': sr2}
	task.env.SRCFILE = srcfile
	ret = fun(task)
	if ret:
		return ret

	# look in the .aux file if there is a bibfile to process
	try:
		ct = Utils.readf(aux_node.abspath(env))
	except (OSError, IOError):
		error('error bibtex scan')
	else:
		fo = g_bibtex_re.findall(ct)

		# there is a .aux file to process
		if fo:
			warn('calling bibtex')

			task.env.env = {'BIBINPUTS': sr2, 'BSTINPUTS': sr2}
			task.env.SRCFILE = docuname
			ret = bibtex_fun(task)
			if ret:
				error('error when calling bibtex %s' % docuname)
				return ret

	# look on the filesystem if there is a .idx file to process
	try:
		idx_path = idx_node.abspath(env)
		os.stat(idx_path)
	except OSError:
		error('error file.idx scan')
	else:
		warn('calling makeindex')

		task.env.SRCFILE = idx_node.name
		task.env.env = {}
		ret = makeindex_fun(task)
		if ret:
			error('error when calling makeindex %s' % idx_path)
			return ret


	# re-run (pdf)latex until the .aux hash stabilises
	hash = ''
	i = 0
	while i < 10:
		# prevent against infinite loops - one never knows
		i += 1

		# watch the contents of file.aux
		prev_hash = hash
		try:
			hash = Utils.h_file(aux_node.abspath(env))
		except KeyError:
			error('could not read aux.h -> %s' % aux_node.abspath(env))
			pass

		# debug
		#print "hash is, ", hash, " ", old_hash

		# stop if file.aux does not change anymore
		if hash and hash == prev_hash:
			break

		# run the command
		warn('calling %s' % command)

		task.env.env = {'TEXINPUTS': sr2 + os.pathsep}
		task.env.SRCFILE = srcfile
		ret = fun(task)
		if ret:
			error('error when calling %s %s' % (command, latex_fun))
			return ret

	return None # ok
|
||||
|
||||
# Environment variables that invalidate latex tasks when they change.
latex_vardeps = ['LATEX', 'LATEXFLAGS']

def latex_build(task):
    """Task function: run the LaTeX toolchain on the task's input (.tex -> .dvi)."""
    return tex_build(task, 'LATEX')
|
||||
|
||||
# Environment variables that invalidate pdflatex tasks when they change.
pdflatex_vardeps = ['PDFLATEX', 'PDFLATEXFLAGS']

def pdflatex_build(task):
    """Task function: run the pdfLaTeX toolchain on the task's input (.tex -> .pdf)."""
    return tex_build(task, 'PDFLATEX')
|
||||
|
||||
class tex_taskgen(TaskGen.task_gen):
    """Task generator for the 'tex' feature; adds nothing beyond the base class."""
    def __init__(self, *args, **kwargs):
        # Delegate straight to the generic task generator constructor.
        TaskGen.task_gen.__init__(self, *args, **kwargs)
|
||||
|
||||
@feature('tex')
@before('apply_core')
def apply_tex(self):
    """Create latex/pdflatex tasks for each source file of a 'tex' task generator.

    Also wires optional post-processing tasks (dvips/dvipdf/pdf2ps) according
    to the generator's 'outs' attribute, and records manual dependencies
    declared via the 'deps' attribute.
    """
    # default to pdflatex when no (or an unknown) type was requested
    if not getattr(self, 'type', None) in ['latex', 'pdflatex']:
        self.type = 'pdflatex'

    tree = self.bld
    outs = Utils.to_list(getattr(self, 'outs', []))

    # prompt for incomplete files (else the batchmode is used)
    self.env['PROMPT_LATEX'] = getattr(self, 'prompt', 1)

    deps_lst = []

    # resolve manual dependencies (file names relative to this path) to nodes
    if getattr(self, 'deps', None):
        deps = self.to_list(self.deps)
        for filename in deps:
            n = self.path.find_resource(filename)
            if not n in deps_lst: deps_lst.append(n)

    self.source = self.to_list(self.source)
    for filename in self.source:
        base, ext = os.path.splitext(filename)

        node = self.path.find_resource(filename)
        if not node: raise Utils.WafError('cannot find %s' % filename)

        # one compilation task per source document
        if self.type == 'latex':
            task = self.create_task('latex', node, node.change_ext('.dvi'))
        elif self.type == 'pdflatex':
            task = self.create_task('pdflatex', node, node.change_ext('.pdf'))

        task.env = self.env
        task.curdirnode = self.path

        # add the manual dependencies
        if deps_lst:
            variant = node.variant(self.env)
            try:
                lst = tree.node_deps[task.unique_id()]
                for n in deps_lst:
                    if not n in lst:
                        lst.append(n)
            except KeyError:
                # no dependency list recorded yet for this task
                tree.node_deps[task.unique_id()] = deps_lst

        # optional conversions of the primary output
        if self.type == 'latex':
            if 'ps' in outs:
                tsk = self.create_task('dvips', task.outputs, node.change_ext('.ps'))
                tsk.env.env = {'TEXINPUTS' : node.parent.abspath() + os.pathsep + self.path.abspath() + os.pathsep + self.path.abspath(self.env)}
            if 'pdf' in outs:
                tsk = self.create_task('dvipdf', task.outputs, node.change_ext('.pdf'))
                tsk.env.env = {'TEXINPUTS' : node.parent.abspath() + os.pathsep + self.path.abspath() + os.pathsep + self.path.abspath(self.env)}
        elif self.type == 'pdflatex':
            if 'ps' in outs:
                self.create_task('pdf2ps', task.outputs, node.change_ext('.ps'))
    # prevent apply_core from creating more tasks for these sources
    self.source = []
|
||||
|
||||
def detect(conf):
    """Configuration: locate the TeX toolchain programs and initialise their flags.

    Each program found is stored under its upper-cased name (e.g. LATEX) and
    gets an empty <NAME>FLAGS variable; DVIPSFLAGS defaults to '-Ppdf'.
    """
    env = conf.env
    programs = ('tex', 'latex', 'pdflatex', 'bibtex', 'dvips',
                'dvipdf', 'ps2pdf', 'makeindex', 'pdf2ps')
    for name in programs:
        var = name.upper()
        conf.find_program(name, var=var)
        env[var + 'FLAGS'] = ''
    env['DVIPSFLAGS'] = '-Ppdf'
|
||||
|
||||
# Register the command-line based TeX helper tasks.
b = Task.simple_task_type
b('tex', '${TEX} ${TEXFLAGS} ${SRC}', color='BLUE', shell=False) # not used anywhere
b('bibtex', '${BIBTEX} ${BIBTEXFLAGS} ${SRC}', color='BLUE', shell=False) # not used anywhere
b('dvips', '${DVIPS} ${DVIPSFLAGS} ${SRC} -o ${TGT}', color='BLUE', after="latex pdflatex tex bibtex", shell=False)
b('dvipdf', '${DVIPDF} ${DVIPDFFLAGS} ${SRC} ${TGT}', color='BLUE', after="latex pdflatex tex bibtex", shell=False)
b('pdf2ps', '${PDF2PS} ${PDF2PSFLAGS} ${SRC} ${TGT}', color='BLUE', after="dvipdf pdflatex", shell=False)

# Register the python-function based compilation tasks; both reuse the same
# dependency scanner ('scan' is defined earlier in this module).
b = Task.task_type_from_func
cls = b('latex', latex_build, vars=latex_vardeps)
cls.scan = scan
cls = b('pdflatex', pdflatex_build, vars=pdflatex_vardeps)
cls.scan = scan
|
308
third_party/waf/wafadmin/Tools/unittestw.py
vendored
308
third_party/waf/wafadmin/Tools/unittestw.py
vendored
@ -1,308 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# Carlos Rafael Giani, 2006
|
||||
|
||||
"""
|
||||
Unit tests run in the shutdown() method, and for c/c++ programs
|
||||
|
||||
One should NOT have to give parameters to programs to execute
|
||||
|
||||
In the shutdown method, add the following code:
|
||||
|
||||
>>> def shutdown():
|
||||
... ut = UnitTest.unit_test()
|
||||
... ut.run()
|
||||
... ut.print_results()
|
||||
|
||||
|
||||
Each object to use as a unit test must be a program and must have X{obj.unit_test=1}
|
||||
"""
|
||||
import os, sys
|
||||
import Build, TaskGen, Utils, Options, Logs, Task
|
||||
from TaskGen import before, after, feature
|
||||
from Constants import *
|
||||
|
||||
class unit_test(object):
    "Unit test representation"
    def __init__(self):
        self.returncode_ok = 0 # Unit test returncode considered OK. All returncodes differing from this one
        # will cause the unit test to be marked as "FAILED".

        # The following variables are filled with data by run().

        # print_results() uses these for printing the unit test summary,
        # but if there is need for direct access to the results,
        # they can be retrieved here, after calling run().

        self.num_tests_ok = 0 # Number of successful unit tests
        self.num_tests_failed = 0 # Number of failed unit tests
        self.num_tests_err = 0 # Tests that have not even run
        self.total_num_tests = 0 # Total amount of unit tests
        self.max_label_length = 0 # Maximum label length (pretty-print the output)

        self.unit_tests = Utils.ordered_dict() # Unit test dictionary. Key: the label (unit test filename relative
        # to the build dir), value: unit test filename with absolute path
        self.unit_test_results = {} # Dictionary containing the unit test results.
        # Key: the label, value: result (true = success false = failure)
        self.unit_test_erroneous = {} # Dictionary indicating erroneous unit tests.
        # Key: the label, value: true = unit test has an error false = unit test is ok
        self.change_to_testfile_dir = False #True if the test file needs to be executed from the same dir
        self.want_to_see_test_output = False #True to see the stdout from the testfile (for example check suites)
        self.want_to_see_test_error = False #True to see the stderr from the testfile (for example check suites)
        self.run_if_waf_does = 'check' #build was the old default

    def run(self):
        "Run the unit tests and gather results (note: no output here)"

        # reset all counters/collections so run() may be called repeatedly
        self.num_tests_ok = 0
        self.num_tests_failed = 0
        self.num_tests_err = 0
        self.total_num_tests = 0
        self.max_label_length = 0

        self.unit_tests = Utils.ordered_dict()
        self.unit_test_results = {}
        self.unit_test_erroneous = {}

        ld_library_path = []

        # If waf is not building, don't run anything
        if not Options.commands[self.run_if_waf_does]: return

        # Get the paths for the shared libraries, and obtain the unit tests to execute
        for obj in Build.bld.all_task_gen:
            try:
                link_task = obj.link_task
            except AttributeError:
                pass
            else:
                lib_path = link_task.outputs[0].parent.abspath(obj.env)
                if lib_path not in ld_library_path:
                    ld_library_path.append(lib_path)

            unit_test = getattr(obj, 'unit_test', '')
            if unit_test and 'cprogram' in obj.features:
                try:
                    output = obj.path
                    filename = os.path.join(output.abspath(obj.env), obj.target)
                    srcdir = output.abspath()
                    label = os.path.join(output.bldpath(obj.env), obj.target)
                    self.max_label_length = max(self.max_label_length, len(label))
                    self.unit_tests[label] = (filename, srcdir)
                except KeyError:
                    pass
        self.total_num_tests = len(self.unit_tests)
        # Now run the unit tests
        Utils.pprint('GREEN', 'Running the unit tests')
        count = 0
        result = 1

        for label in self.unit_tests.allkeys:
            file_and_src = self.unit_tests[label]
            filename = file_and_src[0]
            srcdir = file_and_src[1]
            count += 1
            line = Build.bld.progress_line(count, self.total_num_tests, Logs.colors.GREEN, Logs.colors.NORMAL)
            if Options.options.progress_bar and line:
                sys.stderr.write(line)
                sys.stderr.flush()
            try:
                kwargs = {}
                kwargs['env'] = os.environ.copy()
                if self.change_to_testfile_dir:
                    kwargs['cwd'] = srcdir
                if not self.want_to_see_test_output:
                    kwargs['stdout'] = Utils.pproc.PIPE # PIPE for ignoring output
                if not self.want_to_see_test_error:
                    kwargs['stderr'] = Utils.pproc.PIPE # PIPE for ignoring output
                if ld_library_path:
                    v = kwargs['env']
                    def add_path(dct, path, var):
                        # prepend the build library paths to the inherited value
                        dct[var] = os.pathsep.join(Utils.to_list(path) + [os.environ.get(var, '')])
                    if sys.platform == 'win32':
                        add_path(v, ld_library_path, 'PATH')
                    elif sys.platform == 'darwin':
                        add_path(v, ld_library_path, 'DYLD_LIBRARY_PATH')
                        add_path(v, ld_library_path, 'LD_LIBRARY_PATH')
                    else:
                        add_path(v, ld_library_path, 'LD_LIBRARY_PATH')

                pp = Utils.pproc.Popen(filename, **kwargs)
                (out, err) = pp.communicate() # uh, and the output is ignored?? - fortunately this is going to disappear

                result = int(pp.returncode == self.returncode_ok)

                if result:
                    self.num_tests_ok += 1
                else:
                    self.num_tests_failed += 1

                self.unit_test_results[label] = result
                self.unit_test_erroneous[label] = 0
            except OSError:
                # the test binary could not be launched at all
                self.unit_test_erroneous[label] = 1
                self.num_tests_err += 1
            except KeyboardInterrupt:
                pass
        if Options.options.progress_bar: sys.stdout.write(Logs.colors.cursor_on)

    def print_results(self):
        "Pretty-prints a summary of all unit tests, along with some statistics"

        # If waf is not building, don't output anything
        if not Options.commands[self.run_if_waf_does]: return

        p = Utils.pprint
        # Early quit if no tests were performed
        if self.total_num_tests == 0:
            p('YELLOW', 'No unit tests present')
            return

        for label in self.unit_tests.allkeys:
            filename = self.unit_tests[label]
            err = 0
            result = 0

            try: err = self.unit_test_erroneous[label]
            except KeyError: pass

            try: result = self.unit_test_results[label]
            except KeyError: pass

            # pad with dots so the status column lines up
            n = self.max_label_length - len(label)
            if err: n += 4
            elif result: n += 7
            else: n += 3

            line = '%s %s' % (label, '.' * n)

            if err: p('RED', '%sERROR' % line)
            elif result: p('GREEN', '%sOK' % line)
            else: p('YELLOW', '%sFAILED' % line)

        percentage_ok = float(self.num_tests_ok) / float(self.total_num_tests) * 100.0
        percentage_failed = float(self.num_tests_failed) / float(self.total_num_tests) * 100.0
        percentage_erroneous = float(self.num_tests_err) / float(self.total_num_tests) * 100.0

        p('NORMAL', '''
Successful tests: %i (%.1f%%)
Failed tests: %i (%.1f%%)
Erroneous tests: %i (%.1f%%)

Total number of tests: %i
''' % (self.num_tests_ok, percentage_ok, self.num_tests_failed, percentage_failed,
self.num_tests_err, percentage_erroneous, self.total_num_tests))
        p('GREEN', 'Unit tests finished')
|
||||
|
||||
|
||||
############################################################################################
|
||||
|
||||
"""
|
||||
New unit test system
|
||||
|
||||
The targets with feature 'test' are executed after they are built
|
||||
bld(features='cprogram cc test', ...)
|
||||
|
||||
To display the results:
|
||||
import UnitTest
|
||||
bld.add_post_fun(UnitTest.summary)
|
||||
"""
|
||||
|
||||
import threading
# Serializes updates to bld.utest_results when test tasks run in parallel.
testlock = threading.Lock()
|
||||
|
||||
def set_options(opt):
    """Register the --alltests command-line option (run tests even if up to date)."""
    opt.add_option(
        '--alltests',
        action='store_true',
        default=True,
        dest='all_tests',
        help='Exec all unit tests',
    )
|
||||
|
||||
@feature('test')
@after('apply_link', 'vars_target_cprogram')
def make_test(self):
    """Attach a 'utest' task to a task generator carrying the 'test' feature.

    Only program targets can be executed; anything else is reported and skipped.
    """
    if 'cprogram' not in self.features:
        Logs.error('test cannot be executed %s' % self)
        return

    # test binaries are never installed
    self.default_install_path = None
    self.create_task('utest', self.link_task.outputs)
|
||||
|
||||
def exec_test(self):
    """Task body: execute one test program and record the outcome.

    Builds a cached environment (bld.all_test_paths) whose library search
    path includes every link task output directory of the current variant,
    runs the program, and appends (filename, returncode, stdout, stderr)
    to bld.utest_results under a lock.
    """
    # NOTE: the original code assigned an unused local 'status = 0'; removed.
    variant = self.env.variant()

    filename = self.inputs[0].abspath(self.env)
    self.ut_exec = getattr(self, 'ut_exec', [filename])
    if getattr(self.generator, 'ut_fun', None):
        # let the task generator customize the command line
        self.generator.ut_fun(self)

    try:
        fu = getattr(self.generator.bld, 'all_test_paths')
    except AttributeError:
        # first test of the build: compute and cache the execution environment
        fu = os.environ.copy()
        self.generator.bld.all_test_paths = fu

        lst = []
        for obj in self.generator.bld.all_task_gen:
            link_task = getattr(obj, 'link_task', None)
            if link_task and link_task.env.variant() == variant:
                lst.append(link_task.outputs[0].parent.abspath(obj.env))

        def add_path(dct, path, var):
            # prepend the build library paths to the inherited value
            dct[var] = os.pathsep.join(Utils.to_list(path) + [os.environ.get(var, '')])

        if sys.platform == 'win32':
            add_path(fu, lst, 'PATH')
        elif sys.platform == 'darwin':
            add_path(fu, lst, 'DYLD_LIBRARY_PATH')
            add_path(fu, lst, 'LD_LIBRARY_PATH')
        else:
            add_path(fu, lst, 'LD_LIBRARY_PATH')

    cwd = getattr(self.generator, 'ut_cwd', '') or self.inputs[0].parent.abspath(self.env)
    proc = Utils.pproc.Popen(self.ut_exec, cwd=cwd, env=fu, stderr=Utils.pproc.PIPE, stdout=Utils.pproc.PIPE)
    (stdout, stderr) = proc.communicate()

    tup = (filename, proc.returncode, stdout, stderr)
    self.generator.utest_result = tup

    testlock.acquire()
    try:
        bld = self.generator.bld
        Logs.debug("ut: %r", tup)
        try:
            bld.utest_results.append(tup)
        except AttributeError:
            # first result: create the list
            bld.utest_results = [tup]
    finally:
        testlock.release()
|
||||
|
||||
# Register the 'utest' task type; each instance runs one test binary.
cls = Task.task_type_from_func('utest', func=exec_test, color='PINK', ext_in='.bin')

# Wrap runnable_status so tests re-run even when considered up to date,
# whenever --alltests is in effect.
old = cls.runnable_status
def test_status(self):
    ret = old(self)
    if ret == SKIP_ME and getattr(Options.options, 'all_tests', False):
        return RUN_ME
    return ret

cls.runnable_status = test_status
cls.quiet = 1
|
||||
|
||||
def summary(bld):
    """Print the pass/fail summary of bld.utest_results (use via bld.add_post_fun)."""
    results = getattr(bld, 'utest_results', [])
    if not results:
        return

    Utils.pprint('CYAN', 'execution summary')

    total = len(results)
    failed = sum(1 for entry in results if entry[1])

    Utils.pprint('CYAN', ' tests that pass %d/%d' % (total - failed, total))
    for (f, code, out, err) in results:
        if not code:
            Utils.pprint('CYAN', ' %s' % f)

    Utils.pprint('CYAN', ' tests that fail %d/%d' % (failed, total))
    for (f, code, out, err) in results:
        if code:
            Utils.pprint('CYAN', ' %s' % f)
|
307
third_party/waf/wafadmin/Tools/vala.py
vendored
307
third_party/waf/wafadmin/Tools/vala.py
vendored
@ -1,307 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# Ali Sabil, 2007
|
||||
|
||||
import os.path, shutil
|
||||
import Task, Runner, Utils, Logs, Build, Node, Options
|
||||
from TaskGen import extension, after, before
|
||||
|
||||
EXT_VALA = ['.vala', '.gs']
|
||||
|
||||
class valac_task(Task.Task):
    """Compile a set of .vala/.gs sources to C with valac, then fix up outputs."""

    vars = ("VALAC", "VALAC_VERSION", "VALAFLAGS")
    before = ("cc", "cxx")

    def run(self):
        """Assemble the valac command line, execute it, and relocate outputs
        that older valac versions write to the wrong directory."""
        env = self.env
        inputs = [a.srcpath(env) for a in self.inputs]
        valac = env['VALAC']
        vala_flags = env.get_flat('VALAFLAGS')
        top_src = self.generator.bld.srcnode.abspath()
        top_bld = self.generator.bld.srcnode.abspath(env)

        # --quiet appeared after 0.1.6
        if env['VALAC_VERSION'] > (0, 1, 6):
            cmd = [valac, '-C', '--quiet', vala_flags]
        else:
            cmd = [valac, '-C', vala_flags]

        if self.threading:
            cmd.append('--thread')

        if self.profile:
            cmd.append('--profile=%s' % self.profile)

        if self.target_glib:
            cmd.append('--target-glib=%s' % self.target_glib)

        features = self.generator.features

        if 'cshlib' in features or 'cstaticlib' in features:
            output_dir = self.outputs[0].bld_dir(env)
            cmd.append('--library ' + self.target)
            if env['VALAC_VERSION'] >= (0, 7, 0):
                for x in self.outputs:
                    if x.name.endswith('.h'):
                        cmd.append('--header ' + x.bldpath(self.env))
            cmd.append('--basedir ' + top_src)
            cmd.append('-d ' + top_bld)
            if env['VALAC_VERSION'] > (0, 7, 2) and hasattr(self, 'gir'):
                cmd.append('--gir=%s.gir' % self.gir)

        else:
            output_dir = self.outputs[0].bld_dir(env)
            cmd.append('-d %s' % output_dir)

        for vapi_dir in self.vapi_dirs:
            cmd.append('--vapidir=%s' % vapi_dir)

        for package in self.packages:
            cmd.append('--pkg %s' % package)

        for package in self.packages_private:
            cmd.append('--pkg %s' % package)

        cmd.append(" ".join(inputs))
        result = self.generator.bld.exec_command(" ".join(cmd))

        if not 'cprogram' in features:
            # generate the .deps file
            if self.packages:
                filename = os.path.join(self.generator.path.abspath(env), "%s.deps" % self.target)
                deps = open(filename, 'w')
                for package in self.packages:
                    deps.write(package + '\n')
                deps.close()

            # handle vala 0.1.6 who doesn't honor --directory for the generated .vapi
            self._fix_output("../%s.vapi" % self.target)
            # handle vala >= 0.1.7 who has a weid definition for --directory
            self._fix_output("%s.vapi" % self.target)
            # handle vala >= 0.2.0 who doesn't honor --directory for the generated .gidl
            self._fix_output("%s.gidl" % self.target)
            # handle vala >= 0.3.6 who doesn't honor --directory for the generated .gir
            self._fix_output("%s.gir" % self.target)
            if hasattr(self, 'gir'):
                self._fix_output("%s.gir" % self.gir)

        first = None
        for node in self.outputs:
            if not first:
                first = node
            else:
                if first.parent.id != node.parent.id:
                    # issue #483
                    if env['VALAC_VERSION'] < (0, 7, 0):
                        shutil.move(first.parent.abspath(self.env) + os.sep + node.name, node.abspath(self.env))
        return result

    def install(self):
        """Install headers, .vapi/.deps and .gir files for library targets."""
        bld = self.generator.bld
        features = self.generator.features

        if self.attr("install_path") and ("cshlib" in features or "cstaticlib" in features):
            headers_list = [o for o in self.outputs if o.suffix() == ".h"]
            vapi_list = [o for o in self.outputs if (o.suffix() in (".vapi", ".deps"))]
            gir_list = [o for o in self.outputs if o.suffix() == ".gir"]

            for header in headers_list:
                top_src = self.generator.bld.srcnode
                package = self.env['PACKAGE']
                try:
                    api_version = Utils.g_module.API_VERSION
                except AttributeError:
                    # derive an API version from VERSION: 0.x -> "0.x", y.* -> "y.0"
                    version = Utils.g_module.VERSION.split(".")
                    if version[0] == "0":
                        api_version = "0." + version[1]
                    else:
                        api_version = version[0] + ".0"
                install_path = '${INCLUDEDIR}/%s-%s/%s' % (package, api_version, header.relpath_gen(top_src))
                bld.install_as(install_path, header, self.env)
            bld.install_files('${DATAROOTDIR}/vala/vapi', vapi_list, self.env)
            bld.install_files('${DATAROOTDIR}/gir-1.0', gir_list, self.env)

    def _fix_output(self, output):
        """Move a file valac left in the top build dir next to the sources.

        NOTE(review): the bare 'except: pass' silently ignores every error,
        including genuine ones — deliberate best-effort, but worth narrowing
        to (IOError, OSError, shutil.Error) if this code is ever revived.
        """
        top_bld = self.generator.bld.srcnode.abspath(self.env)
        try:
            src = os.path.join(top_bld, output)
            dst = self.generator.path.abspath (self.env)
            shutil.move(src, dst)
        except:
            pass
|
||||
|
||||
@extension(EXT_VALA)
def vala_file(self, node):
    """Extension hook: add a .vala/.gs file to the (single) valac task.

    The first call creates the shared valac task and resolves uselib_local
    dependencies transitively; every call registers the node and the expected
    generated outputs (.c, and for libraries .h/.vapi/.gir/.gidl/.deps
    depending on the valac version).
    """
    valatask = getattr(self, "valatask", None)
    # there is only one vala task and it compiles all vala files .. :-/
    if not valatask:
        valatask = self.create_task('valac')
        self.valatask = valatask
        self.includes = Utils.to_list(getattr(self, 'includes', []))
        self.uselib = self.to_list(self.uselib)
        valatask.packages = []
        valatask.packages_private = Utils.to_list(getattr(self, 'packages_private', []))
        valatask.vapi_dirs = []
        valatask.target = self.target
        valatask.threading = False
        valatask.install_path = self.install_path
        valatask.profile = getattr (self, 'profile', 'gobject')
        valatask.target_glib = None #Deprecated

        packages = Utils.to_list(getattr(self, 'packages', []))
        vapi_dirs = Utils.to_list(getattr(self, 'vapi_dirs', []))
        includes = []

        # walk uselib_local transitively, collecting vapi packages/dirs
        if hasattr(self, 'uselib_local'):
            local_packages = Utils.to_list(self.uselib_local)
            seen = []
            while len(local_packages) > 0:
                package = local_packages.pop()
                if package in seen:
                    continue
                seen.append(package)

                # check if the package exists
                package_obj = self.name_to_obj(package)
                if not package_obj:
                    raise Utils.WafError("object '%s' was not found in uselib_local (required by '%s')" % (package, self.name))

                package_name = package_obj.target
                package_node = package_obj.path
                package_dir = package_node.relpath_gen(self.path)

                for task in package_obj.tasks:
                    for output in task.outputs:
                        if output.name == package_name + ".vapi":
                            # build after the task producing the .vapi
                            valatask.set_run_after(task)
                            if package_name not in packages:
                                packages.append(package_name)
                            if package_dir not in vapi_dirs:
                                vapi_dirs.append(package_dir)
                            if package_dir not in includes:
                                includes.append(package_dir)

                if hasattr(package_obj, 'uselib_local'):
                    # queue the transitive dependencies (preserving order)
                    lst = self.to_list(package_obj.uselib_local)
                    lst.reverse()
                    local_packages = [pkg for pkg in lst if pkg not in seen] + local_packages

        valatask.packages = packages
        for vapi_dir in vapi_dirs:
            try:
                valatask.vapi_dirs.append(self.path.find_dir(vapi_dir).abspath())
                valatask.vapi_dirs.append(self.path.find_dir(vapi_dir).abspath(self.env))
            except AttributeError:
                # find_dir returned None
                Logs.warn("Unable to locate Vala API directory: '%s'" % vapi_dir)

        self.includes.append(node.bld.srcnode.abspath())
        self.includes.append(node.bld.srcnode.abspath(self.env))
        for include in includes:
            try:
                self.includes.append(self.path.find_dir(include).abspath())
                self.includes.append(self.path.find_dir(include).abspath(self.env))
            except AttributeError:
                Logs.warn("Unable to locate include directory: '%s'" % include)

        if valatask.profile == 'gobject':
            if hasattr(self, 'target_glib'):
                Logs.warn ('target_glib on vala tasks is deprecated --vala-target-glib=MAJOR.MINOR from the vala tool options')

            if getattr(Options.options, 'vala_target_glib', None):
                valatask.target_glib = Options.options.vala_target_glib

            if not 'GOBJECT' in self.uselib:
                self.uselib.append('GOBJECT')

        if hasattr(self, 'threading'):
            if valatask.profile == 'gobject':
                valatask.threading = self.threading
                if not 'GTHREAD' in self.uselib:
                    self.uselib.append('GTHREAD')
            else:
                #Vala doesn't have threading support for dova nor posix
                Logs.warn("Profile %s does not have threading support" % valatask.profile)

        if hasattr(self, 'gir'):
            valatask.gir = self.gir

    env = valatask.env

    output_nodes = []

    c_node = node.change_ext('.c')
    output_nodes.append(c_node)
    # the generated C file is compiled by the cc/cxx chain
    self.allnodes.append(c_node)

    if env['VALAC_VERSION'] < (0, 7, 0):
        output_nodes.append(node.change_ext('.h'))
    else:
        if not 'cprogram' in self.features:
            output_nodes.append(self.path.find_or_declare('%s.h' % self.target))

    if not 'cprogram' in self.features:
        output_nodes.append(self.path.find_or_declare('%s.vapi' % self.target))
        # metadata format depends on the valac version
        if env['VALAC_VERSION'] > (0, 7, 2):
            if hasattr(self, 'gir'):
                output_nodes.append(self.path.find_or_declare('%s.gir' % self.gir))
        elif env['VALAC_VERSION'] > (0, 3, 5):
            output_nodes.append(self.path.find_or_declare('%s.gir' % self.target))
        elif env['VALAC_VERSION'] > (0, 1, 7):
            output_nodes.append(self.path.find_or_declare('%s.gidl' % self.target))
        if valatask.packages:
            output_nodes.append(self.path.find_or_declare('%s.deps' % self.target))

    valatask.inputs.append(node)
    valatask.outputs.extend(output_nodes)
|
||||
|
||||
def detect(conf):
    """Configuration: find valac, check gobject/gthread flags and the valac version.

    Fails the configuration when valac is older than 0.1.6.
    """
    min_version = (0, 1, 6)
    min_version_str = "%d.%d.%d" % min_version

    valac = conf.find_program('valac', var='VALAC', mandatory=True)

    if not conf.env["HAVE_GOBJECT"]:
        pkg_args = {'package': 'gobject-2.0',
                    'uselib_store': 'GOBJECT',
                    'args': '--cflags --libs'}
        if getattr(Options.options, 'vala_target_glib', None):
            pkg_args['atleast_version'] = Options.options.vala_target_glib

        conf.check_cfg(**pkg_args)

    if not conf.env["HAVE_GTHREAD"]:
        pkg_args = {'package': 'gthread-2.0',
                    'uselib_store': 'GTHREAD',
                    'args': '--cflags --libs'}
        if getattr(Options.options, 'vala_target_glib', None):
            pkg_args['atleast_version'] = Options.options.vala_target_glib

        conf.check_cfg(**pkg_args)

    # parse 'Vala x.y.z' into a (x, y, z) tuple; fall back to (0, 0, 0)
    try:
        output = Utils.cmd_output(valac + " --version", silent=True)
        version = output.split(' ', 1)[-1].strip().split(".")[0:3]
        version = [int(x) for x in version]
        valac_version = tuple(version)
    except Exception:
        valac_version = (0, 0, 0)

    conf.check_message('program version',
            'valac >= ' + min_version_str,
            valac_version >= min_version,
            "%d.%d.%d" % valac_version)

    conf.check_tool('gnu_dirs')

    if valac_version < min_version:
        conf.fatal("valac version too old to be used with this tool")
        return

    conf.env['VALAC_VERSION'] = valac_version
    conf.env['VALAFLAGS'] = ''
|
||||
|
||||
def set_options(opt):
    """Add the Vala-specific command-line options to the option parser."""
    group = opt.add_option_group('Vala Compiler Options')
    group.add_option(
        '--vala-target-glib',
        default=None,
        dest='vala_target_glib',
        metavar='MAJOR.MINOR',
        help='Target version of glib for Vala GObject code generation',
    )
|
44
third_party/waf/wafadmin/Tools/winres.py
vendored
44
third_party/waf/wafadmin/Tools/winres.py
vendored
@ -1,44 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# Brant Young, 2007
|
||||
|
||||
"This hook is called when the class cpp/cc task generator encounters a '.rc' file: X{.rc -> [.res|.rc.o]}"
|
||||
|
||||
import os, sys, re
|
||||
import TaskGen, Task
|
||||
from Utils import quote_whitespace
|
||||
from TaskGen import extension
|
||||
|
||||
# Source extensions handled by this tool.
EXT_WINRC = ['.rc']

# Command template expanded by the task engine for each .rc input.
winrc_str = '${WINRC} ${_CPPDEFFLAGS} ${_CCDEFFLAGS} ${WINRCFLAGS} ${_CPPINCFLAGS} ${_CCINCFLAGS} ${WINRC_TGT_F} ${TGT} ${WINRC_SRC_F} ${SRC}'
|
||||
|
||||
@extension(EXT_WINRC)
def rc_file(self, node):
    """Extension hook: compile a .rc resource file to .res (MSVC) or .rc.o (gcc)."""
    # MSVC's rc takes '/fo' and emits .res; windres emits object files
    suffix = '.res' if self.env['WINRC_TGT_F'] == '/fo' else '.rc.o'

    task = self.create_task('winrc', node, node.change_ext(suffix))
    # the result is linked in like any other compiled object
    self.compiled_tasks.append(task)
|
||||
|
||||
# create our action, for use with rc file
# (must run before compilation so the generated object is available to the link)
Task.simple_task_type('winrc', winrc_str, color='BLUE', before='cc cxx', shell=False)
|
||||
|
||||
def detect(conf):
    """Configuration: locate a Windows resource compiler and set its flag style.

    Uses windres for gcc-family compilers and RC (with '/fo' output syntax)
    for MSVC; fails the configuration when neither can be found.
    """
    v = conf.env

    winrc = v['WINRC']
    v['WINRC_TGT_F'] = '-o'
    v['WINRC_SRC_F'] = '-i'

    # find rc.exe
    if not winrc:
        cc_name = v['CC_NAME']
        if cc_name in ['gcc', 'cc', 'g++', 'c++']:
            winrc = conf.find_program('windres', var='WINRC', path_list = v['PATH'])
        elif cc_name == 'msvc':
            winrc = conf.find_program('RC', var='WINRC', path_list = v['PATH'])
            v['WINRC_TGT_F'] = '/fo'
            v['WINRC_SRC_F'] = ''
        if not winrc:
            conf.fatal('winrc was not found!')

    v['WINRCFLAGS'] = ''
|
78
third_party/waf/wafadmin/Tools/xlc.py
vendored
78
third_party/waf/wafadmin/Tools/xlc.py
vendored
@ -1,78 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# Thomas Nagy, 2006-2008 (ita)
|
||||
# Ralf Habacker, 2006 (rh)
|
||||
# Yinon Ehrlich, 2009
|
||||
# Michael Kuhn, 2009
|
||||
|
||||
import os, sys
|
||||
import Configure, Options, Utils
|
||||
import ccroot, ar
|
||||
from Configure import conftest
|
||||
|
||||
@conftest
def find_xlc(conf):
    """Locate the IBM XL C compiler (xlc_r preferred over xlc) and record it."""
    program = conf.find_program(['xlc_r', 'xlc'], var='CC', mandatory=True)
    conf.env.CC_NAME = 'xlc'
    conf.env.CC = conf.cmd_to_list(program)
|
||||
|
||||
@conftest
def find_cpp(conf):
    """Record a C preprocessor from the env or the OS environment, if any."""
    v = conf.env
    # prefer an explicitly configured CPP, then the environment variable
    cpp = v['CPP'] or conf.environ.get('CPP')
    # falling back to the C compiler is deliberately disabled:
    #if not cpp: cpp = v['CC']
    v['CPP'] = cpp
|
||||
|
||||
@conftest
def xlc_common_flags(conf):
    """Populate the environment with the default flag templates for xlc builds."""
    v = conf.env

    # compiler invocation
    # CPPFLAGS CCDEFINES _CCINCFLAGS _CCDEFFLAGS
    for key, value in [
        ('CCFLAGS_DEBUG', ['-g']),
        ('CCFLAGS_RELEASE', ['-O2']),
        ('CC_SRC_F', ''),
        ('CC_TGT_F', ['-c', '-o', '']),  # shell hack for -MD
        ('CPPPATH_ST', '-I%s'),  # template for adding include paths
    ]:
        v[key] = value

    # linker: default to the compiler driver
    if not v['LINK_CC']:
        v['LINK_CC'] = v['CC']

    for key, value in [
        ('CCLNK_SRC_F', ''),
        ('CCLNK_TGT_F', ['-o', '']),  # shell hack for -MD
        ('LIB_ST', '-l%s'),  # template for adding libs
        ('LIBPATH_ST', '-L%s'),  # template for adding libpaths
        ('STATICLIB_ST', '-l%s'),
        ('STATICLIBPATH_ST', '-L%s'),
        ('RPATH_ST', '-Wl,-rpath,%s'),
        ('CCDEFINES_ST', '-D%s'),
        ('SONAME_ST', ''),
        ('SHLIB_MARKER', ''),
        ('STATICLIB_MARKER', ''),
        ('FULLSTATIC_MARKER', '-static'),
        # program
        ('program_LINKFLAGS', ['-Wl,-brtl']),
        ('program_PATTERN', '%s'),
        # shared library (-fPIC already defines __PIC__; -DPIC kept for safety)
        ('shlib_CCFLAGS', ['-fPIC', '-DPIC']),
        ('shlib_LINKFLAGS', ['-G', '-Wl,-brtl,-bexpfull']),
        ('shlib_PATTERN', 'lib%s.so'),
        # static lib
        ('staticlib_LINKFLAGS', ''),
        ('staticlib_PATTERN', 'lib%s.a'),
    ]:
        v[key] = value
|
||||
|
||||
def detect(conf):
    """Configuration entry point for the xlc C compiler: run all setup steps in order."""
    steps = (
        conf.find_xlc,
        conf.find_cpp,
        conf.find_ar,
        conf.xlc_common_flags,
        conf.cc_load_tools,
        conf.cc_add_flags,
        conf.link_add_flags,
    )
    for step in steps:
        step()
|
78
third_party/waf/wafadmin/Tools/xlcxx.py
vendored
78
third_party/waf/wafadmin/Tools/xlcxx.py
vendored
@ -1,78 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# Thomas Nagy, 2006 (ita)
|
||||
# Ralf Habacker, 2006 (rh)
|
||||
# Yinon Ehrlich, 2009
|
||||
# Michael Kuhn, 2009
|
||||
|
||||
import os, sys
|
||||
import Configure, Options, Utils
|
||||
import ccroot, ar
|
||||
from Configure import conftest
|
||||
|
||||
@conftest
def find_xlcxx(conf):
    """Locate the IBM XL C++ compiler (xlc++_r preferred over xlc++) and record it."""
    program = conf.find_program(['xlc++_r', 'xlc++'], var='CXX', mandatory=True)
    conf.env.CXX_NAME = 'xlc++'
    conf.env.CXX = conf.cmd_to_list(program)
|
||||
|
||||
@conftest
|
||||
def find_cpp(conf):
|
||||
v = conf.env
|
||||
cpp = None
|
||||
if v['CPP']: cpp = v['CPP']
|
||||
elif 'CPP' in conf.environ: cpp = conf.environ['CPP']
|
||||
#if not cpp: cpp = v['CXX']
|
||||
v['CPP'] = cpp
|
||||
|
||||
@conftest
|
||||
def xlcxx_common_flags(conf):
|
||||
v = conf.env
|
||||
|
||||
# CPPFLAGS CXXDEFINES _CXXINCFLAGS _CXXDEFFLAGS
|
||||
v['CXXFLAGS_DEBUG'] = ['-g']
|
||||
v['CXXFLAGS_RELEASE'] = ['-O2']
|
||||
|
||||
v['CXX_SRC_F'] = ''
|
||||
v['CXX_TGT_F'] = ['-c', '-o', ''] # shell hack for -MD
|
||||
v['CPPPATH_ST'] = '-I%s' # template for adding include paths
|
||||
|
||||
# linker
|
||||
if not v['LINK_CXX']: v['LINK_CXX'] = v['CXX']
|
||||
v['CXXLNK_SRC_F'] = ''
|
||||
v['CXXLNK_TGT_F'] = ['-o', ''] # shell hack for -MD
|
||||
|
||||
v['LIB_ST'] = '-l%s' # template for adding libs
|
||||
v['LIBPATH_ST'] = '-L%s' # template for adding libpaths
|
||||
v['STATICLIB_ST'] = '-l%s'
|
||||
v['STATICLIBPATH_ST'] = '-L%s'
|
||||
v['RPATH_ST'] = '-Wl,-rpath,%s'
|
||||
v['CXXDEFINES_ST'] = '-D%s'
|
||||
|
||||
v['SONAME_ST'] = ''
|
||||
v['SHLIB_MARKER'] = ''
|
||||
v['STATICLIB_MARKER'] = ''
|
||||
v['FULLSTATIC_MARKER'] = '-static'
|
||||
|
||||
# program
|
||||
v['program_LINKFLAGS'] = ['-Wl,-brtl']
|
||||
v['program_PATTERN'] = '%s'
|
||||
|
||||
# shared library
|
||||
v['shlib_CXXFLAGS'] = ['-fPIC', '-DPIC'] # avoid using -DPIC, -fPIC aleady defines the __PIC__ macro
|
||||
v['shlib_LINKFLAGS'] = ['-G', '-Wl,-brtl,-bexpfull']
|
||||
v['shlib_PATTERN'] = 'lib%s.so'
|
||||
|
||||
# static lib
|
||||
v['staticlib_LINKFLAGS'] = ''
|
||||
v['staticlib_PATTERN'] = 'lib%s.a'
|
||||
|
||||
def detect(conf):
|
||||
conf.find_xlcxx()
|
||||
conf.find_cpp()
|
||||
conf.find_ar()
|
||||
conf.xlcxx_common_flags()
|
||||
conf.cxx_load_tools()
|
||||
conf.cxx_add_flags()
|
||||
conf.link_add_flags()
|
747
third_party/waf/wafadmin/Utils.py
vendored
747
third_party/waf/wafadmin/Utils.py
vendored
@ -1,747 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# Thomas Nagy, 2005 (ita)
|
||||
|
||||
"""
|
||||
Utilities, the stable ones are the following:
|
||||
|
||||
* h_file: compute a unique value for a file (hash), it uses
|
||||
the module fnv if it is installed (see waf/utils/fnv & http://code.google.com/p/waf/wiki/FAQ)
|
||||
else, md5 (see the python docs)
|
||||
|
||||
For large projects (projects with more than 15000 files) or slow hard disks and filesystems (HFS)
|
||||
it is possible to use a hashing based on the path and the size (may give broken cache results)
|
||||
The method h_file MUST raise an OSError if the file is a folder
|
||||
|
||||
import stat
|
||||
def h_file(filename):
|
||||
st = os.lstat(filename)
|
||||
if stat.S_ISDIR(st[stat.ST_MODE]): raise IOError('not a file')
|
||||
m = Utils.md5()
|
||||
m.update(str(st.st_mtime))
|
||||
m.update(str(st.st_size))
|
||||
m.update(filename)
|
||||
return m.digest()
|
||||
|
||||
To replace the function in your project, use something like this:
|
||||
import Utils
|
||||
Utils.h_file = h_file
|
||||
|
||||
* h_list
|
||||
* h_fun
|
||||
* get_term_cols
|
||||
* ordered_dict
|
||||
|
||||
"""
|
||||
|
||||
import os, sys, imp, string, errno, traceback, inspect, re, shutil, datetime, gc
|
||||
|
||||
# In python 3.0 we can get rid of all this
|
||||
try: from UserDict import UserDict
|
||||
except ImportError: from collections import UserDict
|
||||
if sys.hexversion >= 0x2060000 or os.name == 'java':
|
||||
import subprocess as pproc
|
||||
else:
|
||||
import pproc
|
||||
import Logs
|
||||
from Constants import *
|
||||
|
||||
try:
|
||||
from collections import deque
|
||||
except ImportError:
|
||||
class deque(list):
|
||||
def popleft(self):
|
||||
return self.pop(0)
|
||||
|
||||
is_win32 = sys.platform == 'win32'
|
||||
|
||||
try:
|
||||
# defaultdict in python 2.5
|
||||
from collections import defaultdict as DefaultDict
|
||||
except ImportError:
|
||||
class DefaultDict(dict):
|
||||
def __init__(self, default_factory):
|
||||
super(DefaultDict, self).__init__()
|
||||
self.default_factory = default_factory
|
||||
def __getitem__(self, key):
|
||||
try:
|
||||
return super(DefaultDict, self).__getitem__(key)
|
||||
except KeyError:
|
||||
value = self.default_factory()
|
||||
self[key] = value
|
||||
return value
|
||||
|
||||
class WafError(Exception):
|
||||
def __init__(self, *args):
|
||||
self.args = args
|
||||
try:
|
||||
self.stack = traceback.extract_stack()
|
||||
except:
|
||||
pass
|
||||
Exception.__init__(self, *args)
|
||||
def __str__(self):
|
||||
return str(len(self.args) == 1 and self.args[0] or self.args)
|
||||
|
||||
class WscriptError(WafError):
|
||||
def __init__(self, message, wscript_file=None):
|
||||
if wscript_file:
|
||||
self.wscript_file = wscript_file
|
||||
self.wscript_line = None
|
||||
else:
|
||||
try:
|
||||
(self.wscript_file, self.wscript_line) = self.locate_error()
|
||||
except:
|
||||
(self.wscript_file, self.wscript_line) = (None, None)
|
||||
|
||||
msg_file_line = ''
|
||||
if self.wscript_file:
|
||||
msg_file_line = "%s:" % self.wscript_file
|
||||
if self.wscript_line:
|
||||
msg_file_line += "%s:" % self.wscript_line
|
||||
err_message = "%s error: %s" % (msg_file_line, message)
|
||||
WafError.__init__(self, err_message)
|
||||
|
||||
def locate_error(self):
|
||||
stack = traceback.extract_stack()
|
||||
stack.reverse()
|
||||
for frame in stack:
|
||||
file_name = os.path.basename(frame[0])
|
||||
is_wscript = (file_name == WSCRIPT_FILE or file_name == WSCRIPT_BUILD_FILE)
|
||||
if is_wscript:
|
||||
return (frame[0], frame[1])
|
||||
return (None, None)
|
||||
|
||||
indicator = is_win32 and '\x1b[A\x1b[K%s%s%s\r' or '\x1b[K%s%s%s\r'
|
||||
|
||||
try:
|
||||
from fnv import new as md5
|
||||
import Constants
|
||||
Constants.SIG_NIL = 'signofnv'
|
||||
|
||||
def h_file(filename):
|
||||
m = md5()
|
||||
try:
|
||||
m.hfile(filename)
|
||||
x = m.digest()
|
||||
if x is None: raise OSError("not a file")
|
||||
return x
|
||||
except SystemError:
|
||||
raise OSError("not a file" + filename)
|
||||
|
||||
except ImportError:
|
||||
try:
|
||||
try:
|
||||
from hashlib import md5
|
||||
except ImportError:
|
||||
from md5 import md5
|
||||
|
||||
def h_file(filename):
|
||||
f = open(filename, 'rb')
|
||||
m = md5()
|
||||
while (filename):
|
||||
filename = f.read(100000)
|
||||
m.update(filename)
|
||||
f.close()
|
||||
return m.digest()
|
||||
except ImportError:
|
||||
# portability fixes may be added elsewhere (although, md5 should be everywhere by now)
|
||||
md5 = None
|
||||
|
||||
def readf(fname, m='r', encoding='ISO8859-1'):
|
||||
"""backported from waf 1.8"""
|
||||
if sys.hexversion > 0x3000000 and not 'b' in m:
|
||||
m += 'b'
|
||||
f = open(fname, m)
|
||||
try:
|
||||
txt = f.read()
|
||||
finally:
|
||||
f.close()
|
||||
if encoding:
|
||||
txt = txt.decode(encoding)
|
||||
else:
|
||||
txt = txt.decode()
|
||||
else:
|
||||
f = open(fname, m)
|
||||
try:
|
||||
txt = f.read()
|
||||
finally:
|
||||
f.close()
|
||||
return txt
|
||||
|
||||
def writef(fname, data, m='w', encoding='ISO8859-1'):
|
||||
"""backported from waf 1.8"""
|
||||
if sys.hexversion > 0x3000000 and not 'b' in m:
|
||||
data = data.encode(encoding)
|
||||
m += 'b'
|
||||
f = open(fname, m)
|
||||
try:
|
||||
f.write(data)
|
||||
finally:
|
||||
f.close()
|
||||
|
||||
class ordered_dict(UserDict):
|
||||
def __init__(self, dict = None):
|
||||
self.allkeys = []
|
||||
UserDict.__init__(self, dict)
|
||||
|
||||
def __delitem__(self, key):
|
||||
self.allkeys.remove(key)
|
||||
UserDict.__delitem__(self, key)
|
||||
|
||||
def __setitem__(self, key, item):
|
||||
if key not in self.allkeys: self.allkeys.append(key)
|
||||
UserDict.__setitem__(self, key, item)
|
||||
|
||||
def exec_command(s, **kw):
|
||||
if 'log' in kw:
|
||||
kw['stdout'] = kw['stderr'] = kw['log']
|
||||
del(kw['log'])
|
||||
kw['shell'] = isinstance(s, str)
|
||||
|
||||
try:
|
||||
proc = pproc.Popen(s, **kw)
|
||||
return proc.wait()
|
||||
except OSError:
|
||||
return -1
|
||||
|
||||
if is_win32:
|
||||
def exec_command(s, **kw):
|
||||
if 'log' in kw:
|
||||
kw['stdout'] = kw['stderr'] = kw['log']
|
||||
del(kw['log'])
|
||||
kw['shell'] = isinstance(s, str)
|
||||
|
||||
if len(s) > 2000:
|
||||
startupinfo = pproc.STARTUPINFO()
|
||||
startupinfo.dwFlags |= pproc.STARTF_USESHOWWINDOW
|
||||
kw['startupinfo'] = startupinfo
|
||||
|
||||
try:
|
||||
if 'stdout' not in kw:
|
||||
kw['stdout'] = pproc.PIPE
|
||||
kw['stderr'] = pproc.PIPE
|
||||
kw['universal_newlines'] = True
|
||||
proc = pproc.Popen(s,**kw)
|
||||
(stdout, stderr) = proc.communicate()
|
||||
Logs.info(stdout)
|
||||
if stderr:
|
||||
Logs.error(stderr)
|
||||
return proc.returncode
|
||||
else:
|
||||
proc = pproc.Popen(s,**kw)
|
||||
return proc.wait()
|
||||
except OSError:
|
||||
return -1
|
||||
|
||||
listdir = os.listdir
|
||||
if is_win32:
|
||||
def listdir_win32(s):
|
||||
if re.match('^[A-Za-z]:$', s):
|
||||
# os.path.isdir fails if s contains only the drive name... (x:)
|
||||
s += os.sep
|
||||
if not os.path.isdir(s):
|
||||
e = OSError()
|
||||
e.errno = errno.ENOENT
|
||||
raise e
|
||||
return os.listdir(s)
|
||||
listdir = listdir_win32
|
||||
|
||||
def waf_version(mini = 0x010000, maxi = 0x100000):
|
||||
"Halts if the waf version is wrong"
|
||||
ver = HEXVERSION
|
||||
try: min_val = mini + 0
|
||||
except TypeError: min_val = int(mini.replace('.', '0'), 16)
|
||||
|
||||
if min_val > ver:
|
||||
Logs.error("waf version should be at least %s (%s found)" % (mini, ver))
|
||||
sys.exit(1)
|
||||
|
||||
try: max_val = maxi + 0
|
||||
except TypeError: max_val = int(maxi.replace('.', '0'), 16)
|
||||
|
||||
if max_val < ver:
|
||||
Logs.error("waf version should be at most %s (%s found)" % (maxi, ver))
|
||||
sys.exit(1)
|
||||
|
||||
def python_24_guard():
|
||||
if sys.hexversion < 0x20400f0 or sys.hexversion >= 0x3000000:
|
||||
raise ImportError("Waf requires Python >= 2.3 but the raw source requires Python 2.4, 2.5 or 2.6")
|
||||
|
||||
def ex_stack():
|
||||
exc_type, exc_value, tb = sys.exc_info()
|
||||
if Logs.verbose > 1:
|
||||
exc_lines = traceback.format_exception(exc_type, exc_value, tb)
|
||||
return ''.join(exc_lines)
|
||||
return str(exc_value)
|
||||
|
||||
def to_list(sth):
|
||||
if isinstance(sth, str):
|
||||
return sth.split()
|
||||
else:
|
||||
return sth
|
||||
|
||||
g_loaded_modules = {}
|
||||
"index modules by absolute path"
|
||||
|
||||
g_module=None
|
||||
"the main module is special"
|
||||
|
||||
def load_module(file_path, name=WSCRIPT_FILE):
|
||||
"this function requires an absolute path"
|
||||
try:
|
||||
return g_loaded_modules[file_path]
|
||||
except KeyError:
|
||||
pass
|
||||
|
||||
module = imp.new_module(name)
|
||||
|
||||
try:
|
||||
code = readf(file_path, m='rU')
|
||||
except (IOError, OSError):
|
||||
raise WscriptError('Could not read the file %r' % file_path)
|
||||
|
||||
module.waf_hash_val = code
|
||||
|
||||
dt = os.path.dirname(file_path)
|
||||
sys.path.insert(0, dt)
|
||||
try:
|
||||
exec(compile(code, file_path, 'exec'), module.__dict__)
|
||||
except Exception:
|
||||
exc_type, exc_value, tb = sys.exc_info()
|
||||
raise WscriptError("".join(traceback.format_exception(exc_type, exc_value, tb)), file_path)
|
||||
sys.path.remove(dt)
|
||||
|
||||
g_loaded_modules[file_path] = module
|
||||
|
||||
return module
|
||||
|
||||
def set_main_module(file_path):
|
||||
"Load custom options, if defined"
|
||||
global g_module
|
||||
g_module = load_module(file_path, 'wscript_main')
|
||||
g_module.root_path = file_path
|
||||
|
||||
try:
|
||||
g_module.APPNAME
|
||||
except:
|
||||
g_module.APPNAME = 'noname'
|
||||
try:
|
||||
g_module.VERSION
|
||||
except:
|
||||
g_module.VERSION = '1.0'
|
||||
|
||||
# note: to register the module globally, use the following:
|
||||
# sys.modules['wscript_main'] = g_module
|
||||
|
||||
def to_hashtable(s):
|
||||
"used for importing env files"
|
||||
tbl = {}
|
||||
lst = s.split('\n')
|
||||
for line in lst:
|
||||
if not line: continue
|
||||
mems = line.split('=')
|
||||
tbl[mems[0]] = mems[1]
|
||||
return tbl
|
||||
|
||||
def get_term_cols():
|
||||
"console width"
|
||||
return 80
|
||||
try:
|
||||
import struct, fcntl, termios
|
||||
except ImportError:
|
||||
pass
|
||||
else:
|
||||
if Logs.got_tty:
|
||||
def myfun():
|
||||
dummy_lines, cols = struct.unpack("HHHH", \
|
||||
fcntl.ioctl(sys.stderr.fileno(),termios.TIOCGWINSZ , \
|
||||
struct.pack("HHHH", 0, 0, 0, 0)))[:2]
|
||||
return cols
|
||||
# we actually try the function once to see if it is suitable
|
||||
try:
|
||||
myfun()
|
||||
except:
|
||||
pass
|
||||
else:
|
||||
get_term_cols = myfun
|
||||
|
||||
rot_idx = 0
|
||||
rot_chr = ['\\', '|', '/', '-']
|
||||
"the rotation character in the progress bar"
|
||||
|
||||
|
||||
def split_path(path):
|
||||
return path.split('/')
|
||||
|
||||
def split_path_cygwin(path):
|
||||
if path.startswith('//'):
|
||||
ret = path.split('/')[2:]
|
||||
ret[0] = '/' + ret[0]
|
||||
return ret
|
||||
return path.split('/')
|
||||
|
||||
re_sp = re.compile('[/\\\\]')
|
||||
def split_path_win32(path):
|
||||
if path.startswith('\\\\'):
|
||||
ret = re.split(re_sp, path)[2:]
|
||||
ret[0] = '\\' + ret[0]
|
||||
return ret
|
||||
return re.split(re_sp, path)
|
||||
|
||||
if sys.platform == 'cygwin':
|
||||
split_path = split_path_cygwin
|
||||
elif is_win32:
|
||||
split_path = split_path_win32
|
||||
|
||||
def copy_attrs(orig, dest, names, only_if_set=False):
|
||||
for a in to_list(names):
|
||||
u = getattr(orig, a, ())
|
||||
if u or not only_if_set:
|
||||
setattr(dest, a, u)
|
||||
|
||||
def def_attrs(cls, **kw):
|
||||
'''
|
||||
set attributes for class.
|
||||
@param cls [any class]: the class to update the given attributes in.
|
||||
@param kw [dictionary]: dictionary of attributes names and values.
|
||||
|
||||
if the given class hasn't one (or more) of these attributes, add the attribute with its value to the class.
|
||||
'''
|
||||
for k, v in kw.iteritems():
|
||||
if not hasattr(cls, k):
|
||||
setattr(cls, k, v)
|
||||
|
||||
def quote_define_name(path):
|
||||
fu = re.compile("[^a-zA-Z0-9]").sub("_", path)
|
||||
fu = fu.upper()
|
||||
return fu
|
||||
|
||||
def quote_whitespace(path):
|
||||
return (path.strip().find(' ') > 0 and '"%s"' % path or path).replace('""', '"')
|
||||
|
||||
def trimquotes(s):
|
||||
if not s: return ''
|
||||
s = s.rstrip()
|
||||
if s[0] == "'" and s[-1] == "'": return s[1:-1]
|
||||
return s
|
||||
|
||||
def h_list(lst):
|
||||
m = md5()
|
||||
m.update(str(lst))
|
||||
return m.digest()
|
||||
|
||||
def h_fun(fun):
|
||||
try:
|
||||
return fun.code
|
||||
except AttributeError:
|
||||
try:
|
||||
h = inspect.getsource(fun)
|
||||
except IOError:
|
||||
h = "nocode"
|
||||
try:
|
||||
fun.code = h
|
||||
except AttributeError:
|
||||
pass
|
||||
return h
|
||||
|
||||
def pprint(col, str, label='', sep='\n'):
|
||||
"print messages in color"
|
||||
sys.stderr.write("%s%s%s %s%s" % (Logs.colors(col), str, Logs.colors.NORMAL, label, sep))
|
||||
|
||||
def check_dir(path):
|
||||
"""If a folder doesn't exists, create it."""
|
||||
if not os.path.isdir(path):
|
||||
try:
|
||||
os.makedirs(path)
|
||||
except OSError, e:
|
||||
if not os.path.isdir(path):
|
||||
raise WafError("Cannot create the folder '%s' (error: %s)" % (path, e))
|
||||
|
||||
def cmd_output(cmd, **kw):
|
||||
|
||||
silent = False
|
||||
if 'silent' in kw:
|
||||
silent = kw['silent']
|
||||
del(kw['silent'])
|
||||
|
||||
if 'e' in kw:
|
||||
tmp = kw['e']
|
||||
del(kw['e'])
|
||||
kw['env'] = tmp
|
||||
|
||||
kw['shell'] = isinstance(cmd, str)
|
||||
kw['stdout'] = pproc.PIPE
|
||||
if silent:
|
||||
kw['stderr'] = pproc.PIPE
|
||||
|
||||
try:
|
||||
p = pproc.Popen(cmd, **kw)
|
||||
output = p.communicate()[0]
|
||||
except OSError, e:
|
||||
raise ValueError(str(e))
|
||||
|
||||
if p.returncode:
|
||||
if not silent:
|
||||
msg = "command execution failed: %s -> %r" % (cmd, str(output))
|
||||
raise ValueError(msg)
|
||||
output = ''
|
||||
return output
|
||||
|
||||
reg_subst = re.compile(r"(\\\\)|(\$\$)|\$\{([^}]+)\}")
|
||||
def subst_vars(expr, params):
|
||||
"substitute ${PREFIX}/bin in /usr/local/bin"
|
||||
def repl_var(m):
|
||||
if m.group(1):
|
||||
return '\\'
|
||||
if m.group(2):
|
||||
return '$'
|
||||
try:
|
||||
# environments may contain lists
|
||||
return params.get_flat(m.group(3))
|
||||
except AttributeError:
|
||||
return params[m.group(3)]
|
||||
return reg_subst.sub(repl_var, expr)
|
||||
|
||||
def unversioned_sys_platform_to_binary_format(unversioned_sys_platform):
|
||||
"infers the binary format from the unversioned_sys_platform name."
|
||||
|
||||
if unversioned_sys_platform in ('linux', 'freebsd', 'netbsd', 'openbsd', 'sunos', 'gnu'):
|
||||
return 'elf'
|
||||
elif unversioned_sys_platform == 'darwin':
|
||||
return 'mac-o'
|
||||
elif unversioned_sys_platform in ('win32', 'cygwin', 'uwin', 'msys'):
|
||||
return 'pe'
|
||||
# TODO we assume all other operating systems are elf, which is not true.
|
||||
# we may set this to 'unknown' and have ccroot and other tools handle the case "gracefully" (whatever that means).
|
||||
return 'elf'
|
||||
|
||||
def unversioned_sys_platform():
|
||||
"""returns an unversioned name from sys.platform.
|
||||
sys.plaform is not very well defined and depends directly on the python source tree.
|
||||
The version appended to the names is unreliable as it's taken from the build environment at the time python was built,
|
||||
i.e., it's possible to get freebsd7 on a freebsd8 system.
|
||||
So we remove the version from the name, except for special cases where the os has a stupid name like os2 or win32.
|
||||
Some possible values of sys.platform are, amongst others:
|
||||
aix3 aix4 atheos beos5 darwin freebsd2 freebsd3 freebsd4 freebsd5 freebsd6 freebsd7
|
||||
generic gnu0 irix5 irix6 linux2 mac netbsd1 next3 os2emx riscos sunos5 unixware7
|
||||
Investigating the python source tree may reveal more values.
|
||||
"""
|
||||
s = sys.platform
|
||||
if s == 'java':
|
||||
# The real OS is hidden under the JVM.
|
||||
from java.lang import System
|
||||
s = System.getProperty('os.name')
|
||||
# see http://lopica.sourceforge.net/os.html for a list of possible values
|
||||
if s == 'Mac OS X':
|
||||
return 'darwin'
|
||||
elif s.startswith('Windows '):
|
||||
return 'win32'
|
||||
elif s == 'OS/2':
|
||||
return 'os2'
|
||||
elif s == 'HP-UX':
|
||||
return 'hpux'
|
||||
elif s in ('SunOS', 'Solaris'):
|
||||
return 'sunos'
|
||||
else: s = s.lower()
|
||||
if s == 'win32' or s.endswith('os2') and s != 'sunos2': return s
|
||||
return re.split('\d+$', s)[0]
|
||||
|
||||
#@deprecated('use unversioned_sys_platform instead')
|
||||
def detect_platform():
|
||||
"""this function has been in the Utils module for some time.
|
||||
It's hard to guess what people have used it for.
|
||||
It seems its goal is to return an unversionned sys.platform, but it's not handling all platforms.
|
||||
For example, the version is not removed on freebsd and netbsd, amongst others.
|
||||
"""
|
||||
s = sys.platform
|
||||
|
||||
# known POSIX
|
||||
for x in 'cygwin linux irix sunos hpux aix darwin gnu'.split():
|
||||
# sys.platform may be linux2
|
||||
if s.find(x) >= 0:
|
||||
return x
|
||||
|
||||
# unknown POSIX
|
||||
if os.name in 'posix java os2'.split():
|
||||
return os.name
|
||||
|
||||
return s
|
||||
|
||||
def load_tool(tool, tooldir=None):
|
||||
'''
|
||||
load_tool: import a Python module, optionally using several directories.
|
||||
@param tool [string]: name of tool to import.
|
||||
@param tooldir [list]: directories to look for the tool.
|
||||
@return: the loaded module.
|
||||
|
||||
Warning: this function is not thread-safe: plays with sys.path,
|
||||
so must run in sequence.
|
||||
'''
|
||||
if tooldir:
|
||||
assert isinstance(tooldir, list)
|
||||
sys.path = tooldir + sys.path
|
||||
else:
|
||||
tooldir = []
|
||||
try:
|
||||
return __import__(tool)
|
||||
finally:
|
||||
for dt in tooldir:
|
||||
sys.path.remove(dt)
|
||||
|
||||
def nada(*k, **kw):
|
||||
"""A function that does nothing"""
|
||||
pass
|
||||
|
||||
def diff_path(top, subdir):
|
||||
"""difference between two absolute paths"""
|
||||
top = os.path.normpath(top).replace('\\', '/').split('/')
|
||||
subdir = os.path.normpath(subdir).replace('\\', '/').split('/')
|
||||
if len(top) == len(subdir): return ''
|
||||
diff = subdir[len(top) - len(subdir):]
|
||||
return os.path.join(*diff)
|
||||
|
||||
class Context(object):
|
||||
"""A base class for commands to be executed from Waf scripts"""
|
||||
|
||||
def set_curdir(self, dir):
|
||||
self.curdir_ = dir
|
||||
|
||||
def get_curdir(self):
|
||||
try:
|
||||
return self.curdir_
|
||||
except AttributeError:
|
||||
self.curdir_ = os.getcwd()
|
||||
return self.get_curdir()
|
||||
|
||||
curdir = property(get_curdir, set_curdir)
|
||||
|
||||
def recurse(self, dirs, name=''):
|
||||
"""The function for calling scripts from folders, it tries to call wscript + function_name
|
||||
and if that file does not exist, it will call the method 'function_name' from a file named wscript
|
||||
the dirs can be a list of folders or a string containing space-separated folder paths
|
||||
"""
|
||||
if not name:
|
||||
name = inspect.stack()[1][3]
|
||||
|
||||
if isinstance(dirs, str):
|
||||
dirs = to_list(dirs)
|
||||
|
||||
for x in dirs:
|
||||
if os.path.isabs(x):
|
||||
nexdir = x
|
||||
else:
|
||||
nexdir = os.path.join(self.curdir, x)
|
||||
|
||||
base = os.path.join(nexdir, WSCRIPT_FILE)
|
||||
file_path = base + '_' + name
|
||||
|
||||
try:
|
||||
txt = readf(file_path, m='rU')
|
||||
except (OSError, IOError):
|
||||
try:
|
||||
module = load_module(base)
|
||||
except OSError:
|
||||
raise WscriptError('No such script %s' % base)
|
||||
|
||||
try:
|
||||
f = module.__dict__[name]
|
||||
except KeyError:
|
||||
raise WscriptError('No function %s defined in %s' % (name, base))
|
||||
|
||||
if getattr(self.__class__, 'pre_recurse', None):
|
||||
self.pre_recurse(f, base, nexdir)
|
||||
old = self.curdir
|
||||
self.curdir = nexdir
|
||||
try:
|
||||
f(self)
|
||||
finally:
|
||||
self.curdir = old
|
||||
if getattr(self.__class__, 'post_recurse', None):
|
||||
self.post_recurse(module, base, nexdir)
|
||||
else:
|
||||
dc = {'ctx': self}
|
||||
if getattr(self.__class__, 'pre_recurse', None):
|
||||
dc = self.pre_recurse(txt, file_path, nexdir)
|
||||
old = self.curdir
|
||||
self.curdir = nexdir
|
||||
try:
|
||||
try:
|
||||
exec(compile(txt, file_path, 'exec'), dc)
|
||||
except Exception:
|
||||
exc_type, exc_value, tb = sys.exc_info()
|
||||
raise WscriptError("".join(traceback.format_exception(exc_type, exc_value, tb)), base)
|
||||
finally:
|
||||
self.curdir = old
|
||||
if getattr(self.__class__, 'post_recurse', None):
|
||||
self.post_recurse(txt, file_path, nexdir)
|
||||
|
||||
if is_win32:
|
||||
old = shutil.copy2
|
||||
def copy2(src, dst):
|
||||
old(src, dst)
|
||||
shutil.copystat(src, src)
|
||||
setattr(shutil, 'copy2', copy2)
|
||||
|
||||
def zip_folder(dir, zip_file_name, prefix):
|
||||
"""
|
||||
prefix represents the app to add in the archive
|
||||
"""
|
||||
import zipfile
|
||||
zip = zipfile.ZipFile(zip_file_name, 'w', compression=zipfile.ZIP_DEFLATED)
|
||||
base = os.path.abspath(dir)
|
||||
|
||||
if prefix:
|
||||
if prefix[-1] != os.sep:
|
||||
prefix += os.sep
|
||||
|
||||
n = len(base)
|
||||
for root, dirs, files in os.walk(base):
|
||||
for f in files:
|
||||
archive_name = prefix + root[n:] + os.sep + f
|
||||
zip.write(root + os.sep + f, archive_name, zipfile.ZIP_DEFLATED)
|
||||
zip.close()
|
||||
|
||||
def get_elapsed_time(start):
|
||||
"Format a time delta (datetime.timedelta) using the format DdHhMmS.MSs"
|
||||
delta = datetime.datetime.now() - start
|
||||
# cast to int necessary for python 3.0
|
||||
days = int(delta.days)
|
||||
hours = int(delta.seconds / 3600)
|
||||
minutes = int((delta.seconds - hours * 3600) / 60)
|
||||
seconds = delta.seconds - hours * 3600 - minutes * 60 \
|
||||
+ float(delta.microseconds) / 1000 / 1000
|
||||
result = ''
|
||||
if days:
|
||||
result += '%dd' % days
|
||||
if days or hours:
|
||||
result += '%dh' % hours
|
||||
if days or hours or minutes:
|
||||
result += '%dm' % minutes
|
||||
return '%s%.3fs' % (result, seconds)
|
||||
|
||||
if os.name == 'java':
|
||||
# For Jython (they should really fix the inconsistency)
|
||||
try:
|
||||
gc.disable()
|
||||
gc.enable()
|
||||
except NotImplementedError:
|
||||
gc.disable = gc.enable
|
||||
|
||||
def run_once(fun):
|
||||
"""
|
||||
decorator, make a function cache its results, use like this:
|
||||
|
||||
@run_once
|
||||
def foo(k):
|
||||
return 345*2343
|
||||
"""
|
||||
cache = {}
|
||||
def wrap(k):
|
||||
try:
|
||||
return cache[k]
|
||||
except KeyError:
|
||||
ret = fun(k)
|
||||
cache[k] = ret
|
||||
return ret
|
||||
wrap.__cache__ = cache
|
||||
return wrap
|
3
third_party/waf/wafadmin/__init__.py
vendored
3
third_party/waf/wafadmin/__init__.py
vendored
@ -1,3 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# Thomas Nagy, 2005 (ita)
|
235
third_party/waf/wafadmin/ansiterm.py
vendored
235
third_party/waf/wafadmin/ansiterm.py
vendored
@ -1,235 +0,0 @@
|
||||
import sys, os
|
||||
try:
|
||||
if (not sys.stderr.isatty()) or (not sys.stdout.isatty()):
|
||||
raise ValueError('not a tty')
|
||||
|
||||
from ctypes import *
|
||||
|
||||
class COORD(Structure):
|
||||
_fields_ = [("X", c_short), ("Y", c_short)]
|
||||
|
||||
class SMALL_RECT(Structure):
|
||||
_fields_ = [("Left", c_short), ("Top", c_short), ("Right", c_short), ("Bottom", c_short)]
|
||||
|
||||
class CONSOLE_SCREEN_BUFFER_INFO(Structure):
|
||||
_fields_ = [("Size", COORD), ("CursorPosition", COORD), ("Attributes", c_short), ("Window", SMALL_RECT), ("MaximumWindowSize", COORD)]
|
||||
|
||||
class CONSOLE_CURSOR_INFO(Structure):
|
||||
_fields_ = [('dwSize',c_ulong), ('bVisible', c_int)]
|
||||
|
||||
sbinfo = CONSOLE_SCREEN_BUFFER_INFO()
|
||||
csinfo = CONSOLE_CURSOR_INFO()
|
||||
hconsole = windll.kernel32.GetStdHandle(-11)
|
||||
windll.kernel32.GetConsoleScreenBufferInfo(hconsole, byref(sbinfo))
|
||||
if sbinfo.Size.X < 10 or sbinfo.Size.Y < 10: raise Exception('small console')
|
||||
windll.kernel32.GetConsoleCursorInfo(hconsole, byref(csinfo))
|
||||
except Exception:
|
||||
pass
|
||||
else:
|
||||
import re, threading
|
||||
|
||||
to_int = lambda number, default: number and int(number) or default
|
||||
wlock = threading.Lock()
|
||||
|
||||
STD_OUTPUT_HANDLE = -11
|
||||
STD_ERROR_HANDLE = -12
|
||||
|
||||
class AnsiTerm(object):
|
||||
def __init__(self):
|
||||
self.hconsole = windll.kernel32.GetStdHandle(STD_OUTPUT_HANDLE)
|
||||
self.cursor_history = []
|
||||
self.orig_sbinfo = CONSOLE_SCREEN_BUFFER_INFO()
|
||||
self.orig_csinfo = CONSOLE_CURSOR_INFO()
|
||||
windll.kernel32.GetConsoleScreenBufferInfo(self.hconsole, byref(self.orig_sbinfo))
|
||||
windll.kernel32.GetConsoleCursorInfo(hconsole, byref(self.orig_csinfo))
|
||||
|
||||
|
||||
def screen_buffer_info(self):
|
||||
sbinfo = CONSOLE_SCREEN_BUFFER_INFO()
|
||||
windll.kernel32.GetConsoleScreenBufferInfo(self.hconsole, byref(sbinfo))
|
||||
return sbinfo
|
||||
|
||||
def clear_line(self, param):
|
||||
mode = param and int(param) or 0
|
||||
sbinfo = self.screen_buffer_info()
|
||||
if mode == 1: # Clear from begining of line to cursor position
|
||||
line_start = COORD(0, sbinfo.CursorPosition.Y)
|
||||
line_length = sbinfo.Size.X
|
||||
elif mode == 2: # Clear entire line
|
||||
line_start = COORD(sbinfo.CursorPosition.X, sbinfo.CursorPosition.Y)
|
||||
line_length = sbinfo.Size.X - sbinfo.CursorPosition.X
|
||||
else: # Clear from cursor position to end of line
|
||||
line_start = sbinfo.CursorPosition
|
||||
line_length = sbinfo.Size.X - sbinfo.CursorPosition.X
|
||||
chars_written = c_int()
|
||||
windll.kernel32.FillConsoleOutputCharacterA(self.hconsole, c_char(' '), line_length, line_start, byref(chars_written))
|
||||
windll.kernel32.FillConsoleOutputAttribute(self.hconsole, sbinfo.Attributes, line_length, line_start, byref(chars_written))
|
||||
|
||||
def clear_screen(self, param):
|
||||
mode = to_int(param, 0)
|
||||
sbinfo = self.screen_buffer_info()
|
||||
if mode == 1: # Clear from begining of screen to cursor position
|
||||
clear_start = COORD(0, 0)
|
||||
clear_length = sbinfo.CursorPosition.X * sbinfo.CursorPosition.Y
|
||||
elif mode == 2: # Clear entire screen and return cursor to home
|
||||
clear_start = COORD(0, 0)
|
||||
clear_length = sbinfo.Size.X * sbinfo.Size.Y
|
||||
windll.kernel32.SetConsoleCursorPosition(self.hconsole, clear_start)
|
||||
else: # Clear from cursor position to end of screen
|
||||
clear_start = sbinfo.CursorPosition
|
||||
clear_length = ((sbinfo.Size.X - sbinfo.CursorPosition.X) + sbinfo.Size.X * (sbinfo.Size.Y - sbinfo.CursorPosition.Y))
|
||||
chars_written = c_int()
|
||||
windll.kernel32.FillConsoleOutputCharacterA(self.hconsole, c_char(' '), clear_length, clear_start, byref(chars_written))
|
||||
windll.kernel32.FillConsoleOutputAttribute(self.hconsole, sbinfo.Attributes, clear_length, clear_start, byref(chars_written))
|
||||
|
||||
def push_cursor(self, param):
|
||||
sbinfo = self.screen_buffer_info()
|
||||
self.cursor_history.push(sbinfo.CursorPosition)
|
||||
|
||||
def pop_cursor(self, param):
|
||||
if self.cursor_history:
|
||||
old_pos = self.cursor_history.pop()
|
||||
windll.kernel32.SetConsoleCursorPosition(self.hconsole, old_pos)
|
||||
|
||||
def set_cursor(self, param):
|
||||
x, sep, y = param.partition(';')
|
||||
x = to_int(x, 1) - 1
|
||||
y = to_int(y, 1) - 1
|
||||
sbinfo = self.screen_buffer_info()
|
||||
new_pos = COORD(
|
||||
min(max(0, x), sbinfo.Size.X),
|
||||
min(max(0, y), sbinfo.Size.Y)
|
||||
)
|
||||
windll.kernel32.SetConsoleCursorPosition(self.hconsole, new_pos)
|
||||
|
||||
def set_column(self, param):
|
||||
x = to_int(param, 1) - 1
|
||||
sbinfo = self.screen_buffer_info()
|
||||
new_pos = COORD(
|
||||
min(max(0, x), sbinfo.Size.X),
|
||||
sbinfo.CursorPosition.Y
|
||||
)
|
||||
windll.kernel32.SetConsoleCursorPosition(self.hconsole, new_pos)
|
||||
|
||||
def move_cursor(self, x_offset=0, y_offset=0):
|
||||
sbinfo = self.screen_buffer_info()
|
||||
new_pos = COORD(
|
||||
min(max(0, sbinfo.CursorPosition.X + x_offset), sbinfo.Size.X),
|
||||
min(max(0, sbinfo.CursorPosition.Y + y_offset), sbinfo.Size.Y)
|
||||
)
|
||||
windll.kernel32.SetConsoleCursorPosition(self.hconsole, new_pos)
|
||||
|
||||
def move_up(self, param):
|
||||
self.move_cursor(y_offset = -to_int(param, 1))
|
||||
|
||||
def move_down(self, param):
|
||||
self.move_cursor(y_offset = to_int(param, 1))
|
||||
|
||||
def move_left(self, param):
|
||||
self.move_cursor(x_offset = -to_int(param, 1))
|
||||
|
||||
def move_right(self, param):
|
||||
self.move_cursor(x_offset = to_int(param, 1))
|
||||
|
||||
def next_line(self, param):
|
||||
sbinfo = self.screen_buffer_info()
|
||||
self.move_cursor(
|
||||
x_offset = -sbinfo.CursorPosition.X,
|
||||
y_offset = to_int(param, 1)
|
||||
)
|
||||
|
||||
def prev_line(self, param):
|
||||
sbinfo = self.screen_buffer_info()
|
||||
self.move_cursor(
|
||||
x_offset = -sbinfo.CursorPosition.X,
|
||||
y_offset = -to_int(param, 1)
|
||||
)
|
||||
|
||||
escape_to_color = { (0, 30): 0x0, #black
|
||||
(0, 31): 0x4, #red
|
||||
(0, 32): 0x2, #green
|
||||
(0, 33): 0x4+0x2, #dark yellow
|
||||
(0, 34): 0x1, #blue
|
||||
(0, 35): 0x1+0x4, #purple
|
||||
(0, 36): 0x2+0x4, #cyan
|
||||
(0, 37): 0x1+0x2+0x4, #grey
|
||||
(1, 30): 0x1+0x2+0x4, #dark gray
|
||||
(1, 31): 0x4+0x8, #red
|
||||
(1, 32): 0x2+0x8, #light green
|
||||
(1, 33): 0x4+0x2+0x8, #yellow
|
||||
(1, 34): 0x1+0x8, #light blue
|
||||
(1, 35): 0x1+0x4+0x8, #light purple
|
||||
(1, 36): 0x1+0x2+0x8, #light cyan
|
||||
(1, 37): 0x1+0x2+0x4+0x8, #white
|
||||
}
|
||||
|
||||
def set_color(self, param):
|
||||
cols = param.split(';')
|
||||
attr = self.orig_sbinfo.Attributes
|
||||
for c in cols:
|
||||
c = to_int(c, 0)
|
||||
if c in range(30,38):
|
||||
attr = (attr & 0xf0) | (self.escape_to_color.get((0,c), 0x7))
|
||||
elif c in range(40,48):
|
||||
attr = (attr & 0x0f) | (self.escape_to_color.get((0,c), 0x7) << 8)
|
||||
elif c in range(90,98):
|
||||
attr = (attr & 0xf0) | (self.escape_to_color.get((1,c-60), 0x7))
|
||||
elif c in range(100,108):
|
||||
attr = (attr & 0x0f) | (self.escape_to_color.get((1,c-60), 0x7) << 8)
|
||||
elif c == 1:
|
||||
attr |= 0x08
|
||||
windll.kernel32.SetConsoleTextAttribute(self.hconsole, attr)
|
||||
|
||||
def show_cursor(self,param):
|
||||
csinfo.bVisible = 1
|
||||
windll.kernel32.SetConsoleCursorInfo(self.hconsole, byref(csinfo))
|
||||
|
||||
def hide_cursor(self,param):
|
||||
csinfo.bVisible = 0
|
||||
windll.kernel32.SetConsoleCursorInfo(self.hconsole, byref(csinfo))
|
||||
|
||||
ansi_command_table = {
|
||||
'A': move_up,
|
||||
'B': move_down,
|
||||
'C': move_right,
|
||||
'D': move_left,
|
||||
'E': next_line,
|
||||
'F': prev_line,
|
||||
'G': set_column,
|
||||
'H': set_cursor,
|
||||
'f': set_cursor,
|
||||
'J': clear_screen,
|
||||
'K': clear_line,
|
||||
'h': show_cursor,
|
||||
'l': hide_cursor,
|
||||
'm': set_color,
|
||||
's': push_cursor,
|
||||
'u': pop_cursor,
|
||||
}
|
||||
# Match either the escape sequence or text not containing escape sequence
|
||||
ansi_tokans = re.compile('(?:\x1b\[([0-9?;]*)([a-zA-Z])|([^\x1b]+))')
|
||||
def write(self, text):
|
||||
try:
|
||||
wlock.acquire()
|
||||
for param, cmd, txt in self.ansi_tokans.findall(text):
|
||||
if cmd:
|
||||
cmd_func = self.ansi_command_table.get(cmd)
|
||||
if cmd_func:
|
||||
cmd_func(self, param)
|
||||
else:
|
||||
chars_written = c_int()
|
||||
if isinstance(txt, unicode):
|
||||
windll.kernel32.WriteConsoleW(self.hconsole, txt, len(txt), byref(chars_written), None)
|
||||
else:
|
||||
windll.kernel32.WriteConsoleA(self.hconsole, txt, len(txt), byref(chars_written), None)
|
||||
finally:
|
||||
wlock.release()
|
||||
|
||||
def flush(self):
|
||||
pass
|
||||
|
||||
def isatty(self):
|
||||
return True
|
||||
|
||||
sys.stderr = sys.stdout = AnsiTerm()
|
||||
os.environ['TERM'] = 'vt100'
|
619
third_party/waf/wafadmin/pproc.py
vendored
619
third_party/waf/wafadmin/pproc.py
vendored
@ -1,619 +0,0 @@
|
||||
# borrowed from python 2.5.2c1
|
||||
# Copyright (c) 2003-2005 by Peter Astrand <astrand@lysator.liu.se>
|
||||
# Licensed to PSF under a Contributor Agreement.
|
||||
|
||||
import sys
|
||||
mswindows = (sys.platform == "win32")
|
||||
|
||||
import os
|
||||
import types
|
||||
import traceback
|
||||
import gc
|
||||
|
||||
class CalledProcessError(Exception):
|
||||
def __init__(self, returncode, cmd):
|
||||
self.returncode = returncode
|
||||
self.cmd = cmd
|
||||
def __str__(self):
|
||||
return "Command '%s' returned non-zero exit status %d" % (self.cmd, self.returncode)
|
||||
|
||||
if mswindows:
|
||||
import threading
|
||||
import msvcrt
|
||||
if 0:
|
||||
import pywintypes
|
||||
from win32api import GetStdHandle, STD_INPUT_HANDLE, \
|
||||
STD_OUTPUT_HANDLE, STD_ERROR_HANDLE
|
||||
from win32api import GetCurrentProcess, DuplicateHandle, \
|
||||
GetModuleFileName, GetVersion
|
||||
from win32con import DUPLICATE_SAME_ACCESS, SW_HIDE
|
||||
from win32pipe import CreatePipe
|
||||
from win32process import CreateProcess, STARTUPINFO, \
|
||||
GetExitCodeProcess, STARTF_USESTDHANDLES, \
|
||||
STARTF_USESHOWWINDOW, CREATE_NEW_CONSOLE
|
||||
from win32event import WaitForSingleObject, INFINITE, WAIT_OBJECT_0
|
||||
else:
|
||||
from _subprocess import *
|
||||
class STARTUPINFO:
|
||||
dwFlags = 0
|
||||
hStdInput = None
|
||||
hStdOutput = None
|
||||
hStdError = None
|
||||
wShowWindow = 0
|
||||
class pywintypes:
|
||||
error = IOError
|
||||
else:
|
||||
import select
|
||||
import errno
|
||||
import fcntl
|
||||
import pickle
|
||||
|
||||
__all__ = ["Popen", "PIPE", "STDOUT", "call", "check_call", "CalledProcessError"]
|
||||
|
||||
try:
|
||||
MAXFD = os.sysconf("SC_OPEN_MAX")
|
||||
except:
|
||||
MAXFD = 256
|
||||
|
||||
try:
|
||||
False
|
||||
except NameError:
|
||||
False = 0
|
||||
True = 1
|
||||
|
||||
_active = []
|
||||
|
||||
def _cleanup():
|
||||
for inst in _active[:]:
|
||||
if inst.poll(_deadstate=sys.maxint) >= 0:
|
||||
try:
|
||||
_active.remove(inst)
|
||||
except ValueError:
|
||||
pass
|
||||
|
||||
PIPE = -1
|
||||
STDOUT = -2
|
||||
|
||||
|
||||
def call(*popenargs, **kwargs):
|
||||
return Popen(*popenargs, **kwargs).wait()
|
||||
|
||||
def check_call(*popenargs, **kwargs):
|
||||
retcode = call(*popenargs, **kwargs)
|
||||
cmd = kwargs.get("args")
|
||||
if cmd is None:
|
||||
cmd = popenargs[0]
|
||||
if retcode:
|
||||
raise CalledProcessError(retcode, cmd)
|
||||
return retcode
|
||||
|
||||
|
||||
def list2cmdline(seq):
|
||||
result = []
|
||||
needquote = False
|
||||
for arg in seq:
|
||||
bs_buf = []
|
||||
|
||||
if result:
|
||||
result.append(' ')
|
||||
|
||||
needquote = (" " in arg) or ("\t" in arg) or arg == ""
|
||||
if needquote:
|
||||
result.append('"')
|
||||
|
||||
for c in arg:
|
||||
if c == '\\':
|
||||
bs_buf.append(c)
|
||||
elif c == '"':
|
||||
result.append('\\' * len(bs_buf)*2)
|
||||
bs_buf = []
|
||||
result.append('\\"')
|
||||
else:
|
||||
if bs_buf:
|
||||
result.extend(bs_buf)
|
||||
bs_buf = []
|
||||
result.append(c)
|
||||
|
||||
if bs_buf:
|
||||
result.extend(bs_buf)
|
||||
|
||||
if needquote:
|
||||
result.extend(bs_buf)
|
||||
result.append('"')
|
||||
|
||||
return ''.join(result)
|
||||
|
||||
class Popen(object):
|
||||
def __init__(self, args, bufsize=0, executable=None,
|
||||
stdin=None, stdout=None, stderr=None,
|
||||
preexec_fn=None, close_fds=False, shell=False,
|
||||
cwd=None, env=None, universal_newlines=False,
|
||||
startupinfo=None, creationflags=0):
|
||||
_cleanup()
|
||||
|
||||
self._child_created = False
|
||||
if not isinstance(bufsize, (int, long)):
|
||||
raise TypeError("bufsize must be an integer")
|
||||
|
||||
if mswindows:
|
||||
if preexec_fn is not None:
|
||||
raise ValueError("preexec_fn is not supported on Windows platforms")
|
||||
if close_fds:
|
||||
raise ValueError("close_fds is not supported on Windows platforms")
|
||||
else:
|
||||
if startupinfo is not None:
|
||||
raise ValueError("startupinfo is only supported on Windows platforms")
|
||||
if creationflags != 0:
|
||||
raise ValueError("creationflags is only supported on Windows platforms")
|
||||
|
||||
self.stdin = None
|
||||
self.stdout = None
|
||||
self.stderr = None
|
||||
self.pid = None
|
||||
self.returncode = None
|
||||
self.universal_newlines = universal_newlines
|
||||
|
||||
(p2cread, p2cwrite,
|
||||
c2pread, c2pwrite,
|
||||
errread, errwrite) = self._get_handles(stdin, stdout, stderr)
|
||||
|
||||
self._execute_child(args, executable, preexec_fn, close_fds,
|
||||
cwd, env, universal_newlines,
|
||||
startupinfo, creationflags, shell,
|
||||
p2cread, p2cwrite,
|
||||
c2pread, c2pwrite,
|
||||
errread, errwrite)
|
||||
|
||||
if mswindows:
|
||||
if stdin is None and p2cwrite is not None:
|
||||
os.close(p2cwrite)
|
||||
p2cwrite = None
|
||||
if stdout is None and c2pread is not None:
|
||||
os.close(c2pread)
|
||||
c2pread = None
|
||||
if stderr is None and errread is not None:
|
||||
os.close(errread)
|
||||
errread = None
|
||||
|
||||
if p2cwrite:
|
||||
self.stdin = os.fdopen(p2cwrite, 'wb', bufsize)
|
||||
if c2pread:
|
||||
if universal_newlines:
|
||||
self.stdout = os.fdopen(c2pread, 'rU', bufsize)
|
||||
else:
|
||||
self.stdout = os.fdopen(c2pread, 'rb', bufsize)
|
||||
if errread:
|
||||
if universal_newlines:
|
||||
self.stderr = os.fdopen(errread, 'rU', bufsize)
|
||||
else:
|
||||
self.stderr = os.fdopen(errread, 'rb', bufsize)
|
||||
|
||||
|
||||
def _translate_newlines(self, data):
|
||||
data = data.replace("\r\n", "\n")
|
||||
data = data.replace("\r", "\n")
|
||||
return data
|
||||
|
||||
|
||||
def __del__(self, sys=sys):
|
||||
if not self._child_created:
|
||||
return
|
||||
self.poll(_deadstate=sys.maxint)
|
||||
if self.returncode is None and _active is not None:
|
||||
_active.append(self)
|
||||
|
||||
|
||||
def communicate(self, input=None):
|
||||
if [self.stdin, self.stdout, self.stderr].count(None) >= 2:
|
||||
stdout = None
|
||||
stderr = None
|
||||
if self.stdin:
|
||||
if input:
|
||||
self.stdin.write(input)
|
||||
self.stdin.close()
|
||||
elif self.stdout:
|
||||
stdout = self.stdout.read()
|
||||
elif self.stderr:
|
||||
stderr = self.stderr.read()
|
||||
self.wait()
|
||||
return (stdout, stderr)
|
||||
|
||||
return self._communicate(input)
|
||||
|
||||
|
||||
if mswindows:
|
||||
def _get_handles(self, stdin, stdout, stderr):
|
||||
if stdin is None and stdout is None and stderr is None:
|
||||
return (None, None, None, None, None, None)
|
||||
|
||||
p2cread, p2cwrite = None, None
|
||||
c2pread, c2pwrite = None, None
|
||||
errread, errwrite = None, None
|
||||
|
||||
if stdin is None:
|
||||
p2cread = GetStdHandle(STD_INPUT_HANDLE)
|
||||
if p2cread is not None:
|
||||
pass
|
||||
elif stdin is None or stdin == PIPE:
|
||||
p2cread, p2cwrite = CreatePipe(None, 0)
|
||||
p2cwrite = p2cwrite.Detach()
|
||||
p2cwrite = msvcrt.open_osfhandle(p2cwrite, 0)
|
||||
elif isinstance(stdin, int):
|
||||
p2cread = msvcrt.get_osfhandle(stdin)
|
||||
else:
|
||||
p2cread = msvcrt.get_osfhandle(stdin.fileno())
|
||||
p2cread = self._make_inheritable(p2cread)
|
||||
|
||||
if stdout is None:
|
||||
c2pwrite = GetStdHandle(STD_OUTPUT_HANDLE)
|
||||
if c2pwrite is not None:
|
||||
pass
|
||||
elif stdout is None or stdout == PIPE:
|
||||
c2pread, c2pwrite = CreatePipe(None, 0)
|
||||
c2pread = c2pread.Detach()
|
||||
c2pread = msvcrt.open_osfhandle(c2pread, 0)
|
||||
elif isinstance(stdout, int):
|
||||
c2pwrite = msvcrt.get_osfhandle(stdout)
|
||||
else:
|
||||
c2pwrite = msvcrt.get_osfhandle(stdout.fileno())
|
||||
c2pwrite = self._make_inheritable(c2pwrite)
|
||||
|
||||
if stderr is None:
|
||||
errwrite = GetStdHandle(STD_ERROR_HANDLE)
|
||||
if errwrite is not None:
|
||||
pass
|
||||
elif stderr is None or stderr == PIPE:
|
||||
errread, errwrite = CreatePipe(None, 0)
|
||||
errread = errread.Detach()
|
||||
errread = msvcrt.open_osfhandle(errread, 0)
|
||||
elif stderr == STDOUT:
|
||||
errwrite = c2pwrite
|
||||
elif isinstance(stderr, int):
|
||||
errwrite = msvcrt.get_osfhandle(stderr)
|
||||
else:
|
||||
errwrite = msvcrt.get_osfhandle(stderr.fileno())
|
||||
errwrite = self._make_inheritable(errwrite)
|
||||
|
||||
return (p2cread, p2cwrite,
|
||||
c2pread, c2pwrite,
|
||||
errread, errwrite)
|
||||
def _make_inheritable(self, handle):
|
||||
return DuplicateHandle(GetCurrentProcess(), handle, GetCurrentProcess(), 0, 1, DUPLICATE_SAME_ACCESS)
|
||||
|
||||
def _find_w9xpopen(self):
|
||||
w9xpopen = os.path.join(os.path.dirname(GetModuleFileName(0)), "w9xpopen.exe")
|
||||
if not os.path.exists(w9xpopen):
|
||||
w9xpopen = os.path.join(os.path.dirname(sys.exec_prefix), "w9xpopen.exe")
|
||||
if not os.path.exists(w9xpopen):
|
||||
raise RuntimeError("Cannot locate w9xpopen.exe, which is needed for Popen to work with your shell or platform.")
|
||||
return w9xpopen
|
||||
|
||||
def _execute_child(self, args, executable, preexec_fn, close_fds,
|
||||
cwd, env, universal_newlines,
|
||||
startupinfo, creationflags, shell,
|
||||
p2cread, p2cwrite,
|
||||
c2pread, c2pwrite,
|
||||
errread, errwrite):
|
||||
|
||||
if not isinstance(args, types.StringTypes):
|
||||
args = list2cmdline(args)
|
||||
|
||||
if startupinfo is None:
|
||||
startupinfo = STARTUPINFO()
|
||||
if None not in (p2cread, c2pwrite, errwrite):
|
||||
startupinfo.dwFlags |= STARTF_USESTDHANDLES
|
||||
startupinfo.hStdInput = p2cread
|
||||
startupinfo.hStdOutput = c2pwrite
|
||||
startupinfo.hStdError = errwrite
|
||||
|
||||
if shell:
|
||||
startupinfo.dwFlags |= STARTF_USESHOWWINDOW
|
||||
startupinfo.wShowWindow = SW_HIDE
|
||||
comspec = os.environ.get("COMSPEC", "cmd.exe")
|
||||
args = comspec + " /c " + args
|
||||
if (GetVersion() >= 0x80000000L or
|
||||
os.path.basename(comspec).lower() == "command.com"):
|
||||
w9xpopen = self._find_w9xpopen()
|
||||
args = '"%s" %s' % (w9xpopen, args)
|
||||
creationflags |= CREATE_NEW_CONSOLE
|
||||
|
||||
try:
|
||||
hp, ht, pid, tid = CreateProcess(executable, args, None, None, 1, creationflags, env, cwd, startupinfo)
|
||||
except pywintypes.error, e:
|
||||
raise WindowsError(*e.args)
|
||||
|
||||
self._child_created = True
|
||||
self._handle = hp
|
||||
self.pid = pid
|
||||
ht.Close()
|
||||
|
||||
if p2cread is not None:
|
||||
p2cread.Close()
|
||||
if c2pwrite is not None:
|
||||
c2pwrite.Close()
|
||||
if errwrite is not None:
|
||||
errwrite.Close()
|
||||
|
||||
|
||||
def poll(self, _deadstate=None):
|
||||
if self.returncode is None:
|
||||
if WaitForSingleObject(self._handle, 0) == WAIT_OBJECT_0:
|
||||
self.returncode = GetExitCodeProcess(self._handle)
|
||||
return self.returncode
|
||||
|
||||
|
||||
def wait(self):
|
||||
if self.returncode is None:
|
||||
obj = WaitForSingleObject(self._handle, INFINITE)
|
||||
self.returncode = GetExitCodeProcess(self._handle)
|
||||
return self.returncode
|
||||
|
||||
def _readerthread(self, fh, buffer):
|
||||
buffer.append(fh.read())
|
||||
|
||||
def _communicate(self, input):
|
||||
stdout = None
|
||||
stderr = None
|
||||
|
||||
if self.stdout:
|
||||
stdout = []
|
||||
stdout_thread = threading.Thread(target=self._readerthread, args=(self.stdout, stdout))
|
||||
stdout_thread.setDaemon(True)
|
||||
stdout_thread.start()
|
||||
if self.stderr:
|
||||
stderr = []
|
||||
stderr_thread = threading.Thread(target=self._readerthread, args=(self.stderr, stderr))
|
||||
stderr_thread.setDaemon(True)
|
||||
stderr_thread.start()
|
||||
|
||||
if self.stdin:
|
||||
if input is not None:
|
||||
self.stdin.write(input)
|
||||
self.stdin.close()
|
||||
|
||||
if self.stdout:
|
||||
stdout_thread.join()
|
||||
if self.stderr:
|
||||
stderr_thread.join()
|
||||
|
||||
if stdout is not None:
|
||||
stdout = stdout[0]
|
||||
if stderr is not None:
|
||||
stderr = stderr[0]
|
||||
|
||||
if self.universal_newlines and hasattr(file, 'newlines'):
|
||||
if stdout:
|
||||
stdout = self._translate_newlines(stdout)
|
||||
if stderr:
|
||||
stderr = self._translate_newlines(stderr)
|
||||
|
||||
self.wait()
|
||||
return (stdout, stderr)
|
||||
|
||||
else:
|
||||
def _get_handles(self, stdin, stdout, stderr):
|
||||
p2cread, p2cwrite = None, None
|
||||
c2pread, c2pwrite = None, None
|
||||
errread, errwrite = None, None
|
||||
|
||||
if stdin is None:
|
||||
pass
|
||||
elif stdin == PIPE:
|
||||
p2cread, p2cwrite = os.pipe()
|
||||
elif isinstance(stdin, int):
|
||||
p2cread = stdin
|
||||
else:
|
||||
p2cread = stdin.fileno()
|
||||
|
||||
if stdout is None:
|
||||
pass
|
||||
elif stdout == PIPE:
|
||||
c2pread, c2pwrite = os.pipe()
|
||||
elif isinstance(stdout, int):
|
||||
c2pwrite = stdout
|
||||
else:
|
||||
c2pwrite = stdout.fileno()
|
||||
|
||||
if stderr is None:
|
||||
pass
|
||||
elif stderr == PIPE:
|
||||
errread, errwrite = os.pipe()
|
||||
elif stderr == STDOUT:
|
||||
errwrite = c2pwrite
|
||||
elif isinstance(stderr, int):
|
||||
errwrite = stderr
|
||||
else:
|
||||
errwrite = stderr.fileno()
|
||||
|
||||
return (p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite)
|
||||
|
||||
def _set_cloexec_flag(self, fd):
|
||||
try:
|
||||
cloexec_flag = fcntl.FD_CLOEXEC
|
||||
except AttributeError:
|
||||
cloexec_flag = 1
|
||||
|
||||
old = fcntl.fcntl(fd, fcntl.F_GETFD)
|
||||
fcntl.fcntl(fd, fcntl.F_SETFD, old | cloexec_flag)
|
||||
|
||||
def _close_fds(self, but):
|
||||
for i in xrange(3, MAXFD):
|
||||
if i == but:
|
||||
continue
|
||||
try:
|
||||
os.close(i)
|
||||
except:
|
||||
pass
|
||||
|
||||
def _execute_child(self, args, executable, preexec_fn, close_fds,
|
||||
cwd, env, universal_newlines, startupinfo, creationflags, shell,
|
||||
p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite):
|
||||
|
||||
if isinstance(args, types.StringTypes):
|
||||
args = [args]
|
||||
else:
|
||||
args = list(args)
|
||||
|
||||
if shell:
|
||||
args = ["/bin/sh", "-c"] + args
|
||||
|
||||
if executable is None:
|
||||
executable = args[0]
|
||||
|
||||
errpipe_read, errpipe_write = os.pipe()
|
||||
self._set_cloexec_flag(errpipe_write)
|
||||
|
||||
gc_was_enabled = gc.isenabled()
|
||||
gc.disable()
|
||||
try:
|
||||
self.pid = os.fork()
|
||||
except:
|
||||
if gc_was_enabled:
|
||||
gc.enable()
|
||||
raise
|
||||
self._child_created = True
|
||||
if self.pid == 0:
|
||||
try:
|
||||
if p2cwrite:
|
||||
os.close(p2cwrite)
|
||||
if c2pread:
|
||||
os.close(c2pread)
|
||||
if errread:
|
||||
os.close(errread)
|
||||
os.close(errpipe_read)
|
||||
|
||||
if p2cread:
|
||||
os.dup2(p2cread, 0)
|
||||
if c2pwrite:
|
||||
os.dup2(c2pwrite, 1)
|
||||
if errwrite:
|
||||
os.dup2(errwrite, 2)
|
||||
|
||||
if p2cread and p2cread not in (0,):
|
||||
os.close(p2cread)
|
||||
if c2pwrite and c2pwrite not in (p2cread, 1):
|
||||
os.close(c2pwrite)
|
||||
if errwrite and errwrite not in (p2cread, c2pwrite, 2):
|
||||
os.close(errwrite)
|
||||
|
||||
if close_fds:
|
||||
self._close_fds(but=errpipe_write)
|
||||
|
||||
if cwd is not None:
|
||||
os.chdir(cwd)
|
||||
|
||||
if preexec_fn:
|
||||
apply(preexec_fn)
|
||||
|
||||
if env is None:
|
||||
os.execvp(executable, args)
|
||||
else:
|
||||
os.execvpe(executable, args, env)
|
||||
|
||||
except:
|
||||
exc_type, exc_value, tb = sys.exc_info()
|
||||
exc_lines = traceback.format_exception(exc_type, exc_value, tb)
|
||||
exc_value.child_traceback = ''.join(exc_lines)
|
||||
os.write(errpipe_write, pickle.dumps(exc_value))
|
||||
|
||||
os._exit(255)
|
||||
|
||||
if gc_was_enabled:
|
||||
gc.enable()
|
||||
os.close(errpipe_write)
|
||||
if p2cread and p2cwrite:
|
||||
os.close(p2cread)
|
||||
if c2pwrite and c2pread:
|
||||
os.close(c2pwrite)
|
||||
if errwrite and errread:
|
||||
os.close(errwrite)
|
||||
|
||||
data = os.read(errpipe_read, 1048576)
|
||||
os.close(errpipe_read)
|
||||
if data != "":
|
||||
os.waitpid(self.pid, 0)
|
||||
child_exception = pickle.loads(data)
|
||||
raise child_exception
|
||||
|
||||
def _handle_exitstatus(self, sts):
|
||||
if os.WIFSIGNALED(sts):
|
||||
self.returncode = -os.WTERMSIG(sts)
|
||||
elif os.WIFEXITED(sts):
|
||||
self.returncode = os.WEXITSTATUS(sts)
|
||||
else:
|
||||
raise RuntimeError("Unknown child exit status!")
|
||||
|
||||
def poll(self, _deadstate=None):
|
||||
if self.returncode is None:
|
||||
try:
|
||||
pid, sts = os.waitpid(self.pid, os.WNOHANG)
|
||||
if pid == self.pid:
|
||||
self._handle_exitstatus(sts)
|
||||
except os.error:
|
||||
if _deadstate is not None:
|
||||
self.returncode = _deadstate
|
||||
return self.returncode
|
||||
|
||||
def wait(self):
|
||||
if self.returncode is None:
|
||||
pid, sts = os.waitpid(self.pid, 0)
|
||||
self._handle_exitstatus(sts)
|
||||
return self.returncode
|
||||
|
||||
def _communicate(self, input):
|
||||
read_set = []
|
||||
write_set = []
|
||||
stdout = None
|
||||
stderr = None
|
||||
|
||||
if self.stdin:
|
||||
self.stdin.flush()
|
||||
if input:
|
||||
write_set.append(self.stdin)
|
||||
else:
|
||||
self.stdin.close()
|
||||
if self.stdout:
|
||||
read_set.append(self.stdout)
|
||||
stdout = []
|
||||
if self.stderr:
|
||||
read_set.append(self.stderr)
|
||||
stderr = []
|
||||
|
||||
input_offset = 0
|
||||
while read_set or write_set:
|
||||
rlist, wlist, xlist = select.select(read_set, write_set, [])
|
||||
|
||||
if self.stdin in wlist:
|
||||
bytes_written = os.write(self.stdin.fileno(), buffer(input, input_offset, 512))
|
||||
input_offset += bytes_written
|
||||
if input_offset >= len(input):
|
||||
self.stdin.close()
|
||||
write_set.remove(self.stdin)
|
||||
|
||||
if self.stdout in rlist:
|
||||
data = os.read(self.stdout.fileno(), 1024)
|
||||
if data == "":
|
||||
self.stdout.close()
|
||||
read_set.remove(self.stdout)
|
||||
stdout.append(data)
|
||||
|
||||
if self.stderr in rlist:
|
||||
data = os.read(self.stderr.fileno(), 1024)
|
||||
if data == "":
|
||||
self.stderr.close()
|
||||
read_set.remove(self.stderr)
|
||||
stderr.append(data)
|
||||
|
||||
if stdout is not None:
|
||||
stdout = ''.join(stdout)
|
||||
if stderr is not None:
|
||||
stderr = ''.join(stderr)
|
||||
|
||||
if self.universal_newlines and hasattr(file, 'newlines'):
|
||||
if stdout:
|
||||
stdout = self._translate_newlines(stdout)
|
||||
if stderr:
|
||||
stderr = self._translate_newlines(stderr)
|
||||
|
||||
self.wait()
|
||||
return (stdout, stderr)
|
129
third_party/waf/wafadmin/py3kfixes.py
vendored
129
third_party/waf/wafadmin/py3kfixes.py
vendored
@ -1,129 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# Thomas Nagy, 2009 (ita)
|
||||
|
||||
"""
|
||||
Fixes for py3k go here
|
||||
"""
|
||||
|
||||
import os
|
||||
|
||||
all_modifs = {}
|
||||
|
||||
def modif(dir, name, fun):
|
||||
if name == '*':
|
||||
lst = []
|
||||
for y in '. Tools 3rdparty'.split():
|
||||
for x in os.listdir(os.path.join(dir, y)):
|
||||
if x.endswith('.py'):
|
||||
lst.append(y + os.sep + x)
|
||||
#lst = [y + os.sep + x for x in os.listdir(os.path.join(dir, y)) for y in '. Tools 3rdparty'.split() if x.endswith('.py')]
|
||||
for x in lst:
|
||||
modif(dir, x, fun)
|
||||
return
|
||||
|
||||
filename = os.path.join(dir, name)
|
||||
f = open(filename, 'r')
|
||||
txt = f.read()
|
||||
f.close()
|
||||
|
||||
txt = fun(txt)
|
||||
|
||||
f = open(filename, 'w')
|
||||
f.write(txt)
|
||||
f.close()
|
||||
|
||||
def subst(filename):
|
||||
def do_subst(fun):
|
||||
global all_modifs
|
||||
try:
|
||||
all_modifs[filename] += fun
|
||||
except KeyError:
|
||||
all_modifs[filename] = [fun]
|
||||
return fun
|
||||
return do_subst
|
||||
|
||||
@subst('Constants.py')
|
||||
def r1(code):
|
||||
code = code.replace("'iluvcuteoverload'", "b'iluvcuteoverload'")
|
||||
code = code.replace("ABI=7", "ABI=37")
|
||||
return code
|
||||
|
||||
@subst('Tools/ccroot.py')
|
||||
def r2(code):
|
||||
code = code.replace("p.stdin.write('\\n')", "p.stdin.write(b'\\n')")
|
||||
code = code.replace('p.communicate()[0]', 'p.communicate()[0].decode("utf-8")')
|
||||
return code
|
||||
|
||||
@subst('Utils.py')
|
||||
def r3(code):
|
||||
code = code.replace("m.update(str(lst))", "m.update(str(lst).encode())")
|
||||
code = code.replace('p.communicate()[0]', 'p.communicate()[0].decode("utf-8")')
|
||||
return code
|
||||
|
||||
@subst('ansiterm.py')
|
||||
def r33(code):
|
||||
code = code.replace('unicode', 'str')
|
||||
return code
|
||||
|
||||
@subst('Task.py')
|
||||
def r4(code):
|
||||
code = code.replace("up(self.__class__.__name__)", "up(self.__class__.__name__.encode())")
|
||||
code = code.replace("up(self.env.variant())", "up(self.env.variant().encode())")
|
||||
code = code.replace("up(x.parent.abspath())", "up(x.parent.abspath().encode())")
|
||||
code = code.replace("up(x.name)", "up(x.name.encode())")
|
||||
code = code.replace('class TaskBase(object):\n\t__metaclass__=store_task_type', 'import binascii\n\nclass TaskBase(object, metaclass=store_task_type):')
|
||||
code = code.replace('keys=self.cstr_groups.keys()', 'keys=list(self.cstr_groups.keys())')
|
||||
code = code.replace("sig.encode('hex')", 'binascii.hexlify(sig)')
|
||||
code = code.replace("os.path.join(Options.cache_global,ssig)", "os.path.join(Options.cache_global,ssig.decode())")
|
||||
return code
|
||||
|
||||
@subst('Build.py')
|
||||
def r5(code):
|
||||
code = code.replace("cPickle.dump(data,file,-1)", "cPickle.dump(data,file)")
|
||||
code = code.replace('for node in src_dir_node.childs.values():', 'for node in list(src_dir_node.childs.values()):')
|
||||
return code
|
||||
|
||||
@subst('*')
|
||||
def r6(code):
|
||||
code = code.replace('xrange', 'range')
|
||||
code = code.replace('iteritems', 'items')
|
||||
code = code.replace('maxint', 'maxsize')
|
||||
code = code.replace('iterkeys', 'keys')
|
||||
code = code.replace('Error,e:', 'Error as e:')
|
||||
code = code.replace('Exception,e:', 'Exception as e:')
|
||||
return code
|
||||
|
||||
@subst('TaskGen.py')
|
||||
def r7(code):
|
||||
code = code.replace('class task_gen(object):\n\t__metaclass__=register_obj', 'class task_gen(object, metaclass=register_obj):')
|
||||
return code
|
||||
|
||||
@subst('Tools/python.py')
|
||||
def r8(code):
|
||||
code = code.replace('proc.communicate()[0]', 'proc.communicate()[0].decode("utf-8")')
|
||||
return code
|
||||
|
||||
@subst('Tools/glib2.py')
|
||||
def r9(code):
|
||||
code = code.replace('f.write(c)', 'f.write(c.encode("utf-8"))')
|
||||
return code
|
||||
|
||||
@subst('Tools/config_c.py')
|
||||
def r10(code):
|
||||
code = code.replace("key=kw['success']", "key=kw['success']\n\t\t\t\ttry:\n\t\t\t\t\tkey=key.decode('utf-8')\n\t\t\t\texcept:\n\t\t\t\t\tpass")
|
||||
code = code.replace('out=str(out)','out=out.decode("utf-8")')
|
||||
code = code.replace('err=str(err)','err=err.decode("utf-8")')
|
||||
return code
|
||||
|
||||
@subst('Tools/d.py')
|
||||
def r11(code):
|
||||
code = code.replace('ret.strip()', 'ret.strip().decode("utf-8")')
|
||||
return code
|
||||
|
||||
def fixdir(dir):
|
||||
global all_modifs
|
||||
for k in all_modifs:
|
||||
for v in all_modifs[k]:
|
||||
modif(os.path.join(dir, 'wafadmin'), k, v)
|
||||
#print('substitutions finished')
|
1504
third_party/waf/waflib/Build.py
vendored
Normal file
1504
third_party/waf/waflib/Build.py
vendored
Normal file
File diff suppressed because it is too large
Load Diff
358
third_party/waf/waflib/ConfigSet.py
vendored
Normal file
358
third_party/waf/waflib/ConfigSet.py
vendored
Normal file
@ -0,0 +1,358 @@
|
||||
#! /usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
|
||||
|
||||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# Thomas Nagy, 2005-2016 (ita)
|
||||
|
||||
"""
|
||||
|
||||
ConfigSet: a special dict
|
||||
|
||||
The values put in :py:class:`ConfigSet` must be serializable (dicts, lists, strings)
|
||||
"""
|
||||
|
||||
import copy, re, os
|
||||
from waflib import Logs, Utils
|
||||
re_imp = re.compile('^(#)*?([^#=]*?)\ =\ (.*?)$', re.M)
|
||||
|
||||
class ConfigSet(object):
|
||||
"""
|
||||
A copy-on-write dict with human-readable serialized format. The serialization format
|
||||
is human-readable (python-like) and performed by using eval() and repr().
|
||||
For high performance prefer pickle. Do not store functions as they are not serializable.
|
||||
|
||||
The values can be accessed by attributes or by keys::
|
||||
|
||||
from waflib.ConfigSet import ConfigSet
|
||||
env = ConfigSet()
|
||||
env.FOO = 'test'
|
||||
env['FOO'] = 'test'
|
||||
"""
|
||||
__slots__ = ('table', 'parent')
|
||||
def __init__(self, filename=None):
|
||||
self.table = {}
|
||||
"""
|
||||
Internal dict holding the object values
|
||||
"""
|
||||
#self.parent = None
|
||||
|
||||
if filename:
|
||||
self.load(filename)
|
||||
|
||||
def __contains__(self, key):
|
||||
"""
|
||||
Enables the *in* syntax::
|
||||
|
||||
if 'foo' in env:
|
||||
print(env['foo'])
|
||||
"""
|
||||
if key in self.table: return True
|
||||
try: return self.parent.__contains__(key)
|
||||
except AttributeError: return False # parent may not exist
|
||||
|
||||
def keys(self):
|
||||
"""Dict interface"""
|
||||
keys = set()
|
||||
cur = self
|
||||
while cur:
|
||||
keys.update(cur.table.keys())
|
||||
cur = getattr(cur, 'parent', None)
|
||||
keys = list(keys)
|
||||
keys.sort()
|
||||
return keys
|
||||
|
||||
def __iter__(self):
|
||||
return iter(self.keys())
|
||||
|
||||
def __str__(self):
|
||||
"""Text representation of the ConfigSet (for debugging purposes)"""
|
||||
return "\n".join(["%r %r" % (x, self.__getitem__(x)) for x in self.keys()])
|
||||
|
||||
def __getitem__(self, key):
|
||||
"""
|
||||
Dictionary interface: get value from key::
|
||||
|
||||
def configure(conf):
|
||||
conf.env['foo'] = {}
|
||||
print(env['foo'])
|
||||
"""
|
||||
try:
|
||||
while 1:
|
||||
x = self.table.get(key)
|
||||
if not x is None:
|
||||
return x
|
||||
self = self.parent
|
||||
except AttributeError:
|
||||
return []
|
||||
|
||||
def __setitem__(self, key, value):
|
||||
"""
|
||||
Dictionary interface: set value for key
|
||||
"""
|
||||
self.table[key] = value
|
||||
|
||||
def __delitem__(self, key):
|
||||
"""
|
||||
Dictionary interface: mark the key as missing
|
||||
"""
|
||||
self[key] = []
|
||||
|
||||
def __getattr__(self, name):
|
||||
"""
|
||||
Attribute access provided for convenience. The following forms are equivalent::
|
||||
|
||||
def configure(conf):
|
||||
conf.env.value
|
||||
conf.env['value']
|
||||
"""
|
||||
if name in self.__slots__:
|
||||
return object.__getattr__(self, name)
|
||||
else:
|
||||
return self[name]
|
||||
|
||||
def __setattr__(self, name, value):
|
||||
"""
|
||||
Attribute access provided for convenience. The following forms are equivalent::
|
||||
|
||||
def configure(conf):
|
||||
conf.env.value = x
|
||||
env['value'] = x
|
||||
"""
|
||||
if name in self.__slots__:
|
||||
object.__setattr__(self, name, value)
|
||||
else:
|
||||
self[name] = value
|
||||
|
||||
def __delattr__(self, name):
|
||||
"""
|
||||
Attribute access provided for convenience. The following forms are equivalent::
|
||||
|
||||
def configure(conf):
|
||||
del env.value
|
||||
del env['value']
|
||||
"""
|
||||
if name in self.__slots__:
|
||||
object.__delattr__(self, name)
|
||||
else:
|
||||
del self[name]
|
||||
|
||||
def derive(self):
|
||||
"""
|
||||
Returns a new ConfigSet deriving from self. The copy returned
|
||||
will be a shallow copy::
|
||||
|
||||
from waflib.ConfigSet import ConfigSet
|
||||
env = ConfigSet()
|
||||
env.append_value('CFLAGS', ['-O2'])
|
||||
child = env.derive()
|
||||
child.CFLAGS.append('test') # warning! this will modify 'env'
|
||||
child.CFLAGS = ['-O3'] # new list, ok
|
||||
child.append_value('CFLAGS', ['-O3']) # ok
|
||||
|
||||
Use :py:func:`ConfigSet.detach` to detach the child from the parent.
|
||||
"""
|
||||
newenv = ConfigSet()
|
||||
newenv.parent = self
|
||||
return newenv
|
||||
|
||||
def detach(self):
|
||||
"""
|
||||
Detaches this instance from its parent (if present)
|
||||
|
||||
Modifying the parent :py:class:`ConfigSet` will not change the current object
|
||||
Modifying this :py:class:`ConfigSet` will not modify the parent one.
|
||||
"""
|
||||
tbl = self.get_merged_dict()
|
||||
try:
|
||||
delattr(self, 'parent')
|
||||
except AttributeError:
|
||||
pass
|
||||
else:
|
||||
keys = tbl.keys()
|
||||
for x in keys:
|
||||
tbl[x] = copy.deepcopy(tbl[x])
|
||||
self.table = tbl
|
||||
return self
|
||||
|
||||
def get_flat(self, key):
|
||||
"""
|
||||
Returns a value as a string. If the input is a list, the value returned is space-separated.
|
||||
|
||||
:param key: key to use
|
||||
:type key: string
|
||||
"""
|
||||
s = self[key]
|
||||
if isinstance(s, str): return s
|
||||
return ' '.join(s)
|
||||
|
||||
def _get_list_value_for_modification(self, key):
|
||||
"""
|
||||
Returns a list value for further modification.
|
||||
|
||||
The list may be modified inplace and there is no need to do this afterwards::
|
||||
|
||||
self.table[var] = value
|
||||
"""
|
||||
try:
|
||||
value = self.table[key]
|
||||
except KeyError:
|
||||
try:
|
||||
value = self.parent[key]
|
||||
except AttributeError:
|
||||
value = []
|
||||
else:
|
||||
if isinstance(value, list):
|
||||
# force a copy
|
||||
value = value[:]
|
||||
else:
|
||||
value = [value]
|
||||
self.table[key] = value
|
||||
else:
|
||||
if not isinstance(value, list):
|
||||
self.table[key] = value = [value]
|
||||
return value
|
||||
|
||||
def append_value(self, var, val):
	"""
	Appends a value to the specified config key::

		def build(bld):
			bld.env.append_value('CFLAGS', ['-O2'])

	The value must be a list or a tuple; a plain string is wrapped in a list.
	"""
	items = [val] if isinstance(val, str) else val
	self._get_list_value_for_modification(var).extend(items)
|
||||
|
||||
def prepend_value(self, var, val):
	"""
	Prepends a value to the specified item::

		def configure(conf):
			conf.env.prepend_value('CFLAGS', ['-O2'])

	The value must be a list or a tuple; a plain string is wrapped in a list.
	"""
	items = [val] if isinstance(val, str) else val
	# rebind to a fresh list so inherited parent values are never mutated
	self.table[var] = items + self._get_list_value_for_modification(var)
|
||||
|
||||
def append_unique(self, var, val):
	"""
	Appends a value to the specified item only if it's not already present::

		def build(bld):
			bld.env.append_unique('CFLAGS', ['-O2', '-g'])

	The value must be a list or a tuple; a plain string is wrapped in a list.
	"""
	items = [val] if isinstance(val, str) else val
	current = self._get_list_value_for_modification(var)
	for item in items:
		if item not in current:
			current.append(item)
|
||||
|
||||
def get_merged_dict(self):
	"""
	Computes the merged dictionary from the fusion of self and all its parents
	(entries in a child override those of its parents).

	:rtype: dict
	"""
	tables = []
	env = self
	while True:
		tables.append(env.table)
		try:
			env = env.parent
		except AttributeError:
			break
	# apply ancestors first so nearer tables win
	merged = {}
	for tbl in reversed(tables):
		merged.update(tbl)
	return merged
|
||||
|
||||
def store(self, filename):
	"""
	Serializes the :py:class:`ConfigSet` data to a file. See :py:meth:`ConfigSet.load`
	for reading such files.

	:param filename: file to use
	:type filename: string
	"""
	# best effort: the target directory may already exist
	try:
		os.makedirs(os.path.split(filename)[0])
	except OSError:
		pass

	# ascii() exists on Python 3 only; repr() is the Python 2 equivalent
	try:
		serialize = ascii
	except NameError:
		serialize = repr

	merged = self.get_merged_dict()
	lines = []
	for key in sorted(merged.keys()):
		if key != 'undo_stack':
			# the stash/revert history is deliberately not persisted
			lines.append('%s = %s\n' % (key, serialize(merged[key])))
	Utils.writef(filename, ''.join(lines))
|
||||
|
||||
def load(self, filename):
	"""
	Restores contents from a file (current values are not cleared). Files are
	written using :py:meth:`ConfigSet.store`.

	:param filename: file to use
	:type filename: string
	"""
	contents = Utils.readf(filename, m='rU')
	table = self.table
	for match in re_imp.finditer(contents):
		# NOTE(security): eval() on file contents -- only load cache files
		# written by ConfigSet.store() into a trusted build directory
		table[match.group(2)] = eval(match.group(3))
	Logs.debug('env: %s', self.table)
|
||||
|
||||
def update(self, d):
	"""
	Dictionary interface: replaces values with the ones from another dict.

	:param d: object to take the values from
	:type d: dict-like object
	"""
	self.table.update(d)
|
||||
|
||||
def stash(self):
	"""
	Stores the object state to provide transactionality semantics::

		env = ConfigSet()
		env.stash()
		try:
			env.append_value('CFLAGS', '-O3')
			call_some_method(env)
		finally:
			env.revert()

	The history is kept in a stack, and is lost during the serialization by
	:py:meth:`ConfigSet.store`
	"""
	previous = self.table
	# work on a deep copy so the saved snapshot is immune to later mutations
	self.table = dict((k, copy.deepcopy(v)) for k, v in previous.items())
	self.undo_stack = self.undo_stack + [previous]
|
||||
|
||||
def commit(self):
	"""
	Commits transactional changes by discarding the most recent snapshot.
	See :py:meth:`ConfigSet.stash`
	"""
	del self.undo_stack[-1]
|
||||
|
||||
def revert(self):
	"""
	Reverts the object to the most recently stashed state.
	See :py:meth:`ConfigSet.stash`
	"""
	self.table = self.undo_stack.pop()
|
641
third_party/waf/waflib/Configure.py
vendored
Normal file
641
third_party/waf/waflib/Configure.py
vendored
Normal file
@ -0,0 +1,641 @@
|
||||
#! /usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
|
||||
|
||||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# Thomas Nagy, 2005-2016 (ita)
|
||||
|
||||
"""
|
||||
Configuration system
|
||||
|
||||
A :py:class:`waflib.Configure.ConfigurationContext` instance is created when ``waf configure`` is called, it is used to:
|
||||
|
||||
* create data dictionaries (ConfigSet instances)
|
||||
* store the list of modules to import
|
||||
* hold configuration routines such as ``find_program``, etc
|
||||
"""
|
||||
|
||||
import os, shlex, sys, time, re, shutil
|
||||
from waflib import ConfigSet, Utils, Options, Logs, Context, Build, Errors
|
||||
|
||||
WAF_CONFIG_LOG = 'config.log'
|
||||
"""Name of the configuration log file"""
|
||||
|
||||
autoconfig = False
|
||||
"""Execute the configuration automatically"""
|
||||
|
||||
conf_template = '''# project %(app)s configured on %(now)s by
|
||||
# waf %(wafver)s (abi %(abi)s, python %(pyver)x on %(systype)s)
|
||||
# using %(args)s
|
||||
#'''
|
||||
|
||||
class ConfigurationContext(Context.Context):
	'''configures the project'''

	cmd = 'configure'

	error_handlers = []
	"""
	Additional functions to handle configuration errors
	"""

	def __init__(self, **kw):
		super(ConfigurationContext, self).__init__(**kw)
		self.environ = dict(os.environ)
		self.all_envs = {}

		self.top_dir = None
		self.out_dir = None

		self.tools = [] # tools loaded in the configuration, and that will be loaded when building

		self.hash = 0
		self.files = []

		self.tool_cache = []

		self.setenv('')

	def setenv(self, name, env=None):
		"""
		Set a new config set for conf.env. If a config set of that name already exists,
		recall it without modification.

		The name is the filename prefix to save to ``c4che/NAME_cache.py``, and it
		is also used as *variants* by the build commands.
		Though related to variants, whatever kind of data may be stored in the config set::

			def configure(cfg):
				cfg.env.ONE = 1
				cfg.setenv('foo')
				cfg.env.ONE = 2

			def build(bld):
				2 == bld.env_of_name('foo').ONE

		:param name: name of the configuration set
		:type name: string
		:param env: ConfigSet to copy, or an empty ConfigSet is created
		:type env: :py:class:`waflib.ConfigSet.ConfigSet`
		"""
		if name not in self.all_envs or env:
			if not env:
				env = ConfigSet.ConfigSet()
				self.prepare_env(env)
			else:
				env = env.derive()
			self.all_envs[name] = env
		self.variant = name

	def get_env(self):
		"""Getter for the env property"""
		return self.all_envs[self.variant]
	def set_env(self, val):
		"""Setter for the env property"""
		self.all_envs[self.variant] = val

	env = property(get_env, set_env)

	def init_dirs(self):
		"""
		Initialize the project directory and the build directory
		"""

		top = self.top_dir
		if not top:
			top = Options.options.top
		if not top:
			top = getattr(Context.g_module, Context.TOP, None)
		if not top:
			top = self.path.abspath()
		top = os.path.abspath(top)

		self.srcnode = (os.path.isabs(top) and self.root or self.path).find_dir(top)
		assert(self.srcnode)

		out = self.out_dir
		if not out:
			out = Options.options.out
		if not out:
			out = getattr(Context.g_module, Context.OUT, None)
		if not out:
			out = Options.lockfile.replace('.lock-waf_%s_' % sys.platform, '').replace('.lock-waf', '')

		# someone can be messing with symlinks
		out = os.path.realpath(out)

		self.bldnode = (os.path.isabs(out) and self.root or self.path).make_node(out)
		self.bldnode.mkdir()

		if not os.path.isdir(self.bldnode.abspath()):
			# fixed: this used to call conf.fatal(), but 'conf' is undefined
			# here and the branch raised a NameError instead of a clean error
			self.fatal('Could not create the build directory %s' % self.bldnode.abspath())

	def execute(self):
		"""
		See :py:func:`waflib.Context.Context.execute`
		"""
		self.init_dirs()

		self.cachedir = self.bldnode.make_node(Build.CACHE_DIR)
		self.cachedir.mkdir()

		path = os.path.join(self.bldnode.abspath(), WAF_CONFIG_LOG)
		self.logger = Logs.make_logger(path, 'cfg')

		app = getattr(Context.g_module, 'APPNAME', '')
		if app:
			ver = getattr(Context.g_module, 'VERSION', '')
			if ver:
				app = "%s (%s)" % (app, ver)

		params = {'now': time.ctime(), 'pyver': sys.hexversion, 'systype': sys.platform, 'args': " ".join(sys.argv), 'wafver': Context.WAFVERSION, 'abi': Context.ABI, 'app': app}
		self.to_log(conf_template % params)
		self.msg('Setting top to', self.srcnode.abspath())
		self.msg('Setting out to', self.bldnode.abspath())

		if id(self.srcnode) == id(self.bldnode):
			Logs.warn('Setting top == out')
		elif id(self.path) != id(self.srcnode):
			if self.srcnode.is_child_of(self.path):
				Logs.warn('Are you certain that you do not want to set top="." ?')

		super(ConfigurationContext, self).execute()

		self.store()

		Context.top_dir = self.srcnode.abspath()
		Context.out_dir = self.bldnode.abspath()

		# this will write a configure lock so that subsequent builds will
		# consider the current path as the root directory (see prepare_impl).
		# to remove: use 'waf distclean'
		env = ConfigSet.ConfigSet()
		env.argv = sys.argv
		env.options = Options.options.__dict__
		env.config_cmd = self.cmd

		env.run_dir = Context.run_dir
		env.top_dir = Context.top_dir
		env.out_dir = Context.out_dir

		# conf.hash & conf.files hold wscript files paths and hash
		# (used only by Configure.autoconfig)
		env.hash = self.hash
		env.files = self.files
		env.environ = dict(self.environ)

		if not (self.env.NO_LOCK_IN_RUN or env.environ.get('NO_LOCK_IN_RUN') or getattr(Options.options, 'no_lock_in_run')):
			env.store(os.path.join(Context.run_dir, Options.lockfile))
		if not (self.env.NO_LOCK_IN_TOP or env.environ.get('NO_LOCK_IN_TOP') or getattr(Options.options, 'no_lock_in_top')):
			env.store(os.path.join(Context.top_dir, Options.lockfile))
		if not (self.env.NO_LOCK_IN_OUT or env.environ.get('NO_LOCK_IN_OUT') or getattr(Options.options, 'no_lock_in_out')):
			env.store(os.path.join(Context.out_dir, Options.lockfile))

	def prepare_env(self, env):
		"""
		Insert *PREFIX*, *BINDIR* and *LIBDIR* values into ``env``

		:type env: :py:class:`waflib.ConfigSet.ConfigSet`
		:param env: a ConfigSet, usually ``conf.env``
		"""
		if not env.PREFIX:
			if Options.options.prefix or Utils.is_win32:
				env.PREFIX = Utils.sane_path(Options.options.prefix)
			else:
				env.PREFIX = ''
		if not env.BINDIR:
			if Options.options.bindir:
				env.BINDIR = Utils.sane_path(Options.options.bindir)
			else:
				env.BINDIR = Utils.subst_vars('${PREFIX}/bin', env)
		if not env.LIBDIR:
			if Options.options.libdir:
				env.LIBDIR = Utils.sane_path(Options.options.libdir)
			else:
				env.LIBDIR = Utils.subst_vars('${PREFIX}/lib%s' % Utils.lib64(), env)

	def store(self):
		"""Save the config results into the cache file"""
		n = self.cachedir.make_node('build.config.py')
		n.write('version = 0x%x\ntools = %r\n' % (Context.HEXVERSION, self.tools))

		if not self.all_envs:
			self.fatal('nothing to store in the configuration context!')

		for key in self.all_envs:
			tmpenv = self.all_envs[key]
			tmpenv.store(os.path.join(self.cachedir.abspath(), key + Build.CACHE_SUFFIX))

	def load(self, input, tooldir=None, funs=None, with_sys_path=True, cache=False):
		"""
		Load Waf tools, which will be imported whenever a build is started.

		:param input: waf tools to import
		:type input: list of string
		:param tooldir: paths for the imports
		:type tooldir: list of string
		:param funs: functions to execute from the waf tools
		:type funs: list of string
		:param cache: whether to prevent the tool from running twice
		:type cache: bool
		"""

		tools = Utils.to_list(input)
		if tooldir: tooldir = Utils.to_list(tooldir)
		for tool in tools:
			# avoid loading the same tool more than once with the same functions
			# used by composite projects

			if cache:
				mag = (tool, id(self.env), tooldir, funs)
				if mag in self.tool_cache:
					self.to_log('(tool %s is already loaded, skipping)' % tool)
					continue
				self.tool_cache.append(mag)

			module = None
			try:
				module = Context.load_tool(tool, tooldir, ctx=self, with_sys_path=with_sys_path)
			# fixed: the Python-2-only 'except ImportError ,e:' comma syntax is a
			# SyntaxError on Python 3; use the 'as' form instead
			except ImportError as e:
				self.fatal('Could not load the Waf tool %r from %r\n%s' % (tool, sys.path, e))
			except Exception:
				self.to_log('imp %r (%r & %r)' % (tool, tooldir, funs))
				self.to_log(Utils.ex_stack())
				raise

			if funs is not None:
				self.eval_rules(funs)
			else:
				func = getattr(module, 'configure', None)
				if func:
					if type(func) is type(Utils.readf): func(self)
					else: self.eval_rules(func)

			self.tools.append({'tool':tool, 'tooldir':tooldir, 'funs':funs})

	def post_recurse(self, node):
		"""
		Records the path and a hash of the scripts visited, see :py:meth:`waflib.Context.Context.post_recurse`

		:param node: script
		:type node: :py:class:`waflib.Node.Node`
		"""
		super(ConfigurationContext, self).post_recurse(node)
		self.hash = Utils.h_list((self.hash, node.read('rb')))
		self.files.append(node.abspath())

	def eval_rules(self, rules):
		"""
		Execute configuration tests provided as a list of functions to run

		:param rules: list of configuration method names
		:type rules: list of string
		"""
		self.rules = Utils.to_list(rules)
		for x in self.rules:
			f = getattr(self, x)
			if not f:
				self.fatal('No such configuration function %r' % x)
			f()
|
||||
|
||||
def conf(f):
	"""
	Decorator: attach new configuration functions to :py:class:`waflib.Build.BuildContext` and
	:py:class:`waflib.Configure.ConfigurationContext`. The methods bound will accept a parameter
	named 'mandatory' to disable the configuration errors::

		def configure(conf):
			conf.find_program('abc', mandatory=False)

	:param f: method to bind
	:type f: function
	"""
	def fun(*k, **kw):
		# 'mandatory' is consumed here and never forwarded to f
		mandatory = kw.pop('mandatory', True)
		try:
			return f(*k, **kw)
		except Errors.ConfigurationError:
			if mandatory:
				raise

	fun.__name__ = f.__name__
	setattr(ConfigurationContext, f.__name__, fun)
	setattr(Build.BuildContext, f.__name__, fun)
	return f
|
||||
|
||||
@conf
def add_os_flags(self, var, dest=None, dup=False):
	"""
	Import operating system environment values into ``conf.env`` dict::

		def configure(conf):
			conf.add_os_flags('CFLAGS')

	:param var: variable to use
	:type var: string
	:param dest: destination variable, by default the same as var
	:type dest: string
	:param dup: add the same set of flags again even if already present
	:type dup: bool
	"""
	try:
		flags = shlex.split(self.environ[var])
	except KeyError:
		# nothing to import if the variable is not set in the environment
		return
	target = dest or var
	if dup or ''.join(flags) not in ''.join(Utils.to_list(self.env[target])):
		self.env.append_value(target, flags)
|
||||
|
||||
@conf
def cmd_to_list(self, cmd):
	"""
	Detect if a command is written in pseudo shell like ``ccache g++`` and return a list.

	:param cmd: command
	:type cmd: a string or a list of string
	"""
	if not isinstance(cmd, str):
		# already a list: pass it through untouched
		return cmd
	if os.path.isfile(cmd):
		# do not take any risk: an existing path is a single command
		return [cmd]
	if os.sep == '/':
		return shlex.split(cmd)
	try:
		return shlex.split(cmd, posix=False)
	except TypeError:
		# Python 2.5 on windows?
		return shlex.split(cmd)
|
||||
|
||||
@conf
def check_waf_version(self, mini='1.8.99', maxi='2.0.0', **kw):
	"""
	Raise a Configuration error if the Waf version does not strictly match the given bounds::

		conf.check_waf_version(mini='1.8.99', maxi='2.0.0')

	:type  mini: number, tuple or string
	:param mini: Minimum required version
	:type  maxi: number, tuple or string
	:param maxi: Maximum allowed version
	"""
	self.start_msg('Checking for waf version in %s-%s' % (str(mini), str(maxi)), **kw)
	ver = Context.HEXVERSION
	lo = Utils.num2ver(mini)
	hi = Utils.num2ver(maxi)
	if lo > ver:
		self.fatal('waf version should be at least %r (%r found)' % (lo, ver))
	if hi < ver:
		self.fatal('waf version should be at most %r (%r found)' % (hi, ver))
	self.end_msg('ok', **kw)
|
||||
|
||||
@conf
def find_file(self, filename, path_list=None):
	"""
	Finds a file in a list of paths.

	:param filename: name of the file to search for (or a list of alternative names)
	:param path_list: list of directories to search
	:return: the first occurrence filename
	:raises: a configuration error (``self.fatal``) if the file could not be found
	"""
	# fixed: the default used to be a mutable list literal ([]); use None
	# and create a fresh list per call
	if path_list is None:
		path_list = []
	for name in Utils.to_list(filename):
		for directory in Utils.to_list(path_list):
			candidate = os.path.expanduser(os.path.join(directory, name))
			if os.path.exists(candidate):
				return candidate
	self.fatal('Could not find %r' % filename)
|
||||
|
||||
@conf
def find_program(self, filename, **kw):
	"""
	Search for a program on the operating system

	When var is used, you may set os.environ[var] to help find a specific program version, for example::

		$ CC='ccache gcc' waf configure

	:param path_list: paths to use for searching
	:type param_list: list of string
	:param var: store the result to conf.env[var] where var defaults to filename.upper() if not provided; the result is stored as a list of strings
	:type var: string
	:param value: obtain the program from the value passed exclusively
	:type value: list or string (list is preferred)
	:param ext: list of extensions for the binary (do not add an extension for portability)
	:type ext: list of string
	:param msg: name to display in the log, by default filename is used
	:type msg: string
	:param interpreter: interpreter for the program
	:type interpreter: ConfigSet variable key
	"""

	# candidate extensions depend on the platform (win32 executables vs scripts)
	exts = kw.get('exts', Utils.is_win32 and '.exe,.com,.bat,.cmd' or ',.sh,.pl,.py')

	environ = kw.get('environ', getattr(self, 'environ', os.environ))

	ret = ''

	filename = Utils.to_list(filename)
	msg = kw.get('msg', ', '.join(filename))

	# default storage key: first candidate name, upper-cased, '-' and '.' mapped to '_'
	var = kw.get('var', '')
	if not var:
		var = re.sub(r'[-.]', '_', filename[0].upper())

	path_list = kw.get('path_list', '')
	if path_list:
		path_list = Utils.to_list(path_list)
	else:
		path_list = environ.get('PATH', '').split(os.pathsep)

	# lookup priority: explicit 'value' kw > os environment > conf.env > PATH scan
	if kw.get('value'):
		# user-provided in command-line options and passed to find_program
		ret = self.cmd_to_list(kw['value'])
	elif environ.get(var):
		# user-provided in the os environment
		ret = self.cmd_to_list(environ[var])
	elif self.env[var]:
		# a default option in the wscript file
		ret = self.cmd_to_list(self.env[var])
	else:
		if not ret:
			ret = self.find_binary(filename, exts.split(','), path_list)
		# windows only: fall back to the registry App Paths (Utils.winreg is
		# None elsewhere)
		if not ret and Utils.winreg:
			ret = Utils.get_registry_app_path(Utils.winreg.HKEY_CURRENT_USER, filename)
		if not ret and Utils.winreg:
			ret = Utils.get_registry_app_path(Utils.winreg.HKEY_LOCAL_MACHINE, filename)
		ret = self.cmd_to_list(ret)

	if ret:
		if len(ret) == 1:
			retmsg = ret[0]
		else:
			retmsg = ret
	else:
		retmsg = False

	self.msg('Checking for program %r' % msg, retmsg, **kw)
	if not kw.get('quiet'):
		self.to_log('find program=%r paths=%r var=%r -> %r' % (filename, path_list, var, ret))

	if not ret:
		self.fatal(kw.get('errmsg', '') or 'Could not find the program %r' % filename)

	interpreter = kw.get('interpreter')
	if interpreter is None:
		if not Utils.check_exe(ret[0], env=environ):
			self.fatal('Program %r is not executable' % ret)
		self.env[var] = ret
	else:
		# scripts are stored as interpreter + script path
		self.env[var] = self.env[interpreter] + ret

	return ret
|
||||
|
||||
@conf
def find_binary(self, filenames, exts, paths):
	"""
	Helper for :py:func:`waflib.Configure.find_program`: tries each candidate
	name with each extension, either as an absolute path or against the given
	search paths, and returns the first existing file (or None).
	"""
	for base in filenames:
		for ext in exts:
			candidate = base + ext
			if os.path.isabs(candidate):
				if os.path.isfile(candidate):
					return candidate
			else:
				for directory in paths:
					full = os.path.expanduser(os.path.join(directory, candidate))
					if os.path.isfile(full):
						return full
	return None
|
||||
|
||||
@conf
def run_build(self, *k, **kw):
	"""
	Create a temporary build context to execute a build. A reference to that build
	context is kept on self.test_bld for debugging purposes, and you should not rely
	on it too much (read the note on the cache below).
	The parameters given in the arguments to this function are passed as arguments for
	a single task generator created in the build. Only three parameters are obligatory:

	:param features: features to pass to a task generator created in the build
	:type features: list of string
	:param compile_filename: file to create for the compilation (default: *test.c*)
	:type compile_filename: string
	:param code: code to write in the filename to compile
	:type code: string

	Though this function returns *0* by default, the build may set an attribute named *retval* on the
	build context object to return a particular value. See :py:func:`waflib.Tools.c_config.test_exec_fun` for example.

	This function also provides a limited cache. To use it, provide the following option::

		def options(opt):
			opt.add_option('--confcache', dest='confcache', default=0,
				action='count', help='Use a configuration cache')

	And execute the configuration with the following command-line::

		$ waf configure --confcache

	"""
	# the test folder name is derived from a hash of all keyword values
	# except the ConfigSet, so identical checks share a cache entry
	lst = [str(v) for (p, v) in kw.items() if p != 'env']
	h = Utils.h_list(lst)
	# the folder is hidden (leading dot) on non-windows platforms
	dir = self.bldnode.abspath() + os.sep + (not Utils.is_win32 and '.' or '') + 'conf_check_' + Utils.to_hex(h)

	try:
		os.makedirs(dir)
	except OSError:
		# best effort: the folder may already exist
		pass

	try:
		os.stat(dir)
	except OSError:
		self.fatal('cannot use the configuration test folder %r' % dir)

	cachemode = getattr(Options.options, 'confcache', None)
	if cachemode == 1:
		# try to return a previously cached result without building anything
		try:
			proj = ConfigSet.ConfigSet(os.path.join(dir, 'cache_run_build'))
		except EnvironmentError:
			pass
		else:
			ret = proj['cache_run_build']
			if isinstance(ret, str) and ret.startswith('Test does not build'):
				self.fatal(ret)
			return ret

	bdir = os.path.join(dir, 'testbuild')

	if not os.path.exists(bdir):
		os.makedirs(bdir)

	cls_name = kw.get('run_build_cls') or getattr(self, 'run_build_cls', 'build')
	self.test_bld = bld = Context.create_context(cls_name, top_dir=dir, out_dir=bdir)
	bld.init_dirs()
	bld.progress_bar = 0
	bld.targets = '*'

	bld.logger = self.logger
	bld.all_envs.update(self.all_envs) # not really necessary
	bld.env = kw['env']

	bld.kw = kw
	bld.conf = self
	kw['build_fun'](bld)
	ret = -1
	try:
		try:
			bld.compile()
		except Errors.WafError:
			ret = 'Test does not build: %s' % Utils.ex_stack()
			self.fatal(ret)
		else:
			ret = getattr(bld, 'retval', 0)
	finally:
		# with the cache enabled, persist the result; otherwise remove the
		# whole temporary test folder
		if cachemode == 1:
			# cache the results each time
			proj = ConfigSet.ConfigSet()
			proj['cache_run_build'] = ret
			proj.store(os.path.join(dir, 'cache_run_build'))
		else:
			shutil.rmtree(dir)
	return ret
|
||||
|
||||
@conf
def ret_msg(self, msg, args):
	"""
	Returns *msg* unchanged when it is a string, otherwise treats it as a
	callback and returns ``msg(args)``.
	"""
	return msg if isinstance(msg, str) else msg(args)
|
||||
|
||||
@conf
def test(self, *k, **kw):
	"""
	Executes a configuration test: runs a temporary build through
	:py:func:`waflib.Configure.run_build` and reports success/failure with
	``kw['msg']`` / ``kw['okmsg']`` / ``kw['errmsg']``.
	"""

	# each check runs against a private copy of the configuration set
	if not 'env' in kw:
		kw['env'] = self.env.derive()

	# validate_c for example
	if kw.get('validate'):
		kw['validate'](kw)

	self.start_msg(kw['msg'], **kw)
	ret = None
	try:
		ret = self.run_build(*k, **kw)
	except self.errors.ConfigurationError:
		self.end_msg(kw['errmsg'], 'YELLOW', **kw)
		if Logs.verbose > 1:
			# in very verbose mode, let the original traceback through
			raise
		else:
			self.fatal('The configuration failed')
	else:
		kw['success'] = ret

	# post_check may replace the result (non-zero/True means failure below)
	if kw.get('post_check'):
		ret = kw['post_check'](kw)

	if ret:
		self.end_msg(kw['errmsg'], 'YELLOW', **kw)
		self.fatal('The configuration failed %r' % ret)
	else:
		self.end_msg(self.ret_msg(kw['okmsg'], kw), **kw)
	return ret
|
723
third_party/waf/waflib/Context.py
vendored
Normal file
723
third_party/waf/waflib/Context.py
vendored
Normal file
@ -0,0 +1,723 @@
|
||||
#! /usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
|
||||
|
||||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# Thomas Nagy, 2010-2016 (ita)
|
||||
|
||||
"""
|
||||
Classes and functions enabling the command system
|
||||
"""
|
||||
|
||||
import os, re, imp, sys
|
||||
from waflib import Utils, Errors, Logs
|
||||
import waflib.Node
|
||||
|
||||
# the following 3 constants are updated on each new release (do not touch)
|
||||
HEXVERSION=0x1090a00
|
||||
"""Constant updated on new releases"""
|
||||
|
||||
WAFVERSION="1.9.10"
|
||||
"""Constant updated on new releases"""
|
||||
|
||||
WAFREVISION="ae3f254315e0dcea4059703987148882ba414894"
|
||||
"""Git revision when the waf version is updated"""
|
||||
|
||||
ABI = 99
|
||||
"""Version of the build data cache file format (used in :py:const:`waflib.Context.DBFILE`)"""
|
||||
|
||||
DBFILE = '.wafpickle-%s-%d-%d' % (sys.platform, sys.hexversion, ABI)
|
||||
"""Name of the pickle file for storing the build data"""
|
||||
|
||||
APPNAME = 'APPNAME'
|
||||
"""Default application name (used by ``waf dist``)"""
|
||||
|
||||
VERSION = 'VERSION'
|
||||
"""Default application version (used by ``waf dist``)"""
|
||||
|
||||
TOP = 'top'
|
||||
"""The variable name for the top-level directory in wscript files"""
|
||||
|
||||
OUT = 'out'
|
||||
"""The variable name for the output directory in wscript files"""
|
||||
|
||||
WSCRIPT_FILE = 'wscript'
|
||||
"""Name of the waf script files"""
|
||||
|
||||
launch_dir = ''
|
||||
"""Directory from which waf has been called"""
|
||||
run_dir = ''
|
||||
"""Location of the wscript file to use as the entry point"""
|
||||
top_dir = ''
|
||||
"""Location of the project directory (top), if the project was configured"""
|
||||
out_dir = ''
|
||||
"""Location of the build directory (out), if the project was configured"""
|
||||
waf_dir = ''
|
||||
"""Directory containing the waf modules"""
|
||||
|
||||
g_module = None
|
||||
"""
|
||||
Module representing the top-level wscript file (see :py:const:`waflib.Context.run_dir`)
|
||||
"""
|
||||
|
||||
STDOUT = 1
|
||||
STDERR = -1
|
||||
BOTH = 0
|
||||
|
||||
classes = []
|
||||
"""
|
||||
List of :py:class:`waflib.Context.Context` subclasses that can be used as waf commands. The classes
|
||||
are added automatically by a metaclass.
|
||||
"""
|
||||
|
||||
def create_context(cmd_name, *k, **kw):
	"""
	Returns a new :py:class:`waflib.Context.Context` instance corresponding to the given command.
	Used in particular by :py:func:`waflib.Scripting.run_command`

	:param cmd_name: command name
	:type cmd_name: string
	:param k: arguments to give to the context class initializer
	:type k: list
	:param kw: keyword arguments to give to the context class initializer
	:type kw: dict
	:return: Context object
	:rtype: :py:class:`waflib.Context.Context`
	"""
	global classes
	for cls in classes:
		if cls.cmd == cmd_name:
			return cls(*k, **kw)
	# no registered command class matches: fall back to a bare Context
	ctx = Context(*k, **kw)
	ctx.fun = cmd_name
	return ctx
|
||||
|
||||
class store_context(type):
	"""
	Metaclass that registers command classes into the list :py:const:`waflib.Context.classes`
	Context classes must provide an attribute 'cmd' representing the command name, and a function
	attribute 'fun' representing the function name that the command uses.
	"""
	def __init__(cls, name, bases, dict):
		super(store_context, cls).__init__(name, bases, dict)
		name = cls.__name__

		if name in ('ctx', 'Context'):
			# the two base classes are not commands themselves
			return

		if not hasattr(cls, 'cmd'):
			raise Errors.WafError('Missing command for the context class %r (cmd)' % name)

		if not getattr(cls, 'fun', None):
			# the function name defaults to the command name
			cls.fun = cls.cmd

		global classes
		classes.insert(0, cls)

ctx = store_context('ctx', (object,), {})
"""Base class for all :py:class:`waflib.Context.Context` classes"""
|
||||
|
||||
class Context(ctx):
|
||||
"""
|
||||
Default context for waf commands, and base class for new command contexts.
|
||||
|
||||
Context objects are passed to top-level functions::
|
||||
|
||||
def foo(ctx):
|
||||
print(ctx.__class__.__name__) # waflib.Context.Context
|
||||
|
||||
Subclasses must define the class attributes 'cmd' and 'fun':
|
||||
|
||||
:param cmd: command to execute as in ``waf cmd``
|
||||
:type cmd: string
|
||||
:param fun: function name to execute when the command is called
|
||||
:type fun: string
|
||||
|
||||
.. inheritance-diagram:: waflib.Context.Context waflib.Build.BuildContext waflib.Build.InstallContext waflib.Build.UninstallContext waflib.Build.StepContext waflib.Build.ListContext waflib.Configure.ConfigurationContext waflib.Scripting.Dist waflib.Scripting.DistCheck waflib.Build.CleanContext
|
||||
|
||||
"""
|
||||
|
||||
errors = Errors
|
||||
"""
|
||||
Shortcut to :py:mod:`waflib.Errors` provided for convenience
|
||||
"""
|
||||
|
||||
tools = {}
|
||||
"""
|
||||
A module cache for wscript files; see :py:meth:`Context.Context.load`
|
||||
"""
|
||||
|
||||
def __init__(self, **kw):
	"""
	Initializes the context; the starting directory is taken from the
	keyword argument 'run_dir' when present, else from the module-level
	default :py:data:`waflib.Context.run_dir`.
	"""
	if 'run_dir' in kw:
		rd = kw['run_dir']
	else:
		global run_dir
		rd = run_dir

	# a per-context Node subclass binds nodes to this context,
	# which avoids a process-wide context singleton
	node_cls = type('Nod3', (waflib.Node.Node,), {})
	node_cls.__module__ = 'waflib.Node'
	node_cls.ctx = self
	self.node_class = node_cls

	self.root = node_cls('', None)
	self.cur_script = None
	self.path = self.root.find_dir(rd)

	self.stack_path = []
	# names exposed to executed wscript code; all aliases point at this context
	self.exec_dict = {'ctx': self, 'conf': self, 'bld': self, 'opt': self}
	self.logger = None
||||
|
||||
def finalize(self):
	"""
	Releases resources held by the context, notably open logger file handles.
	Safe to call when no logger was ever attached.
	"""
	_missing = object()
	logger = getattr(self, 'logger', _missing)
	if logger is not _missing:
		Logs.free_logger(logger)
		delattr(self, 'logger')
||||
|
||||
def load(self, tool_list, *k, **kw):
	"""
	Loads one or more Waf tools as modules, then calls the function named
	:py:const:`waflib.Context.Context.fun` from each (when it exists).
	A ``tooldir`` keyword may supply a list of extra module paths.

	:param tool_list: Waf tool names to load
	:type tool_list: list of string or space-separated string
	"""
	names = Utils.to_list(tool_list)
	dirs = Utils.to_list(kw.get('tooldir', ''))
	use_sys_path = kw.get('with_sys_path', True)

	for name in names:
		module = load_tool(name, dirs, with_sys_path=use_sys_path)
		entry = getattr(module, kw.get('name', self.fun), None)
		if entry:
			entry(self)
||||
|
||||
def execute(self):
	"""
	Runs the command function from the top-level wscript file; subclasses
	usually override this to add behavior.
	"""
	global g_module
	top = os.path.dirname(g_module.root_path)
	self.recurse([top])
||||
|
||||
def pre_recurse(self, node):
	"""
	Executed immediately before a folder is read by :py:meth:`waflib.Context.Context.recurse`.
	Binds the script being read to ``self.cur_script`` and its folder to ``self.path``.

	:param node: script
	:type node: :py:class:`waflib.Node.Node`
	"""
	# remember the previous script so post_recurse can restore it
	self.stack_path.append(self.cur_script)
	self.cur_script = node
	self.path = node.parent
||||
|
||||
def post_recurse(self, node):
	"""
	Restores ``self.cur_script`` and ``self.path`` after a
	:py:meth:`waflib.Context.Context.recurse` call completes.

	:param node: script
	:type node: :py:class:`waflib.Node.Node`
	"""
	previous = self.stack_path.pop()
	self.cur_script = previous
	if previous:
		self.path = previous.parent
||||
|
||||
def recurse(self, dirs, name=None, mandatory=True, once=True, encoding=None):
	"""
	Runs user-provided functions from the supplied list of directories.
	The directories can be either absolute, or relative to the directory
	of the wscript file

	The methods :py:meth:`waflib.Context.Context.pre_recurse` and
	:py:meth:`waflib.Context.Context.post_recurse` are called immediately before
	and after a script has been executed.

	:param dirs: List of directories to visit
	:type dirs: list of string or space-separated string
	:param name: Name of function to invoke from the wscript
	:type name: string
	:param mandatory: whether sub wscript files are required to exist
	:type mandatory: bool
	:param once: read the script file once for a particular context
	:type once: bool
	"""
	# the cache lives on the instance so each context re-reads scripts independently
	try:
		cache = self.recurse_cache
	except AttributeError:
		cache = self.recurse_cache = {}

	for d in Utils.to_list(dirs):

		if not os.path.isabs(d):
			# absolute paths only
			d = os.path.join(self.path.abspath(), d)

		WSCRIPT = os.path.join(d, WSCRIPT_FILE)
		# a file named 'wscript_<fun>' holds only the body of that one function
		WSCRIPT_FUN = WSCRIPT + '_' + (name or self.fun)

		node = self.root.find_node(WSCRIPT_FUN)
		if node and (not once or node not in cache):
			cache[node] = True
			self.pre_recurse(node)
			try:
				# the file body is executed directly in exec_dict (no module object)
				function_code = node.read('rU', encoding)
				exec(compile(function_code, node.abspath(), 'exec'), self.exec_dict)
			finally:
				self.post_recurse(node)
		elif not node:
			# fall back to a full 'wscript' file loaded as a module
			node = self.root.find_node(WSCRIPT)
			# cache key includes the function name: the same wscript may be
			# recursed for different commands
			tup = (node, name or self.fun)
			if node and (not once or tup not in cache):
				cache[tup] = True
				self.pre_recurse(node)
				try:
					wscript_module = load_module(node.abspath(), encoding=encoding)
					user_function = getattr(wscript_module, (name or self.fun), None)
					if not user_function:
						if not mandatory:
							continue
						raise Errors.WafError('No function %r defined in %s' % (name or self.fun, node.abspath()))
					user_function(self)
				finally:
					self.post_recurse(node)
			elif not node:
				if not mandatory:
					continue
				# distinguish 'folder unreadable' from 'no wscript present'
				try:
					os.listdir(d)
				except OSError:
					raise Errors.WafError('Cannot read the folder %r' % d)
				raise Errors.WafError('No wscript file in directory %s' % d)
||||
|
||||
def exec_command(self, cmd, **kw):
	"""
	Runs an external process and returns the exit status::

		def run(tsk):
			ret = tsk.generator.bld.exec_command('touch foo.txt')
			return ret

	If the context has the attribute 'log', then captures and logs the process stderr/stdout.
	Unlike :py:meth:`waflib.Context.Context.cmd_and_log`, this method does not return the
	stdout/stderr values captured.

	:param cmd: command argument for subprocess.Popen
	:type cmd: string or list
	:param kw: keyword arguments for subprocess.Popen. The parameters input/timeout will be passed to wait/communicate.
	:type kw: dict
	:returns: process exit status
	:rtype: integer
	"""
	subprocess = Utils.subprocess
	# string commands go through the shell, list commands do not
	kw['shell'] = isinstance(cmd, str)
	Logs.debug('runner: %r', cmd)
	Logs.debug('runner_env: kw=%s', kw)

	if self.logger:
		self.logger.info(cmd)

	if 'stdout' not in kw:
		kw['stdout'] = subprocess.PIPE
	if 'stderr' not in kw:
		kw['stderr'] = subprocess.PIPE

	if Logs.verbose and not kw['shell'] and not Utils.check_exe(cmd[0]):
		raise Errors.WafError('Program %s not found!' % cmd[0])

	# parameters meant for communicate()/wait() rather than Popen()
	cargs = {}
	if 'timeout' in kw:
		if sys.hexversion >= 0x3030000:
			cargs['timeout'] = kw['timeout']
			if not 'start_new_session' in kw:
				kw['start_new_session'] = True
		del kw['timeout']
	if 'input' in kw:
		if kw['input']:
			cargs['input'] = kw['input']
			kw['stdin'] = subprocess.PIPE
		del kw['input']

	if 'cwd' in kw:
		# accept Node objects for cwd
		if not isinstance(kw['cwd'], str):
			kw['cwd'] = kw['cwd'].abspath()

	try:
		ret, out, err = Utils.run_process(cmd, kw, cargs)
	except Exception as e:
		# bugfix: 'except Exception ,e' is Python-2-only syntax and a
		# SyntaxError under Python 3; use the 'as' form
		raise Errors.WafError('Execution failure: %s' % str(e), ex=e)

	if out:
		if not isinstance(out, str):
			out = out.decode(sys.stdout.encoding or 'iso8859-1', errors='replace')
		if self.logger:
			self.logger.debug('out: %s', out)
		else:
			Logs.info(out, extra={'stream':sys.stdout, 'c1': ''})
	if err:
		if not isinstance(err, str):
			err = err.decode(sys.stdout.encoding or 'iso8859-1', errors='replace')
		if self.logger:
			self.logger.error('err: %s' % err)
		else:
			Logs.info(err, extra={'stream':sys.stderr, 'c1': ''})

	return ret
||||
|
||||
def cmd_and_log(self, cmd, **kw):
	"""
	Executes a process and returns stdout/stderr if the execution is successful.
	An exception is thrown when the exit status is non-0. In that case, both stderr and stdout
	will be bound to the WafError object::

		def configure(conf):
			out = conf.cmd_and_log(['echo', 'hello'], output=waflib.Context.STDOUT, quiet=waflib.Context.BOTH)
			(out, err) = conf.cmd_and_log(['echo', 'hello'], output=waflib.Context.BOTH)
			(out, err) = conf.cmd_and_log(cmd, input='\\n', output=waflib.Context.STDOUT)
			try:
				conf.cmd_and_log(['which', 'someapp'], output=waflib.Context.BOTH)
			except Exception as e:
				print(e.stdout, e.stderr)

	:param cmd: args for subprocess.Popen
	:type cmd: list or string
	:param kw: keyword arguments for subprocess.Popen. The parameters input/timeout will be passed to wait/communicate.
	:type kw: dict
	:returns: process exit status
	:rtype: integer
	:raises: :py:class:`waflib.Errors.WafError` if an invalid executable is specified for a non-shell process
	:raises: :py:class:`waflib.Errors.WafError` in case of execution failure; stdout/stderr/returncode are bound to the exception object
	"""
	subprocess = Utils.subprocess
	kw['shell'] = isinstance(cmd, str)
	Logs.debug('runner: %r', cmd)

	if 'quiet' in kw:
		quiet = kw['quiet']
		del kw['quiet']
	else:
		quiet = None

	if 'output' in kw:
		to_ret = kw['output']
		del kw['output']
	else:
		to_ret = STDOUT

	if Logs.verbose and not kw['shell'] and not Utils.check_exe(cmd[0]):
		raise Errors.WafError('Program %r not found!' % cmd[0])

	kw['stdout'] = kw['stderr'] = subprocess.PIPE
	if quiet is None:
		self.to_log(cmd)

	# parameters meant for communicate()/wait() rather than Popen()
	cargs = {}
	if 'timeout' in kw:
		if sys.hexversion >= 0x3030000:
			cargs['timeout'] = kw['timeout']
			if not 'start_new_session' in kw:
				kw['start_new_session'] = True
		del kw['timeout']
	if 'input' in kw:
		if kw['input']:
			cargs['input'] = kw['input']
			kw['stdin'] = subprocess.PIPE
		del kw['input']

	if 'cwd' in kw:
		# accept Node objects for cwd
		if not isinstance(kw['cwd'], str):
			kw['cwd'] = kw['cwd'].abspath()

	try:
		ret, out, err = Utils.run_process(cmd, kw, cargs)
	except Exception as e:
		# bugfix: 'except Exception ,e' is Python-2-only syntax and a
		# SyntaxError under Python 3; use the 'as' form
		raise Errors.WafError('Execution failure: %s' % str(e), ex=e)

	if not isinstance(out, str):
		out = out.decode(sys.stdout.encoding or 'iso8859-1', errors='replace')
	if not isinstance(err, str):
		err = err.decode(sys.stdout.encoding or 'iso8859-1', errors='replace')

	if out and quiet != STDOUT and quiet != BOTH:
		self.to_log('out: %s' % out)
	if err and quiet != STDERR and quiet != BOTH:
		self.to_log('err: %s' % err)

	if ret:
		e = Errors.WafError('Command %r returned %r' % (cmd, ret))
		e.returncode = ret
		e.stderr = err
		e.stdout = out
		raise e

	if to_ret == BOTH:
		return (out, err)
	elif to_ret == STDERR:
		return err
	return out
||||
|
||||
def fatal(self, msg, ex=None):
	"""
	Prints an error message in red and aborts the command by raising a
	ConfigurationError; typically used from configuration code::

		def configure(conf):
			conf.fatal('a requirement is missing')

	:param msg: message to display
	:type msg: string
	:param ex: optional exception object
	:type ex: exception
	:raises: :py:class:`waflib.Errors.ConfigurationError`
	"""
	if self.logger:
		self.logger.info('from %s: %s' % (self.path.abspath(), msg))
	# append the log file location when a file logger is attached
	try:
		logfile = self.logger.handlers[0].baseFilename
	except AttributeError:
		pass
	else:
		msg = '%s\n(complete log in %s)' % (msg, logfile)
	raise self.errors.ConfigurationError(msg, ex=ex)
||||
|
||||
def to_log(self, msg):
	"""
	Writes a message to the logger when one is attached, else to stderr.
	Empty messages are discarded::

		def build(bld):
			bld.to_log('starting the build')

	Provide a logger on the context class or override this method if necessary.

	:param msg: message
	:type msg: string
	"""
	if not msg:
		return
	logger = self.logger
	if logger:
		logger.info(msg)
		return
	sys.stderr.write(str(msg))
	sys.stderr.flush()
||||
|
||||
|
||||
def msg(self, *k, **kw):
	"""
	Prints a configuration message of the form ``msg: result``; the result
	part is colored. Setting ``in_msg`` to a positive value silences the
	output::

		def configure(conf):
			self.in_msg = 1
			conf.msg('Checking for library foo', 'ok')
			# no output

	:param msg: message to display to the user
	:type msg: string
	:param result: result to display
	:type result: string or boolean
	:param color: color to use, see :py:const:`waflib.Logs.colors_lst`
	:type color: string
	"""
	if 'msg' in kw:
		text = kw['msg']
	else:
		text = k[0]
	self.start_msg(text, **kw)

	if 'result' in kw:
		result = kw['result']
	else:
		result = k[1]

	color = kw.get('color')
	if not isinstance(color, str):
		# truthy results render green, falsy ones yellow
		color = 'GREEN' if result else 'YELLOW'

	self.end_msg(result, color, **kw)
||||
|
||||
def start_msg(self, *k, **kw):
	"""
	Prints the beginning of a 'Checking for xxx' message. See :py:meth:`waflib.Context.Context.msg`
	"""
	if kw.get('quiet'):
		return

	text = kw.get('msg') or k[0]

	# nested messages are counted but not displayed
	counter = getattr(self, 'in_msg', 0)
	if counter:
		self.in_msg = counter + 1
		return
	self.in_msg = counter + 1

	# keep the widest column width seen so far for alignment
	if hasattr(self, 'line_just'):
		self.line_just = max(self.line_just, len(text))
	else:
		self.line_just = max(40, len(text))

	self.to_log(self.line_just * '-')
	self.to_log(text)
	Logs.pprint('NORMAL', "%s :" % text.ljust(self.line_just), sep='')
||||
|
||||
def end_msg(self, *k, **kw):
	"""Prints the end of a 'Checking for' message. See :py:meth:`waflib.Context.Context.msg`"""
	if kw.get('quiet'):
		return
	self.in_msg -= 1
	if self.in_msg:
		# still inside a nested message: stay silent
		return

	result = kw.get('result') or k[0]

	# boolean results map to canonical strings; anything else is stringified
	if result == True:
		text, fallback = 'ok', 'GREEN'
	elif result == False:
		text, fallback = 'not found', 'YELLOW'
	else:
		text, fallback = str(result), 'GREEN'

	self.to_log(text)

	if 'color' in kw:
		color = kw['color']
	elif len(k) > 1 and k[1] in Logs.colors_lst:
		# compatibility waf 1.7: color passed positionally
		color = k[1]
	else:
		color = fallback
	Logs.pprint(color, text)
||||
|
||||
def load_special_tools(self, var, ban=[]):
	"""
	Loads third-party extensions modules for certain programming languages
	by trying to list certain files in the extras/ directory. This method
	is typically called once for a programming language group, see for
	example :py:mod:`waflib.Tools.compiler_c`

	:param var: glob expression, for example 'cxx\\_\\*.py'
	:type var: string
	:param ban: list of exact file names to exclude (read-only; the default
		list is never mutated)
	:type ban: list of string
	"""
	global waf_dir
	if os.path.isdir(waf_dir):
		# unpacked waf: glob the extras folder directly
		lst = self.root.find_node(waf_dir).find_node('waflib/extras').ant_glob(var)
		for x in lst:
			if not x.name in ban:
				load_tool(x.name.replace('.py', ''))
	else:
		# waf packed as a zip file: scan the archive member names
		from zipfile import PyZipFile
		waflibs = PyZipFile(waf_dir)
		lst = waflibs.namelist()
		for x in lst:
			# bugfix: test each member name (x) against the pattern; the
			# previous code matched 'var' against a regex derived from
			# 'var' itself, so the filter never inspected the file names
			if not re.match('waflib/extras/%s' % var.replace('*', '.*'), x):
				continue
			f = os.path.basename(x)
			doban = False
			for b in ban:
				r = b.replace('*', '.*')
				if re.match(r, f):
					doban = True
			if not doban:
				f = f.replace('.py', '')
				load_tool(f)
||||
|
||||
cache_modules = {}
|
||||
"""
|
||||
Dictionary holding already loaded modules (wscript), indexed by their absolute path.
|
||||
The modules are added automatically by :py:func:`waflib.Context.load_module`
|
||||
"""
|
||||
|
||||
def load_module(path, encoding=None):
	"""
	Loads a wscript file as a python module. Results are cached in
	:py:attr:`waflib.Context.cache_modules`, keyed by absolute path.

	:param path: file path
	:type path: string
	:return: Loaded Python module
	:rtype: module
	"""
	if path in cache_modules:
		return cache_modules[path]

	module = imp.new_module(WSCRIPT_FILE)
	try:
		code = Utils.readf(path, m='rU', encoding=encoding)
	except EnvironmentError:
		raise Errors.WafError('Could not read the file %r' % path)

	# make imports relative to the script folder work while executing it
	script_dir = os.path.dirname(path)
	sys.path.insert(0, script_dir)
	try:
		exec(compile(code, path, 'exec'), module.__dict__)
	finally:
		sys.path.remove(script_dir)

	cache_modules[path] = module
	return module
||||
|
||||
def load_tool(tool, tooldir=None, ctx=None, with_sys_path=True):
	"""
	Imports a Waf tool as a python module, and stores it in the dict :py:const:`waflib.Context.Context.tools`

	:type tool: string
	:param tool: Name of the tool
	:type tooldir: list
	:param tooldir: List of directories to search for the tool module
	:type with_sys_path: boolean
	:param with_sys_path: whether or not to search the regular sys.path, besides waf_dir and potentially given tooldirs
	"""
	if tool == 'java':
		tool = 'javaw' # jython
	else:
		# 'c++' -> 'cxx': module names cannot contain '+'
		tool = tool.replace('++', 'xx')

	if not with_sys_path:
		# temporarily hide the regular sys.path; restored in the outer finally
		back_path = sys.path
		sys.path = []
	try:
		if tooldir:
			assert isinstance(tooldir, list)
			sys.path = tooldir + sys.path
			try:
				__import__(tool)
			finally:
				# always drop the tooldir entries, even on import failure
				for d in tooldir:
					sys.path.remove(d)
			ret = sys.modules[tool]
			Context.tools[tool] = ret
			return ret
		else:
			# with an empty sys.path, waf_dir must be present for the waflib imports
			if not with_sys_path: sys.path.insert(0, waf_dir)
			try:
				# probe the usual namespaces in priority order; the for/else
				# triggers a final plain import (to raise ImportError) when none matched
				for x in ('waflib.Tools.%s', 'waflib.extras.%s', 'waflib.%s', '%s'):
					try:
						__import__(x % tool)
						break
					except ImportError:
						x = None
				else: # raise an exception
					__import__(tool)
			finally:
				if not with_sys_path: sys.path.remove(waf_dir)
			ret = sys.modules[x % tool]
			Context.tools[tool] = ret
			return ret
	finally:
		if not with_sys_path:
			# restore the hidden path entries
			sys.path += back_path
69
third_party/waf/waflib/Errors.py
vendored
Normal file
69
third_party/waf/waflib/Errors.py
vendored
Normal file
@ -0,0 +1,69 @@
|
||||
#! /usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
|
||||
|
||||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# Thomas Nagy, 2010-2016 (ita)
|
||||
|
||||
"""
|
||||
Exceptions used in the Waf code
|
||||
"""
|
||||
|
||||
import traceback, sys
|
||||
|
||||
class WafError(Exception):
	"""Root of the Waf exception hierarchy"""
	def __init__(self, msg='', ex=None):
		"""
		:param msg: error message
		:type msg: string
		:param ex: exception causing this error (optional)
		:type ex: exception
		"""
		assert not isinstance(msg, Exception)
		self.msg = msg

		# collect a traceback: reuse the stack of a wrapped WafError,
		# or extract the traceback of any other wrapped exception
		stack = []
		if ex:
			if not msg:
				self.msg = str(ex)
			if isinstance(ex, WafError):
				stack = ex.stack
			else:
				stack = traceback.extract_tb(sys.exc_info()[2])
		stack += traceback.extract_stack()[:-1]
		self.stack = stack
		self.verbose_msg = ''.join(traceback.format_list(self.stack))

	def __str__(self):
		return str(self.msg)
||||
|
||||
class BuildError(WafError):
	"""Error raised during the build and install phases"""
	def __init__(self, error_tasks=None):
		"""
		:param error_tasks: tasks that could not complete normally
		:type error_tasks: list of task objects
		"""
		# bugfix: the original default was a shared mutable list ([]);
		# a None sentinel gives each instance its own fresh list
		self.tasks = error_tasks if error_tasks is not None else []
		WafError.__init__(self, self.format_error())

	def format_error(self):
		"""Formats the error messages from the tasks that failed"""
		lst = ['Build failed']
		for tsk in self.tasks:
			txt = tsk.format_error()
			if txt:
				lst.append(txt)
		return '\n'.join(lst)
||||
|
||||
class ConfigurationError(WafError):
	"""Configuration exception raised in particular by :py:meth:`waflib.Context.Context.fatal`"""
	# no extra behavior: the subclass only provides a distinct type for except clauses
	pass
||||
|
||||
class TaskRescan(WafError):
	"""Task-specific exception type signalling required signature recalculations"""
	# marker exception only; carries no additional state
	pass
||||
|
||||
class TaskNotReady(WafError):
	"""Task-specific exception type signalling that task signatures cannot be computed"""
	# marker exception only; carries no additional state
	pass
384
third_party/waf/waflib/Logs.py
vendored
Normal file
384
third_party/waf/waflib/Logs.py
vendored
Normal file
@ -0,0 +1,384 @@
|
||||
#! /usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
|
||||
|
||||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# Thomas Nagy, 2005-2016 (ita)
|
||||
|
||||
"""
|
||||
logging, colors, terminal width and pretty-print
|
||||
"""
|
||||
|
||||
import os, re, traceback, sys
|
||||
from waflib import Utils, ansiterm
|
||||
|
||||
if not os.environ.get('NOSYNC', False):
|
||||
# synchronized output is nearly mandatory to prevent garbled output
|
||||
if sys.stdout.isatty() and id(sys.stdout) == id(sys.__stdout__):
|
||||
sys.stdout = ansiterm.AnsiTerm(sys.stdout)
|
||||
if sys.stderr.isatty() and id(sys.stderr) == id(sys.__stderr__):
|
||||
sys.stderr = ansiterm.AnsiTerm(sys.stderr)
|
||||
|
||||
# import the logging module after since it holds a reference on sys.stderr
|
||||
# in case someone uses the root logger
|
||||
import logging
|
||||
|
||||
LOG_FORMAT = os.environ.get('WAF_LOG_FORMAT', '%(asctime)s %(c1)s%(zone)s%(c2)s %(message)s')
|
||||
HOUR_FORMAT = os.environ.get('WAF_HOUR_FORMAT', '%H:%M:%S')
|
||||
|
||||
zones = []
|
||||
"""
|
||||
See :py:class:`waflib.Logs.log_filter`
|
||||
"""
|
||||
|
||||
verbose = 0
|
||||
"""
|
||||
Global verbosity level, see :py:func:`waflib.Logs.debug` and :py:func:`waflib.Logs.error`
|
||||
"""
|
||||
|
||||
colors_lst = {
|
||||
'USE' : True,
|
||||
'BOLD' :'\x1b[01;1m',
|
||||
'RED' :'\x1b[01;31m',
|
||||
'GREEN' :'\x1b[32m',
|
||||
'YELLOW':'\x1b[33m',
|
||||
'PINK' :'\x1b[35m',
|
||||
'BLUE' :'\x1b[01;34m',
|
||||
'CYAN' :'\x1b[36m',
|
||||
'GREY' :'\x1b[37m',
|
||||
'NORMAL':'\x1b[0m',
|
||||
'cursor_on' :'\x1b[?25h',
|
||||
'cursor_off' :'\x1b[?25l',
|
||||
}
|
||||
|
||||
indicator = '\r\x1b[K%s%s%s'
|
||||
|
||||
try:
|
||||
unicode
|
||||
except NameError:
|
||||
unicode = None
|
||||
|
||||
def enable_colors(use):
	"""
	Sets the color mode. With *1*, a few checks are performed (tty,
	terminal type) before enabling colors; *0* disables them and values
	above *1* force them on.

	:param use: whether to enable colors or not
	:type use: integer
	"""
	if use == 1:
		# auto mode: downgrade to 0 when no tty or a dumb terminal is detected
		if not (sys.stderr.isatty() or sys.stdout.isatty()):
			use = 0
		if Utils.is_win32 and os.name != 'java':
			term = os.environ.get('TERM', '') # has ansiterm
		else:
			term = os.environ.get('TERM', 'dumb')
		if term in ('dumb', 'emacs'):
			use = 0

	if use >= 1:
		os.environ['TERM'] = 'vt100'

	colors_lst['USE'] = use
||||
|
||||
# If console packages are available, replace the dummy function with a real
|
||||
# implementation
|
||||
try:
|
||||
get_term_cols = ansiterm.get_term_cols
|
||||
except AttributeError:
|
||||
def get_term_cols():
|
||||
return 80
|
||||
|
||||
get_term_cols.__doc__ = """
|
||||
Returns the console width in characters.
|
||||
|
||||
:return: the number of characters per line
|
||||
:rtype: int
|
||||
"""
|
||||
|
||||
def get_color(cl):
	"""
	Returns the ansi sequence corresponding to the given color name,
	or an empty string when coloring is globally disabled.

	:param cl: color name in capital letters
	:type cl: string
	"""
	if not colors_lst['USE']:
		return ''
	return colors_lst.get(cl, '')
||||
|
||||
class color_dict(object):
	"""Attribute-based color access, eg: colors.PINK"""
	def __getattr__(self, a):
		return get_color(a)
	def __call__(self, a):
		# calling is equivalent to attribute access: colors('PINK') == colors.PINK
		return self.__getattr__(a)
||||
|
||||
colors = color_dict()
|
||||
|
||||
re_log = re.compile(r'(\w+): (.*)', re.M)
|
||||
class log_filter(logging.Filter):
	"""
	Waf logs are of the form 'name: message', and can be filtered by 'waf --zones=name'.
	For example, the following::

		from waflib import Logs
		Logs.debug('test: here is a message')

	Will be displayed only when executing::

		$ waf --zones=test
	"""
	def __init__(self, name=''):
		logging.Filter.__init__(self, name)

	def filter(self, rec):
		"""
		Filters log records by zone and by logging level

		:param rec: log entry
		"""
		global verbose
		rec.zone = rec.module
		# INFO and above always pass through
		if rec.levelno >= logging.INFO:
			return True

		# 'zone: message' records are split into their zone and payload
		m = re_log.match(rec.msg)
		if m:
			rec.zone = m.group(1)
			rec.msg = m.group(2)

		if zones:
			return getattr(rec, 'zone', '') in zones or '*' in zones
		# without explicit zones, debug records require a high verbosity
		return verbose > 2
||||
|
||||
class log_handler(logging.StreamHandler):
	"""Dispatches messages to stderr/stdout depending on the severity level"""
	def emit(self, record):
		"""
		Delegates the functionality to :py:meth:`waflib.Log.log_handler.emit_override`
		"""
		# default implementation
		try:
			try:
				# a record may carry an explicit target stream
				self.stream = record.stream
			except AttributeError:
				# otherwise route warnings/errors to stderr, the rest to stdout
				if record.levelno >= logging.WARNING:
					record.stream = self.stream = sys.stderr
				else:
					record.stream = self.stream = sys.stdout
			self.emit_override(record)
			self.flush()
		except (KeyboardInterrupt, SystemExit):
			raise
		except: # from the python library -_-
			self.handleError(record)

	def emit_override(self, record, **kw):
		"""
		Writes the log record to the desired stream (stderr/stdout)
		"""
		# records may override the line terminator (e.g. progress output)
		self.terminator = getattr(record, 'terminator', '\n')
		stream = self.stream
		if unicode:
			# python2
			msg = self.formatter.format(record)
			fs = '%s' + self.terminator
			try:
				# try to honor the stream encoding, falling back progressively
				if (isinstance(msg, unicode) and getattr(stream, 'encoding', None)):
					fs = fs.decode(stream.encoding)
					try:
						stream.write(fs % msg)
					except UnicodeEncodeError:
						stream.write((fs % msg).encode(stream.encoding))
				else:
					stream.write(fs % msg)
			except UnicodeError:
				stream.write((fs % msg).encode('utf-8'))
		else:
			# python3: the standard StreamHandler handles text correctly
			logging.StreamHandler.emit(self, record)
||||
|
||||
class formatter(logging.Formatter):
	"""Simple log formatter which handles colors"""
	def __init__(self):
		logging.Formatter.__init__(self, LOG_FORMAT, HOUR_FORMAT)

	def format(self, rec):
		"""
		Formats records and adds colors as needed. The records do not get
		a leading hour format if the logging level is above *INFO*.
		"""
		# python2 byte messages are decoded when possible
		try:
			msg = rec.msg.decode('utf-8')
		except Exception:
			msg = rec.msg

		use = colors_lst['USE']
		if (use == 1 and rec.stream.isatty()) or use == 2:

			# records may carry explicit color codes (c1/c2); otherwise
			# the color is derived from the severity level
			c1 = getattr(rec, 'c1', None)
			if c1 is None:
				c1 = ''
				if rec.levelno >= logging.ERROR:
					c1 = colors.RED
				elif rec.levelno >= logging.WARNING:
					c1 = colors.YELLOW
				elif rec.levelno >= logging.INFO:
					c1 = colors.GREEN
			c2 = getattr(rec, 'c2', colors.NORMAL)
			msg = '%s%s%s' % (c1, msg, c2)
		else:
			# remove single \r that make long lines in text files
			# and other terminal commands
			msg = re.sub(r'\r(?!\n)|\x1B\[(K|.*?(m|h|l))', '', msg)

		if rec.levelno >= logging.INFO:
			# the goal of this is to format without the leading "Logs, hour" prefix
			if rec.args:
				return msg % rec.args
			return msg

		# debug records: delegate to the standard formatter with pink zone markers
		rec.msg = msg
		rec.c1 = colors.PINK
		rec.c2 = colors.NORMAL
		return logging.Formatter.format(self, rec)
||||
|
||||
log = None
|
||||
"""global logger for Logs.debug, Logs.error, etc"""
|
||||
|
||||
def debug(*k, **kw):
	"""
	Wraps logging.debug; messages are discarded when the verbosity level
	:py:attr:`waflib.Logs.verbose` is not positive.
	"""
	global verbose, log
	if not verbose:
		return
	# debug records must stay on one line for the zone filter
	args = list(k)
	args[0] = args[0].replace('\n', ' ')
	log.debug(*args, **kw)
||||
|
||||
def error(*k, **kw):
	"""
	Wraps logging.error; appends the current stack trace when the verbosity
	level :py:attr:`waflib.Logs.verbose` is above 2.
	"""
	global log, verbose
	log.error(*k, **kw)
	if verbose <= 2:
		return
	frames = traceback.extract_stack()
	if frames:
		# drop the frame of this very function
		frames = frames[:-1]
	lines = []
	for filename, lineno, name, line in frames:
		lines.append(' File %r, line %d, in %s' % (filename, lineno, name))
		if line:
			lines.append('	%s' % line.strip())
	if lines:
		log.error('\n'.join(lines))
||||
|
||||
def warn(*k, **kw):
	"""
	Wraps logging.warning

	The underlying Logger.warn method is a deprecated alias of
	Logger.warning, so the canonical name is used here.
	"""
	global log
	log.warning(*k, **kw)
||||
|
||||
def info(*k, **kw):
	"""
	Wraps logging.info on the global waflib logger
	"""
	global log
	log.info(*k, **kw)
||||
|
||||
def init_log():
	"""
	Initializes the global logger :py:attr:`waflib.Logs.log`
	"""
	global log
	log = logging.getLogger('waflib')
	# reset any handlers/filters from a previous initialization
	log.handlers = []
	log.filters = []
	handler = log_handler()
	handler.setFormatter(formatter())
	log.addHandler(handler)
	log.addFilter(log_filter())
	log.setLevel(logging.DEBUG)
||||
|
||||
def make_logger(path, name):
	"""
	Creates a simple file logger, often used to redirect the context command output::

		from waflib import Logs
		bld.logger = Logs.make_logger('test.log', 'build')
		bld.check(header_name='sadlib.h', features='cxx cprogram', mandatory=False)

		# have the file closed immediately
		Logs.free_logger(bld.logger)

		# stop logging
		bld.logger = None

	The method finalize() of the command will try to free the logger, if any

	:param path: file name to write the log output to
	:type path: string
	:param name: logger name (loggers are reused)
	:type name: string
	"""
	logger = logging.getLogger(name)
	handler = logging.FileHandler(path, 'w')
	handler.setFormatter(logging.Formatter('%(message)s'))
	logger.addHandler(handler)
	logger.setLevel(logging.DEBUG)
	return logger
||||
|
||||
def make_mem_logger(name, to_log, size=8192):
	"""
	Creates a memory logger to avoid writing concurrently to the main logger;
	records are buffered and forwarded to *to_log* when flushed.
	"""
	from logging.handlers import MemoryHandler
	logger = logging.getLogger(name)
	mem = MemoryHandler(size, target=to_log)
	mem.setFormatter(logging.Formatter('%(message)s'))
	logger.addHandler(mem)
	# keep a direct reference so callers can flush/inspect the buffer
	logger.memhandler = mem
	logger.setLevel(logging.DEBUG)
	return logger
||||
|
||||
def free_logger(logger):
|
||||
"""
|
||||
Frees the resources held by the loggers created through make_logger or make_mem_logger.
|
||||
This is used for file cleanup and for handler removal (logger objects are re-used).
|
||||
"""
|
||||
try:
|
||||
for x in logger.handlers:
|
||||
x.close()
|
||||
logger.removeHandler(x)
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
def pprint(col, msg, label='', sep='\n'):
|
||||
"""
|
||||
Prints messages in color immediately on stderr::
|
||||
|
||||
from waflib import Logs
|
||||
Logs.pprint('RED', 'Something bad just happened')
|
||||
|
||||
:param col: color name to use in :py:const:`Logs.colors_lst`
|
||||
:type col: string
|
||||
:param msg: message to display
|
||||
:type msg: string or a value that can be printed by %s
|
||||
:param label: a message to add after the colored output
|
||||
:type label: string
|
||||
:param sep: a string to append at the end (line separator)
|
||||
:type sep: string
|
||||
"""
|
||||
global info
|
||||
info('%s%s%s %s', colors(col), msg, colors.NORMAL, label, extra={'terminator':sep})
|
944
third_party/waf/waflib/Node.py
vendored
Normal file
944
third_party/waf/waflib/Node.py
vendored
Normal file
@ -0,0 +1,944 @@
|
||||
#! /usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
|
||||
|
||||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# Thomas Nagy, 2005-2016 (ita)
|
||||
|
||||
"""
|
||||
Node: filesystem structure
|
||||
|
||||
#. Each file/folder is represented by exactly one node.
|
||||
|
||||
#. Some potential class properties are stored on :py:class:`waflib.Build.BuildContext` : nodes to depend on, etc.
|
||||
Unused class members can increase the `.wafpickle` file size sensibly.
|
||||
|
||||
#. Node objects should never be created directly, use
|
||||
the methods :py:func:`Node.make_node` or :py:func:`Node.find_node` for the low-level operations
|
||||
|
||||
#. The methods :py:func:`Node.find_resource`, :py:func:`Node.find_dir` :py:func:`Node.find_or_declare` must be
|
||||
used when a build context is present
|
||||
|
||||
#. Each instance of :py:class:`waflib.Context.Context` has a unique :py:class:`Node` subclass required for serialization.
|
||||
(:py:class:`waflib.Node.Nod3`, see the :py:class:`waflib.Context.Context` initializer). A reference to the context
|
||||
owning a node is held as *self.ctx*
|
||||
"""
|
||||
|
||||
import os, re, sys, shutil
|
||||
from waflib import Utils, Errors
|
||||
|
||||
exclude_regs = '''
|
||||
**/*~
|
||||
**/#*#
|
||||
**/.#*
|
||||
**/%*%
|
||||
**/._*
|
||||
**/CVS
|
||||
**/CVS/**
|
||||
**/.cvsignore
|
||||
**/SCCS
|
||||
**/SCCS/**
|
||||
**/vssver.scc
|
||||
**/.svn
|
||||
**/.svn/**
|
||||
**/BitKeeper
|
||||
**/.git
|
||||
**/.git/**
|
||||
**/.gitignore
|
||||
**/.bzr
|
||||
**/.bzrignore
|
||||
**/.bzr/**
|
||||
**/.hg
|
||||
**/.hg/**
|
||||
**/_MTN
|
||||
**/_MTN/**
|
||||
**/.arch-ids
|
||||
**/{arch}
|
||||
**/_darcs
|
||||
**/_darcs/**
|
||||
**/.intlcache
|
||||
**/.DS_Store'''
|
||||
"""
|
||||
Ant patterns for files and folders to exclude while doing the
|
||||
recursive traversal in :py:meth:`waflib.Node.Node.ant_glob`
|
||||
"""
|
||||
|
||||
class Node(object):
|
||||
"""
|
||||
This class is organized in two parts:
|
||||
|
||||
* The basic methods meant for filesystem access (compute paths, create folders, etc)
|
||||
* The methods bound to a :py:class:`waflib.Build.BuildContext` (require ``bld.srcnode`` and ``bld.bldnode``)
|
||||
"""
|
||||
|
||||
dict_class = dict
|
||||
"""
|
||||
Subclasses can provide a dict class to enable case insensitivity for example.
|
||||
"""
|
||||
|
||||
__slots__ = ('name', 'parent', 'children', 'cache_abspath', 'cache_isdir')
|
||||
def __init__(self, name, parent):
|
||||
"""
|
||||
.. note:: Use :py:func:`Node.make_node` or :py:func:`Node.find_node` instead of calling this constructor
|
||||
"""
|
||||
self.name = name
|
||||
self.parent = parent
|
||||
if parent:
|
||||
if name in parent.children:
|
||||
raise Errors.WafError('node %s exists in the parent files %r already' % (name, parent))
|
||||
parent.children[name] = self
|
||||
|
||||
def __setstate__(self, data):
|
||||
"Deserializes node information, used for persistence"
|
||||
self.name = data[0]
|
||||
self.parent = data[1]
|
||||
if data[2] is not None:
|
||||
# Issue 1480
|
||||
self.children = self.dict_class(data[2])
|
||||
|
||||
def __getstate__(self):
|
||||
"Serializes node information, used for persistence"
|
||||
return (self.name, self.parent, getattr(self, 'children', None))
|
||||
|
||||
def __str__(self):
|
||||
"""
|
||||
String representation (abspath), for debugging purposes
|
||||
|
||||
:rtype: string
|
||||
"""
|
||||
return self.abspath()
|
||||
|
||||
def __repr__(self):
|
||||
"""
|
||||
String representation (abspath), for debugging purposes
|
||||
|
||||
:rtype: string
|
||||
"""
|
||||
return self.abspath()
|
||||
|
||||
def __copy__(self):
|
||||
"""
|
||||
Provided to prevent nodes from being copied
|
||||
|
||||
:raises: :py:class:`waflib.Errors.WafError`
|
||||
"""
|
||||
raise Errors.WafError('nodes are not supposed to be copied')
|
||||
|
||||
def read(self, flags='r', encoding='ISO8859-1'):
|
||||
"""
|
||||
Reads and returns the contents of the file represented by this node, see :py:func:`waflib.Utils.readf`::
|
||||
|
||||
def build(bld):
|
||||
bld.path.find_node('wscript').read()
|
||||
|
||||
:param flags: Open mode
|
||||
:type flags: string
|
||||
:param encoding: encoding value for Python3
|
||||
:type encoding: string
|
||||
:rtype: string or bytes
|
||||
:return: File contents
|
||||
"""
|
||||
return Utils.readf(self.abspath(), flags, encoding)
|
||||
|
||||
def write(self, data, flags='w', encoding='ISO8859-1'):
|
||||
"""
|
||||
Writes data to the file represented by this node, see :py:func:`waflib.Utils.writef`::
|
||||
|
||||
def build(bld):
|
||||
bld.path.make_node('foo.txt').write('Hello, world!')
|
||||
|
||||
:param data: data to write
|
||||
:type data: string
|
||||
:param flags: Write mode
|
||||
:type flags: string
|
||||
:param encoding: encoding value for Python3
|
||||
:type encoding: string
|
||||
"""
|
||||
Utils.writef(self.abspath(), data, flags, encoding)
|
||||
|
||||
def read_json(self, convert=True, encoding='utf-8'):
|
||||
"""
|
||||
Reads and parses the contents of this node as JSON (Python ≥ 2.6)::
|
||||
|
||||
def build(bld):
|
||||
bld.path.find_node('abc.json').read_json()
|
||||
|
||||
Note that this by default automatically decodes unicode strings on Python2, unlike what the Python JSON module does.
|
||||
|
||||
:type convert: boolean
|
||||
:param convert: Prevents decoding of unicode strings on Python2
|
||||
:type encoding: string
|
||||
:param encoding: The encoding of the file to read. This default to UTF8 as per the JSON standard
|
||||
:rtype: object
|
||||
:return: Parsed file contents
|
||||
"""
|
||||
import json # Python 2.6 and up
|
||||
object_pairs_hook = None
|
||||
if convert and sys.hexversion < 0x3000000:
|
||||
try:
|
||||
_type = unicode
|
||||
except NameError:
|
||||
_type = str
|
||||
|
||||
def convert(value):
|
||||
if isinstance(value, list):
|
||||
return [convert(element) for element in value]
|
||||
elif isinstance(value, _type):
|
||||
return str(value)
|
||||
else:
|
||||
return value
|
||||
|
||||
def object_pairs(pairs):
|
||||
return dict((str(pair[0]), convert(pair[1])) for pair in pairs)
|
||||
|
||||
object_pairs_hook = object_pairs
|
||||
|
||||
return json.loads(self.read(encoding=encoding), object_pairs_hook=object_pairs_hook)
|
||||
|
||||
def write_json(self, data, pretty=True):
|
||||
"""
|
||||
Writes a python object as JSON to disk (Python ≥ 2.6) as UTF-8 data (JSON standard)::
|
||||
|
||||
def build(bld):
|
||||
bld.path.find_node('xyz.json').write_json(199)
|
||||
|
||||
:type data: object
|
||||
:param data: The data to write to disk
|
||||
:type pretty: boolean
|
||||
:param pretty: Determines if the JSON will be nicely space separated
|
||||
"""
|
||||
import json # Python 2.6 and up
|
||||
indent = 2
|
||||
separators = (',', ': ')
|
||||
sort_keys = pretty
|
||||
newline = os.linesep
|
||||
if not pretty:
|
||||
indent = None
|
||||
separators = (',', ':')
|
||||
newline = ''
|
||||
output = json.dumps(data, indent=indent, separators=separators, sort_keys=sort_keys) + newline
|
||||
self.write(output, encoding='utf-8')
|
||||
|
||||
def exists(self):
|
||||
"""
|
||||
Returns whether the Node is present on the filesystem
|
||||
|
||||
:rtype: bool
|
||||
"""
|
||||
return os.path.exists(self.abspath())
|
||||
|
||||
def isdir(self):
|
||||
"""
|
||||
Returns whether the Node represents a folder
|
||||
|
||||
:rtype: bool
|
||||
"""
|
||||
return os.path.isdir(self.abspath())
|
||||
|
||||
def chmod(self, val):
|
||||
"""
|
||||
Changes the file/dir permissions::
|
||||
|
||||
def build(bld):
|
||||
bld.path.chmod(493) # 0755
|
||||
"""
|
||||
os.chmod(self.abspath(), val)
|
||||
|
||||
def delete(self, evict=True):
|
||||
"""
|
||||
Removes the file/folder from the filesystem (equivalent to `rm -rf`), and remove this object from the Node tree.
|
||||
Do not use this object after calling this method.
|
||||
"""
|
||||
try:
|
||||
try:
|
||||
if os.path.isdir(self.abspath()):
|
||||
shutil.rmtree(self.abspath())
|
||||
else:
|
||||
os.remove(self.abspath())
|
||||
except OSError:
|
||||
if os.path.exists(self.abspath()):
|
||||
raise
|
||||
finally:
|
||||
if evict:
|
||||
self.evict()
|
||||
|
||||
def evict(self):
|
||||
"""
|
||||
Removes this node from the Node tree
|
||||
"""
|
||||
del self.parent.children[self.name]
|
||||
|
||||
def suffix(self):
|
||||
"""
|
||||
Returns the file rightmost extension, for example `a.b.c.d → .d`
|
||||
|
||||
:rtype: string
|
||||
"""
|
||||
k = max(0, self.name.rfind('.'))
|
||||
return self.name[k:]
|
||||
|
||||
def height(self):
|
||||
"""
|
||||
Returns the depth in the folder hierarchy from the filesystem root or from all the file drives
|
||||
|
||||
:returns: filesystem depth
|
||||
:rtype: integer
|
||||
"""
|
||||
d = self
|
||||
val = -1
|
||||
while d:
|
||||
d = d.parent
|
||||
val += 1
|
||||
return val
|
||||
|
||||
def listdir(self):
|
||||
"""
|
||||
Lists the folder contents
|
||||
|
||||
:returns: list of file/folder names ordered alphabetically
|
||||
:rtype: list of string
|
||||
"""
|
||||
lst = Utils.listdir(self.abspath())
|
||||
lst.sort()
|
||||
return lst
|
||||
|
||||
def mkdir(self):
|
||||
"""
|
||||
Creates a folder represented by this node. Intermediate folders are created as needed.
|
||||
|
||||
:raises: :py:class:`waflib.Errors.WafError` when the folder is missing
|
||||
"""
|
||||
if self.isdir():
|
||||
return
|
||||
|
||||
try:
|
||||
self.parent.mkdir()
|
||||
except OSError:
|
||||
pass
|
||||
|
||||
if self.name:
|
||||
try:
|
||||
os.makedirs(self.abspath())
|
||||
except OSError:
|
||||
pass
|
||||
|
||||
if not self.isdir():
|
||||
raise Errors.WafError('Could not create the directory %r' % self)
|
||||
|
||||
try:
|
||||
self.children
|
||||
except AttributeError:
|
||||
self.children = self.dict_class()
|
||||
|
||||
def find_node(self, lst):
|
||||
"""
|
||||
Finds a node on the file system (files or folders), and creates the corresponding Node objects if it exists
|
||||
|
||||
:param lst: relative path
|
||||
:type lst: string or list of string
|
||||
:returns: The corresponding Node object or None if no entry was found on the filesystem
|
||||
:rtype: :py:class:´waflib.Node.Node´
|
||||
"""
|
||||
|
||||
if isinstance(lst, str):
|
||||
lst = [x for x in Utils.split_path(lst) if x and x != '.']
|
||||
|
||||
cur = self
|
||||
for x in lst:
|
||||
if x == '..':
|
||||
cur = cur.parent or cur
|
||||
continue
|
||||
|
||||
try:
|
||||
ch = cur.children
|
||||
except AttributeError:
|
||||
cur.children = self.dict_class()
|
||||
else:
|
||||
try:
|
||||
cur = ch[x]
|
||||
continue
|
||||
except KeyError:
|
||||
pass
|
||||
|
||||
# optimistic: create the node first then look if it was correct to do so
|
||||
cur = self.__class__(x, cur)
|
||||
if not cur.exists():
|
||||
cur.evict()
|
||||
return None
|
||||
|
||||
if not cur.exists():
|
||||
cur.evict()
|
||||
return None
|
||||
|
||||
return cur
|
||||
|
||||
def make_node(self, lst):
|
||||
"""
|
||||
Returns or creates a Node object corresponding to the input path without considering the filesystem.
|
||||
|
||||
:param lst: relative path
|
||||
:type lst: string or list of string
|
||||
:rtype: :py:class:´waflib.Node.Node´
|
||||
"""
|
||||
if isinstance(lst, str):
|
||||
lst = [x for x in Utils.split_path(lst) if x and x != '.']
|
||||
|
||||
cur = self
|
||||
for x in lst:
|
||||
if x == '..':
|
||||
cur = cur.parent or cur
|
||||
continue
|
||||
|
||||
try:
|
||||
cur = cur.children[x]
|
||||
except AttributeError:
|
||||
cur.children = self.dict_class()
|
||||
except KeyError:
|
||||
pass
|
||||
else:
|
||||
continue
|
||||
cur = self.__class__(x, cur)
|
||||
return cur
|
||||
|
||||
def search_node(self, lst):
|
||||
"""
|
||||
Returns a Node previously defined in the data structure. The filesystem is not considered.
|
||||
|
||||
:param lst: relative path
|
||||
:type lst: string or list of string
|
||||
:rtype: :py:class:´waflib.Node.Node´ or None if there is no entry in the Node datastructure
|
||||
"""
|
||||
if isinstance(lst, str):
|
||||
lst = [x for x in Utils.split_path(lst) if x and x != '.']
|
||||
|
||||
cur = self
|
||||
for x in lst:
|
||||
if x == '..':
|
||||
cur = cur.parent or cur
|
||||
else:
|
||||
try:
|
||||
cur = cur.children[x]
|
||||
except (AttributeError, KeyError):
|
||||
return None
|
||||
return cur
|
||||
|
||||
def path_from(self, node):
|
||||
"""
|
||||
Path of this node seen from the other::
|
||||
|
||||
def build(bld):
|
||||
n1 = bld.path.find_node('foo/bar/xyz.txt')
|
||||
n2 = bld.path.find_node('foo/stuff/')
|
||||
n1.path_from(n2) # '../bar/xyz.txt'
|
||||
|
||||
:param node: path to use as a reference
|
||||
:type node: :py:class:`waflib.Node.Node`
|
||||
:returns: a relative path or an absolute one if that is better
|
||||
:rtype: string
|
||||
"""
|
||||
c1 = self
|
||||
c2 = node
|
||||
|
||||
c1h = c1.height()
|
||||
c2h = c2.height()
|
||||
|
||||
lst = []
|
||||
up = 0
|
||||
|
||||
while c1h > c2h:
|
||||
lst.append(c1.name)
|
||||
c1 = c1.parent
|
||||
c1h -= 1
|
||||
|
||||
while c2h > c1h:
|
||||
up += 1
|
||||
c2 = c2.parent
|
||||
c2h -= 1
|
||||
|
||||
while not c1 is c2:
|
||||
lst.append(c1.name)
|
||||
up += 1
|
||||
|
||||
c1 = c1.parent
|
||||
c2 = c2.parent
|
||||
|
||||
if c1.parent:
|
||||
lst.extend(['..'] * up)
|
||||
lst.reverse()
|
||||
return os.sep.join(lst) or '.'
|
||||
else:
|
||||
return self.abspath()
|
||||
|
||||
def abspath(self):
|
||||
"""
|
||||
Returns the absolute path. A cache is kept in the context as ``cache_node_abspath``
|
||||
|
||||
:rtype: string
|
||||
"""
|
||||
try:
|
||||
return self.cache_abspath
|
||||
except AttributeError:
|
||||
pass
|
||||
# think twice before touching this (performance + complexity + correctness)
|
||||
|
||||
if not self.parent:
|
||||
val = os.sep
|
||||
elif not self.parent.name:
|
||||
val = os.sep + self.name
|
||||
else:
|
||||
val = self.parent.abspath() + os.sep + self.name
|
||||
self.cache_abspath = val
|
||||
return val
|
||||
|
||||
if Utils.is_win32:
|
||||
def abspath(self):
|
||||
try:
|
||||
return self.cache_abspath
|
||||
except AttributeError:
|
||||
pass
|
||||
if not self.parent:
|
||||
val = ''
|
||||
elif not self.parent.name:
|
||||
val = self.name + os.sep
|
||||
else:
|
||||
val = self.parent.abspath().rstrip(os.sep) + os.sep + self.name
|
||||
self.cache_abspath = val
|
||||
return val
|
||||
|
||||
def is_child_of(self, node):
|
||||
"""
|
||||
Returns whether the object belongs to a subtree of the input node::
|
||||
|
||||
def build(bld):
|
||||
node = bld.path.find_node('wscript')
|
||||
node.is_child_of(bld.path) # True
|
||||
|
||||
:param node: path to use as a reference
|
||||
:type node: :py:class:`waflib.Node.Node`
|
||||
:rtype: bool
|
||||
"""
|
||||
p = self
|
||||
diff = self.height() - node.height()
|
||||
while diff > 0:
|
||||
diff -= 1
|
||||
p = p.parent
|
||||
return p is node
|
||||
|
||||
def ant_iter(self, accept=None, maxdepth=25, pats=[], dir=False, src=True, remove=True):
|
||||
"""
|
||||
Recursive method used by :py:meth:`waflib.Node.ant_glob`.
|
||||
|
||||
:param accept: function used for accepting/rejecting a node, returns the patterns that can be still accepted in recursion
|
||||
:type accept: function
|
||||
:param maxdepth: maximum depth in the filesystem (25)
|
||||
:type maxdepth: int
|
||||
:param pats: list of patterns to accept and list of patterns to exclude
|
||||
:type pats: tuple
|
||||
:param dir: return folders too (False by default)
|
||||
:type dir: bool
|
||||
:param src: return files (True by default)
|
||||
:type src: bool
|
||||
:param remove: remove files/folders that do not exist (True by default)
|
||||
:type remove: bool
|
||||
:returns: A generator object to iterate from
|
||||
:rtype: iterator
|
||||
"""
|
||||
dircont = self.listdir()
|
||||
dircont.sort()
|
||||
|
||||
try:
|
||||
lst = set(self.children.keys())
|
||||
except AttributeError:
|
||||
self.children = self.dict_class()
|
||||
else:
|
||||
if remove:
|
||||
for x in lst - set(dircont):
|
||||
self.children[x].evict()
|
||||
|
||||
for name in dircont:
|
||||
npats = accept(name, pats)
|
||||
if npats and npats[0]:
|
||||
accepted = [] in npats[0]
|
||||
|
||||
node = self.make_node([name])
|
||||
|
||||
isdir = node.isdir()
|
||||
if accepted:
|
||||
if isdir:
|
||||
if dir:
|
||||
yield node
|
||||
else:
|
||||
if src:
|
||||
yield node
|
||||
|
||||
if isdir:
|
||||
node.cache_isdir = True
|
||||
if maxdepth:
|
||||
for k in node.ant_iter(accept=accept, maxdepth=maxdepth - 1, pats=npats, dir=dir, src=src, remove=remove):
|
||||
yield k
|
||||
raise StopIteration
|
||||
|
||||
def ant_glob(self, *k, **kw):
|
||||
"""
|
||||
Finds files across folders:
|
||||
|
||||
* ``**/*`` find all files recursively
|
||||
* ``**/*.class`` find all files ending by .class
|
||||
* ``..`` find files having two dot characters
|
||||
|
||||
For example::
|
||||
|
||||
def configure(cfg):
|
||||
cfg.path.ant_glob('**/*.cpp') # finds all .cpp files
|
||||
cfg.root.ant_glob('etc/*.txt') # matching from the filesystem root can be slow
|
||||
cfg.path.ant_glob('*.cpp', excl=['*.c'], src=True, dir=False)
|
||||
|
||||
For more information see http://ant.apache.org/manual/dirtasks.html
|
||||
|
||||
The nodes that correspond to files and folders that do not exist are garbage-collected.
|
||||
To prevent this behaviour in particular when running over the build directory, pass ``remove=False``
|
||||
|
||||
:param incl: ant patterns or list of patterns to include
|
||||
:type incl: string or list of strings
|
||||
:param excl: ant patterns or list of patterns to exclude
|
||||
:type excl: string or list of strings
|
||||
:param dir: return folders too (False by default)
|
||||
:type dir: bool
|
||||
:param src: return files (True by default)
|
||||
:type src: bool
|
||||
:param remove: remove files/folders that do not exist (True by default)
|
||||
:type remove: bool
|
||||
:param maxdepth: maximum depth of recursion
|
||||
:type maxdepth: int
|
||||
:param ignorecase: ignore case while matching (False by default)
|
||||
:type ignorecase: bool
|
||||
:returns: The corresponding Nodes
|
||||
:rtype: list of :py:class:`waflib.Node.Node` instances
|
||||
"""
|
||||
|
||||
src = kw.get('src', True)
|
||||
dir = kw.get('dir', False)
|
||||
|
||||
excl = kw.get('excl', exclude_regs)
|
||||
incl = k and k[0] or kw.get('incl', '**')
|
||||
reflags = kw.get('ignorecase', 0) and re.I
|
||||
|
||||
def to_pat(s):
|
||||
lst = Utils.to_list(s)
|
||||
ret = []
|
||||
for x in lst:
|
||||
x = x.replace('\\', '/').replace('//', '/')
|
||||
if x.endswith('/'):
|
||||
x += '**'
|
||||
lst2 = x.split('/')
|
||||
accu = []
|
||||
for k in lst2:
|
||||
if k == '**':
|
||||
accu.append(k)
|
||||
else:
|
||||
k = k.replace('.', '[.]').replace('*','.*').replace('?', '.').replace('+', '\\+')
|
||||
k = '^%s$' % k
|
||||
try:
|
||||
#print "pattern", k
|
||||
accu.append(re.compile(k, flags=reflags))
|
||||
except Exception ,e:
|
||||
raise Errors.WafError('Invalid pattern: %s' % k, e)
|
||||
ret.append(accu)
|
||||
return ret
|
||||
|
||||
def filtre(name, nn):
|
||||
ret = []
|
||||
for lst in nn:
|
||||
if not lst:
|
||||
pass
|
||||
elif lst[0] == '**':
|
||||
ret.append(lst)
|
||||
if len(lst) > 1:
|
||||
if lst[1].match(name):
|
||||
ret.append(lst[2:])
|
||||
else:
|
||||
ret.append([])
|
||||
elif lst[0].match(name):
|
||||
ret.append(lst[1:])
|
||||
return ret
|
||||
|
||||
def accept(name, pats):
|
||||
nacc = filtre(name, pats[0])
|
||||
nrej = filtre(name, pats[1])
|
||||
if [] in nrej:
|
||||
nacc = []
|
||||
return [nacc, nrej]
|
||||
|
||||
ret = [x for x in self.ant_iter(accept=accept, pats=[to_pat(incl), to_pat(excl)], maxdepth=kw.get('maxdepth', 25), dir=dir, src=src, remove=kw.get('remove', True))]
|
||||
if kw.get('flat', False):
|
||||
return ' '.join([x.path_from(self) for x in ret])
|
||||
|
||||
return ret
|
||||
|
||||
# --------------------------------------------------------------------------------
|
||||
# the following methods require the source/build folders (bld.srcnode/bld.bldnode)
|
||||
# using a subclass is a possibility, but is that really necessary?
|
||||
# --------------------------------------------------------------------------------
|
||||
|
||||
def is_src(self):
|
||||
"""
|
||||
Returns True if the node is below the source directory. Note that ``!is_src() ≠ is_bld()``
|
||||
|
||||
:rtype: bool
|
||||
"""
|
||||
cur = self
|
||||
x = self.ctx.srcnode
|
||||
y = self.ctx.bldnode
|
||||
while cur.parent:
|
||||
if cur is y:
|
||||
return False
|
||||
if cur is x:
|
||||
return True
|
||||
cur = cur.parent
|
||||
return False
|
||||
|
||||
def is_bld(self):
|
||||
"""
|
||||
Returns True if the node is below the build directory. Note that ``!is_bld() ≠ is_src()``
|
||||
|
||||
:rtype: bool
|
||||
"""
|
||||
cur = self
|
||||
y = self.ctx.bldnode
|
||||
while cur.parent:
|
||||
if cur is y:
|
||||
return True
|
||||
cur = cur.parent
|
||||
return False
|
||||
|
||||
def get_src(self):
|
||||
"""
|
||||
Returns the corresponding Node object in the source directory (or self if already
|
||||
under the source directory). Use this method only if the purpose is to create
|
||||
a Node object (this is common with folders but not with files, see ticket 1937)
|
||||
|
||||
:rtype: :py:class:`waflib.Node.Node`
|
||||
"""
|
||||
cur = self
|
||||
x = self.ctx.srcnode
|
||||
y = self.ctx.bldnode
|
||||
lst = []
|
||||
while cur.parent:
|
||||
if cur is y:
|
||||
lst.reverse()
|
||||
return x.make_node(lst)
|
||||
if cur is x:
|
||||
return self
|
||||
lst.append(cur.name)
|
||||
cur = cur.parent
|
||||
return self
|
||||
|
||||
def get_bld(self):
|
||||
"""
|
||||
Return the corresponding Node object in the build directory (or self if already
|
||||
under the build directory). Use this method only if the purpose is to create
|
||||
a Node object (this is common with folders but not with files, see ticket 1937)
|
||||
|
||||
:rtype: :py:class:`waflib.Node.Node`
|
||||
"""
|
||||
cur = self
|
||||
x = self.ctx.srcnode
|
||||
y = self.ctx.bldnode
|
||||
lst = []
|
||||
while cur.parent:
|
||||
if cur is y:
|
||||
return self
|
||||
if cur is x:
|
||||
lst.reverse()
|
||||
return self.ctx.bldnode.make_node(lst)
|
||||
lst.append(cur.name)
|
||||
cur = cur.parent
|
||||
# the file is external to the current project, make a fake root in the current build directory
|
||||
lst.reverse()
|
||||
if lst and Utils.is_win32 and len(lst[0]) == 2 and lst[0].endswith(':'):
|
||||
lst[0] = lst[0][0]
|
||||
return self.ctx.bldnode.make_node(['__root__'] + lst)
|
||||
|
||||
def find_resource(self, lst):
|
||||
"""
|
||||
Use this method in the build phase to find source files corresponding to the relative path given.
|
||||
|
||||
First it looks up the Node data structure to find any declared Node object in the build directory.
|
||||
If None is found, it then considers the filesystem in the source directory.
|
||||
|
||||
:param lst: relative path
|
||||
:type lst: string or list of string
|
||||
:returns: the corresponding Node object or None
|
||||
:rtype: :py:class:`waflib.Node.Node`
|
||||
"""
|
||||
if isinstance(lst, str):
|
||||
lst = [x for x in Utils.split_path(lst) if x and x != '.']
|
||||
|
||||
node = self.get_bld().search_node(lst)
|
||||
if not node:
|
||||
node = self.get_src().find_node(lst)
|
||||
if node and node.isdir():
|
||||
return None
|
||||
return node
|
||||
|
||||
def find_or_declare(self, lst):
|
||||
"""
|
||||
Use this method in the build phase to declare output files.
|
||||
|
||||
If 'self' is in build directory, it first tries to return an existing node object.
|
||||
If no Node is found, it tries to find one in the source directory.
|
||||
If no Node is found, a new Node object is created in the build directory, and the
|
||||
intermediate folders are added.
|
||||
|
||||
:param lst: relative path
|
||||
:type lst: string or list of string
|
||||
"""
|
||||
if isinstance(lst, str):
|
||||
lst = [x for x in Utils.split_path(lst) if x and x != '.']
|
||||
|
||||
node = self.get_bld().search_node(lst)
|
||||
if node:
|
||||
if not os.path.isfile(node.abspath()):
|
||||
node.parent.mkdir()
|
||||
return node
|
||||
self = self.get_src()
|
||||
node = self.find_node(lst)
|
||||
if node:
|
||||
return node
|
||||
node = self.get_bld().make_node(lst)
|
||||
node.parent.mkdir()
|
||||
return node
|
||||
|
||||
def find_dir(self, lst):
|
||||
"""
|
||||
Searches for a folder on the filesystem (see :py:meth:`waflib.Node.Node.find_node`)
|
||||
|
||||
:param lst: relative path
|
||||
:type lst: string or list of string
|
||||
:returns: The corresponding Node object or None if there is no such folder
|
||||
:rtype: :py:class:`waflib.Node.Node`
|
||||
"""
|
||||
if isinstance(lst, str):
|
||||
lst = [x for x in Utils.split_path(lst) if x and x != '.']
|
||||
|
||||
node = self.find_node(lst)
|
||||
if node and not node.isdir():
|
||||
return None
|
||||
return node
|
||||
|
||||
# helpers for building things
|
||||
def change_ext(self, ext, ext_in=None):
|
||||
"""
|
||||
Declares a build node with a distinct extension; this is uses :py:meth:`waflib.Node.Node.find_or_declare`
|
||||
|
||||
:return: A build node of the same path, but with a different extension
|
||||
:rtype: :py:class:`waflib.Node.Node`
|
||||
"""
|
||||
name = self.name
|
||||
if ext_in is None:
|
||||
k = name.rfind('.')
|
||||
if k >= 0:
|
||||
name = name[:k] + ext
|
||||
else:
|
||||
name = name + ext
|
||||
else:
|
||||
name = name[:- len(ext_in)] + ext
|
||||
|
||||
return self.parent.find_or_declare([name])
|
||||
|
||||
def bldpath(self):
|
||||
"""
|
||||
Returns the relative path seen from the build directory ``src/foo.cpp``
|
||||
|
||||
:rtype: string
|
||||
"""
|
||||
return self.path_from(self.ctx.bldnode)
|
||||
|
||||
def srcpath(self):
|
||||
"""
|
||||
Returns the relative path seen from the source directory ``../src/foo.cpp``
|
||||
|
||||
:rtype: string
|
||||
"""
|
||||
return self.path_from(self.ctx.srcnode)
|
||||
|
||||
def relpath(self):
|
||||
"""
|
||||
If a file in the build directory, returns :py:meth:`waflib.Node.Node.bldpath`,
|
||||
else returns :py:meth:`waflib.Node.Node.srcpath`
|
||||
|
||||
:rtype: string
|
||||
"""
|
||||
cur = self
|
||||
x = self.ctx.bldnode
|
||||
while cur.parent:
|
||||
if cur is x:
|
||||
return self.bldpath()
|
||||
cur = cur.parent
|
||||
return self.srcpath()
|
||||
|
||||
def bld_dir(self):
|
||||
"""
|
||||
Equivalent to self.parent.bldpath()
|
||||
|
||||
:rtype: string
|
||||
"""
|
||||
return self.parent.bldpath()
|
||||
|
||||
def h_file(self):
|
||||
"""
|
||||
See :py:func:`waflib.Utils.h_file`
|
||||
|
||||
:return: a hash representing the file contents
|
||||
:rtype: string or bytes
|
||||
"""
|
||||
return Utils.h_file(self.abspath())
|
||||
|
||||
def get_bld_sig(self):
|
||||
"""
|
||||
Returns a signature (see :py:meth:`waflib.Node.Node.h_file`) for the purpose
|
||||
of build dependency calculation. This method uses a per-context cache.
|
||||
|
||||
:return: a hash representing the object contents
|
||||
:rtype: string or bytes
|
||||
"""
|
||||
# previous behaviour can be set by returning self.ctx.node_sigs[self] when a build node
|
||||
try:
|
||||
cache = self.ctx.cache_sig
|
||||
except AttributeError:
|
||||
cache = self.ctx.cache_sig = {}
|
||||
try:
|
||||
ret = cache[self]
|
||||
except KeyError:
|
||||
p = self.abspath()
|
||||
try:
|
||||
ret = cache[self] = self.h_file()
|
||||
except EnvironmentError:
|
||||
if self.isdir():
|
||||
# allow folders as build nodes, do not use the creation time
|
||||
st = os.stat(p)
|
||||
ret = cache[self] = Utils.h_list([p, st.st_ino, st.st_mode])
|
||||
return ret
|
||||
raise
|
||||
return ret
|
||||
|
||||
# --------------------------------------------
|
||||
# TODO waf 2.0, remove the sig and cache_sig attributes
|
||||
def get_sig(self):
|
||||
return self.h_file()
|
||||
def set_sig(self, val):
|
||||
# clear the cache, so that past implementation should still work
|
||||
try:
|
||||
del self.get_bld_sig.__cache__[(self,)]
|
||||
except (AttributeError, KeyError):
|
||||
pass
|
||||
sig = property(get_sig, set_sig)
|
||||
cache_sig = property(get_sig, set_sig)
|
||||
|
||||
pickle_lock = Utils.threading.Lock()
|
||||
"""Lock mandatory for thread-safe node serialization"""
|
||||
|
||||
class Nod3(Node):
|
||||
"""Mandatory subclass for thread-safe node serialization"""
|
||||
pass # do not remove
|
281
third_party/waf/waflib/Options.py
vendored
Normal file
281
third_party/waf/waflib/Options.py
vendored
Normal file
@ -0,0 +1,281 @@
|
||||
#! /usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
|
||||
|
||||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# Scott Newton, 2005 (scottn)
|
||||
# Thomas Nagy, 2006-2016 (ita)
|
||||
|
||||
"""
|
||||
Support for waf command-line options
|
||||
|
||||
Provides default and command-line options, as well the command
|
||||
that reads the ``options`` wscript function.
|
||||
"""
|
||||
|
||||
import os, tempfile, optparse, sys, re
|
||||
from waflib import Logs, Utils, Context, Errors
|
||||
|
||||
options = {}
|
||||
"""
|
||||
A global dictionary representing user-provided command-line options::
|
||||
|
||||
$ waf --foo=bar
|
||||
"""
|
||||
|
||||
commands = []
|
||||
"""
|
||||
List of commands to execute extracted from the command-line. This list
|
||||
is consumed during the execution by :py:func:`waflib.Scripting.run_commands`.
|
||||
"""
|
||||
|
||||
envvars = []
|
||||
"""
|
||||
List of environment variable declarations placed after the Waf executable name.
|
||||
These are detected by searching for "=" in the remaining arguments.
|
||||
You probably do not want to use this.
|
||||
"""
|
||||
|
||||
lockfile = os.environ.get('WAFLOCK', '.lock-waf_%s_build' % sys.platform)
|
||||
"""
|
||||
Name of the lock file that marks a project as configured
|
||||
"""
|
||||
|
||||
class opt_parser(optparse.OptionParser):
|
||||
"""
|
||||
Command-line options parser.
|
||||
"""
|
||||
def __init__(self, ctx):
|
||||
optparse.OptionParser.__init__(self, conflict_handler="resolve",
|
||||
version='waf %s (%s)' % (Context.WAFVERSION, Context.WAFREVISION))
|
||||
self.formatter.width = Logs.get_term_cols()
|
||||
self.ctx = ctx
|
||||
|
||||
def print_usage(self, file=None):
|
||||
return self.print_help(file)
|
||||
|
||||
def get_usage(self):
|
||||
"""
|
||||
Builds the message to print on ``waf --help``
|
||||
|
||||
:rtype: string
|
||||
"""
|
||||
cmds_str = {}
|
||||
for cls in Context.classes:
|
||||
if not cls.cmd or cls.cmd == 'options' or cls.cmd.startswith( '_' ):
|
||||
continue
|
||||
|
||||
s = cls.__doc__ or ''
|
||||
cmds_str[cls.cmd] = s
|
||||
|
||||
if Context.g_module:
|
||||
for (k, v) in Context.g_module.__dict__.items():
|
||||
if k in ('options', 'init', 'shutdown'):
|
||||
continue
|
||||
|
||||
if type(v) is type(Context.create_context):
|
||||
if v.__doc__ and not k.startswith('_'):
|
||||
cmds_str[k] = v.__doc__
|
||||
|
||||
just = 0
|
||||
for k in cmds_str:
|
||||
just = max(just, len(k))
|
||||
|
||||
lst = [' %s: %s' % (k.ljust(just), v) for (k, v) in cmds_str.items()]
|
||||
lst.sort()
|
||||
ret = '\n'.join(lst)
|
||||
|
||||
return '''waf [commands] [options]
|
||||
|
||||
Main commands (example: ./waf build -j4)
|
||||
%s
|
||||
''' % ret
|
||||
|
||||
|
||||
class OptionsContext(Context.Context):
|
||||
"""
|
||||
Collects custom options from wscript files and parses the command line.
|
||||
Sets the global :py:const:`waflib.Options.commands` and :py:const:`waflib.Options.options` values.
|
||||
"""
|
||||
cmd = 'options'
|
||||
fun = 'options'
|
||||
|
||||
def __init__(self, **kw):
|
||||
super(OptionsContext, self).__init__(**kw)
|
||||
|
||||
self.parser = opt_parser(self)
|
||||
"""Instance of :py:class:`waflib.Options.opt_parser`"""
|
||||
|
||||
self.option_groups = {}
|
||||
|
||||
jobs = self.jobs()
|
||||
p = self.add_option
|
||||
color = os.environ.get('NOCOLOR', '') and 'no' or 'auto'
|
||||
if os.environ.get('CLICOLOR', '') == '0':
|
||||
color = 'no'
|
||||
elif os.environ.get('CLICOLOR_FORCE', '') == '1':
|
||||
color = 'yes'
|
||||
p('-c', '--color', dest='colors', default=color, action='store', help='whether to use colors (yes/no/auto) [default: auto]', choices=('yes', 'no', 'auto'))
|
||||
p('-j', '--jobs', dest='jobs', default=jobs, type='int', help='amount of parallel jobs (%r)' % jobs)
|
||||
p('-k', '--keep', dest='keep', default=0, action='count', help='continue despite errors (-kk to try harder)')
|
||||
p('-v', '--verbose', dest='verbose', default=0, action='count', help='verbosity level -v -vv or -vvv [default: 0]')
|
||||
p('--zones', dest='zones', default='', action='store', help='debugging zones (task_gen, deps, tasks, etc)')
|
||||
p('--profile', dest='profile', default='', action='store_true', help=optparse.SUPPRESS_HELP)
|
||||
|
||||
gr = self.add_option_group('Configuration options')
|
||||
self.option_groups['configure options'] = gr
|
||||
|
||||
gr.add_option('-o', '--out', action='store', default='', help='build dir for the project', dest='out')
|
||||
gr.add_option('-t', '--top', action='store', default='', help='src dir for the project', dest='top')
|
||||
|
||||
gr.add_option('--no-lock-in-run', action='store_true', default='', help=optparse.SUPPRESS_HELP, dest='no_lock_in_run')
|
||||
gr.add_option('--no-lock-in-out', action='store_true', default='', help=optparse.SUPPRESS_HELP, dest='no_lock_in_out')
|
||||
gr.add_option('--no-lock-in-top', action='store_true', default='', help=optparse.SUPPRESS_HELP, dest='no_lock_in_top')
|
||||
|
||||
default_prefix = getattr(Context.g_module, 'default_prefix', os.environ.get('PREFIX'))
|
||||
if not default_prefix:
|
||||
if Utils.unversioned_sys_platform() == 'win32':
|
||||
d = tempfile.gettempdir()
|
||||
default_prefix = d[0].upper() + d[1:]
|
||||
# win32 preserves the case, but gettempdir does not
|
||||
else:
|
||||
default_prefix = '/usr/local/'
|
||||
gr.add_option('--prefix', dest='prefix', default=default_prefix, help='installation prefix [default: %r]' % default_prefix)
|
||||
gr.add_option('--bindir', dest='bindir', help='bindir')
|
||||
gr.add_option('--libdir', dest='libdir', help='libdir')
|
||||
|
||||
gr = self.add_option_group('Build and installation options')
|
||||
self.option_groups['build and install options'] = gr
|
||||
gr.add_option('-p', '--progress', dest='progress_bar', default=0, action='count', help= '-p: progress bar; -pp: ide output')
|
||||
gr.add_option('--targets', dest='targets', default='', action='store', help='task generators, e.g. "target1,target2"')
|
||||
|
||||
gr = self.add_option_group('Step options')
|
||||
self.option_groups['step options'] = gr
|
||||
gr.add_option('--files', dest='files', default='', action='store', help='files to process, by regexp, e.g. "*/main.c,*/test/main.o"')
|
||||
|
||||
default_destdir = os.environ.get('DESTDIR', '')
|
||||
|
||||
gr = self.add_option_group('Installation and uninstallation options')
|
||||
self.option_groups['install/uninstall options'] = gr
|
||||
gr.add_option('--destdir', help='installation root [default: %r]' % default_destdir, default=default_destdir, dest='destdir')
|
||||
gr.add_option('-f', '--force', dest='force', default=False, action='store_true', help='force file installation')
|
||||
gr.add_option('--distcheck-args', metavar='ARGS', help='arguments to pass to distcheck', default=None, action='store')
|
||||
|
||||
def jobs(self):
|
||||
"""
|
||||
Finds the optimal amount of cpu cores to use for parallel jobs.
|
||||
At runtime the options can be obtained from :py:const:`waflib.Options.options` ::
|
||||
|
||||
from waflib.Options import options
|
||||
njobs = options.jobs
|
||||
|
||||
:return: the amount of cpu cores
|
||||
:rtype: int
|
||||
"""
|
||||
count = int(os.environ.get('JOBS', 0))
|
||||
if count < 1:
|
||||
if 'NUMBER_OF_PROCESSORS' in os.environ:
|
||||
# on Windows, use the NUMBER_OF_PROCESSORS environment variable
|
||||
count = int(os.environ.get('NUMBER_OF_PROCESSORS', 1))
|
||||
else:
|
||||
# on everything else, first try the POSIX sysconf values
|
||||
if hasattr(os, 'sysconf_names'):
|
||||
if 'SC_NPROCESSORS_ONLN' in os.sysconf_names:
|
||||
count = int(os.sysconf('SC_NPROCESSORS_ONLN'))
|
||||
elif 'SC_NPROCESSORS_CONF' in os.sysconf_names:
|
||||
count = int(os.sysconf('SC_NPROCESSORS_CONF'))
|
||||
if not count and os.name not in ('nt', 'java'):
|
||||
try:
|
||||
tmp = self.cmd_and_log(['sysctl', '-n', 'hw.ncpu'], quiet=0)
|
||||
except Errors.WafError:
|
||||
pass
|
||||
else:
|
||||
if re.match('^[0-9]+$', tmp):
|
||||
count = int(tmp)
|
||||
if count < 1:
|
||||
count = 1
|
||||
elif count > 1024:
|
||||
count = 1024
|
||||
return count
|
||||
|
||||
def add_option(self, *k, **kw):
|
||||
"""
|
||||
Wraps ``optparse.add_option``::
|
||||
|
||||
def options(ctx):
|
||||
ctx.add_option('-u', '--use', dest='use', default=False,
|
||||
action='store_true', help='a boolean option')
|
||||
|
||||
:rtype: optparse option object
|
||||
"""
|
||||
return self.parser.add_option(*k, **kw)
|
||||
|
||||
def add_option_group(self, *k, **kw):
|
||||
"""
|
||||
Wraps ``optparse.add_option_group``::
|
||||
|
||||
def options(ctx):
|
||||
gr = ctx.add_option_group('some options')
|
||||
gr.add_option('-u', '--use', dest='use', default=False, action='store_true')
|
||||
|
||||
:rtype: optparse option group object
|
||||
"""
|
||||
try:
|
||||
gr = self.option_groups[k[0]]
|
||||
except KeyError:
|
||||
gr = self.parser.add_option_group(*k, **kw)
|
||||
self.option_groups[k[0]] = gr
|
||||
return gr
|
||||
|
||||
def get_option_group(self, opt_str):
|
||||
"""
|
||||
Wraps ``optparse.get_option_group``::
|
||||
|
||||
def options(ctx):
|
||||
gr = ctx.get_option_group('configure options')
|
||||
gr.add_option('-o', '--out', action='store', default='',
|
||||
help='build dir for the project', dest='out')
|
||||
|
||||
:rtype: optparse option group object
|
||||
"""
|
||||
try:
|
||||
return self.option_groups[opt_str]
|
||||
except KeyError:
|
||||
for group in self.parser.option_groups:
|
||||
if group.title == opt_str:
|
||||
return group
|
||||
return None
|
||||
|
||||
def parse_args(self, _args=None):
|
||||
"""
|
||||
Parses arguments from a list which is not necessarily the command-line.
|
||||
|
||||
:param _args: arguments
|
||||
:type _args: list of strings
|
||||
"""
|
||||
global options, commands, envvars
|
||||
(options, leftover_args) = self.parser.parse_args(args=_args)
|
||||
|
||||
for arg in leftover_args:
|
||||
if '=' in arg:
|
||||
envvars.append(arg)
|
||||
else:
|
||||
commands.append(arg)
|
||||
|
||||
if options.destdir:
|
||||
options.destdir = Utils.sane_path(options.destdir)
|
||||
|
||||
if options.verbose >= 1:
|
||||
self.load('errcheck')
|
||||
|
||||
colors = {'yes' : 2, 'auto' : 1, 'no' : 0}[options.colors]
|
||||
Logs.enable_colors(colors)
|
||||
|
||||
def execute(self):
|
||||
"""
|
||||
See :py:func:`waflib.Context.Context.execute`
|
||||
"""
|
||||
super(OptionsContext, self).execute()
|
||||
self.parse_args()
|
||||
Utils.alloc_process_pool(options.jobs)
|
353
third_party/waf/waflib/Runner.py
vendored
Normal file
353
third_party/waf/waflib/Runner.py
vendored
Normal file
@ -0,0 +1,353 @@
|
||||
#! /usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
|
||||
|
||||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# Thomas Nagy, 2005-2016 (ita)
|
||||
|
||||
"""
|
||||
Runner.py: Task scheduling and execution
|
||||
"""
|
||||
|
||||
import random
|
||||
try:
|
||||
from queue import Queue
|
||||
except ImportError:
|
||||
from Queue import Queue
|
||||
from waflib import Utils, Task, Errors, Logs
|
||||
|
||||
GAP = 20
|
||||
"""
|
||||
Wait for at least ``GAP * njobs`` before trying to enqueue more tasks to run
|
||||
"""
|
||||
|
||||
class Consumer(Utils.threading.Thread):
|
||||
"""
|
||||
Daemon thread object that executes a task. It shares a semaphore with
|
||||
the coordinator :py:class:`waflib.Runner.Spawner`. There is one
|
||||
instance per task to consume.
|
||||
"""
|
||||
def __init__(self, spawner, task):
|
||||
Utils.threading.Thread.__init__(self)
|
||||
self.task = task
|
||||
"""Task to execute"""
|
||||
self.spawner = spawner
|
||||
"""Coordinator object"""
|
||||
self.setDaemon(1)
|
||||
self.start()
|
||||
def run(self):
|
||||
"""
|
||||
Processes a single task
|
||||
"""
|
||||
try:
|
||||
if not self.spawner.master.stop:
|
||||
self.task.process()
|
||||
finally:
|
||||
self.spawner.sem.release()
|
||||
self.spawner.master.out.put(self.task)
|
||||
self.task = None
|
||||
self.spawner = None
|
||||
|
||||
class Spawner(Utils.threading.Thread):
|
||||
"""
|
||||
Daemon thread that consumes tasks from :py:class:`waflib.Runner.Parallel` producer and
|
||||
spawns a consuming thread :py:class:`waflib.Runner.Consumer` for each
|
||||
:py:class:`waflib.Task.TaskBase` instance.
|
||||
"""
|
||||
def __init__(self, master):
|
||||
Utils.threading.Thread.__init__(self)
|
||||
self.master = master
|
||||
""":py:class:`waflib.Runner.Parallel` producer instance"""
|
||||
self.sem = Utils.threading.Semaphore(master.numjobs)
|
||||
"""Bounded semaphore that prevents spawning more than *n* concurrent consumers"""
|
||||
self.setDaemon(1)
|
||||
self.start()
|
||||
def run(self):
|
||||
"""
|
||||
Spawns new consumers to execute tasks by delegating to :py:meth:`waflib.Runner.Spawner.loop`
|
||||
"""
|
||||
try:
|
||||
self.loop()
|
||||
except Exception:
|
||||
# Python 2 prints unnecessary messages when shutting down
|
||||
# we also want to stop the thread properly
|
||||
pass
|
||||
def loop(self):
|
||||
"""
|
||||
Consumes task objects from the producer; ends when the producer has no more
|
||||
task to provide.
|
||||
"""
|
||||
master = self.master
|
||||
while 1:
|
||||
task = master.ready.get()
|
||||
self.sem.acquire()
|
||||
if not master.stop:
|
||||
task.log_display(task.generator.bld)
|
||||
Consumer(self, task)
|
||||
|
||||
class Parallel(object):
|
||||
"""
|
||||
Schedule the tasks obtained from the build context for execution.
|
||||
"""
|
||||
def __init__(self, bld, j=2):
|
||||
"""
|
||||
The initialization requires a build context reference
|
||||
for computing the total number of jobs.
|
||||
"""
|
||||
|
||||
self.numjobs = j
|
||||
"""
|
||||
Amount of parallel consumers to use
|
||||
"""
|
||||
|
||||
self.bld = bld
|
||||
"""
|
||||
Instance of :py:class:`waflib.Build.BuildContext`
|
||||
"""
|
||||
|
||||
self.outstanding = Utils.deque()
|
||||
"""List of :py:class:`waflib.Task.TaskBase` that may be ready to be executed"""
|
||||
|
||||
self.frozen = Utils.deque()
|
||||
"""List of :py:class:`waflib.Task.TaskBase` that are not ready yet"""
|
||||
|
||||
self.ready = Queue(0)
|
||||
"""List of :py:class:`waflib.Task.TaskBase` ready to be executed by consumers"""
|
||||
|
||||
self.out = Queue(0)
|
||||
"""List of :py:class:`waflib.Task.TaskBase` returned by the task consumers"""
|
||||
|
||||
self.count = 0
|
||||
"""Amount of tasks that may be processed by :py:class:`waflib.Runner.TaskConsumer`"""
|
||||
|
||||
self.processed = 1
|
||||
"""Amount of tasks processed"""
|
||||
|
||||
self.stop = False
|
||||
"""Error flag to stop the build"""
|
||||
|
||||
self.error = []
|
||||
"""Tasks that could not be executed"""
|
||||
|
||||
self.biter = None
|
||||
"""Task iterator which must give groups of parallelizable tasks when calling ``next()``"""
|
||||
|
||||
self.dirty = False
|
||||
"""
|
||||
Flag that indicates that the build cache must be saved when a task was executed
|
||||
(calls :py:meth:`waflib.Build.BuildContext.store`)"""
|
||||
|
||||
self.spawner = Spawner(self)
|
||||
"""
|
||||
Coordinating daemon thread that spawns thread consumers
|
||||
"""
|
||||
|
||||
def get_next_task(self):
|
||||
"""
|
||||
Obtains the next Task instance to run
|
||||
|
||||
:rtype: :py:class:`waflib.Task.TaskBase`
|
||||
"""
|
||||
if not self.outstanding:
|
||||
return None
|
||||
return self.outstanding.popleft()
|
||||
|
||||
def postpone(self, tsk):
|
||||
"""
|
||||
Adds the task to the list :py:attr:`waflib.Runner.Parallel.frozen`.
|
||||
The order is scrambled so as to consume as many tasks in parallel as possible.
|
||||
|
||||
:param tsk: task instance
|
||||
:type tsk: :py:class:`waflib.Task.TaskBase`
|
||||
"""
|
||||
if random.randint(0, 1):
|
||||
self.frozen.appendleft(tsk)
|
||||
else:
|
||||
self.frozen.append(tsk)
|
||||
|
||||
def refill_task_list(self):
|
||||
"""
|
||||
Adds the next group of tasks to execute in :py:attr:`waflib.Runner.Parallel.outstanding`.
|
||||
"""
|
||||
while self.count > self.numjobs * GAP:
|
||||
self.get_out()
|
||||
|
||||
while not self.outstanding:
|
||||
if self.count:
|
||||
self.get_out()
|
||||
elif self.frozen:
|
||||
try:
|
||||
cond = self.deadlock == self.processed
|
||||
except AttributeError:
|
||||
pass
|
||||
else:
|
||||
if cond:
|
||||
msg = 'check the build order for the tasks'
|
||||
for tsk in self.frozen:
|
||||
if not tsk.run_after:
|
||||
msg = 'check the methods runnable_status'
|
||||
break
|
||||
lst = []
|
||||
for tsk in self.frozen:
|
||||
lst.append('%s\t-> %r' % (repr(tsk), [id(x) for x in tsk.run_after]))
|
||||
raise Errors.WafError('Deadlock detected: %s%s' % (msg, ''.join(lst)))
|
||||
self.deadlock = self.processed
|
||||
|
||||
if self.frozen:
|
||||
self.outstanding.extend(self.frozen)
|
||||
self.frozen.clear()
|
||||
elif not self.count:
|
||||
self.outstanding.extend(self.biter.next())
|
||||
self.total = self.bld.total()
|
||||
break
|
||||
|
||||
def add_more_tasks(self, tsk):
|
||||
"""
|
||||
If a task provides :py:attr:`waflib.Task.TaskBase.more_tasks`, then the tasks contained
|
||||
in that list are added to the current build and will be processed before the next build group.
|
||||
|
||||
:param tsk: task instance
|
||||
:type tsk: :py:attr:`waflib.Task.TaskBase`
|
||||
"""
|
||||
if getattr(tsk, 'more_tasks', None):
|
||||
self.outstanding.extend(tsk.more_tasks)
|
||||
self.total += len(tsk.more_tasks)
|
||||
|
||||
def get_out(self):
|
||||
"""
|
||||
Waits for a Task that task consumers add to :py:attr:`waflib.Runner.Parallel.out` after execution.
|
||||
Adds more Tasks if necessary through :py:attr:`waflib.Runner.Parallel.add_more_tasks`.
|
||||
|
||||
:rtype: :py:attr:`waflib.Task.TaskBase`
|
||||
"""
|
||||
tsk = self.out.get()
|
||||
if not self.stop:
|
||||
self.add_more_tasks(tsk)
|
||||
self.count -= 1
|
||||
self.dirty = True
|
||||
return tsk
|
||||
|
||||
def add_task(self, tsk):
|
||||
"""
|
||||
Enqueue a Task to :py:attr:`waflib.Runner.Parallel.ready` so that consumers can run them.
|
||||
|
||||
:param tsk: task instance
|
||||
:type tsk: :py:attr:`waflib.Task.TaskBase`
|
||||
"""
|
||||
self.ready.put(tsk)
|
||||
|
||||
def skip(self, tsk):
|
||||
"""
|
||||
Mark a task as skipped/up-to-date
|
||||
"""
|
||||
tsk.hasrun = Task.SKIPPED
|
||||
|
||||
def error_handler(self, tsk):
|
||||
"""
|
||||
Called when a task cannot be executed. The flag :py:attr:`waflib.Runner.Parallel.stop` is set, unless
|
||||
the build is executed with::
|
||||
|
||||
$ waf build -k
|
||||
|
||||
:param tsk: task instance
|
||||
:type tsk: :py:attr:`waflib.Task.TaskBase`
|
||||
"""
|
||||
if hasattr(tsk, 'scan') and hasattr(tsk, 'uid'):
|
||||
# TODO waf 2.0 - this breaks encapsulation
|
||||
try:
|
||||
del self.bld.imp_sigs[tsk.uid()]
|
||||
except KeyError:
|
||||
pass
|
||||
if not self.bld.keep:
|
||||
self.stop = True
|
||||
self.error.append(tsk)
|
||||
|
||||
def task_status(self, tsk):
|
||||
"""
|
||||
Obtains the task status to decide whether to run it immediately or not.
|
||||
|
||||
:return: the exit status, for example :py:attr:`waflib.Task.ASK_LATER`
|
||||
:rtype: integer
|
||||
"""
|
||||
try:
|
||||
return tsk.runnable_status()
|
||||
except Exception:
|
||||
self.processed += 1
|
||||
tsk.err_msg = Utils.ex_stack()
|
||||
if not self.stop and self.bld.keep:
|
||||
self.skip(tsk)
|
||||
if self.bld.keep == 1:
|
||||
# if -k stop at the first exception, if -kk try to go as far as possible
|
||||
if Logs.verbose > 1 or not self.error:
|
||||
self.error.append(tsk)
|
||||
self.stop = True
|
||||
else:
|
||||
if Logs.verbose > 1:
|
||||
self.error.append(tsk)
|
||||
return Task.EXCEPTION
|
||||
tsk.hasrun = Task.EXCEPTION
|
||||
|
||||
self.error_handler(tsk)
|
||||
return Task.EXCEPTION
|
||||
|
||||
def start(self):
|
||||
"""
|
||||
Obtains Task instances from the BuildContext instance and adds the ones that need to be executed to
|
||||
:py:class:`waflib.Runner.Parallel.ready` so that the :py:class:`waflib.Runner.Spawner` consumer thread
|
||||
has them executed. Obtains the executed Tasks back from :py:class:`waflib.Runner.Parallel.out`
|
||||
and marks the build as failed by setting the ``stop`` flag.
|
||||
If only one job is used, then executes the tasks one by one, without consumers.
|
||||
"""
|
||||
self.total = self.bld.total()
|
||||
|
||||
while not self.stop:
|
||||
|
||||
self.refill_task_list()
|
||||
|
||||
# consider the next task
|
||||
tsk = self.get_next_task()
|
||||
if not tsk:
|
||||
if self.count:
|
||||
# tasks may add new ones after they are run
|
||||
continue
|
||||
else:
|
||||
# no tasks to run, no tasks running, time to exit
|
||||
break
|
||||
|
||||
if tsk.hasrun:
|
||||
# if the task is marked as "run", just skip it
|
||||
self.processed += 1
|
||||
continue
|
||||
|
||||
if self.stop: # stop immediately after a failure was detected
|
||||
break
|
||||
|
||||
|
||||
st = self.task_status(tsk)
|
||||
if st == Task.RUN_ME:
|
||||
self.count += 1
|
||||
self.processed += 1
|
||||
|
||||
if self.numjobs == 1:
|
||||
tsk.log_display(tsk.generator.bld)
|
||||
try:
|
||||
tsk.process()
|
||||
finally:
|
||||
self.out.put(tsk)
|
||||
else:
|
||||
self.add_task(tsk)
|
||||
if st == Task.ASK_LATER:
|
||||
self.postpone(tsk)
|
||||
elif st == Task.SKIP_ME:
|
||||
self.processed += 1
|
||||
self.skip(tsk)
|
||||
self.add_more_tasks(tsk)
|
||||
|
||||
# self.count represents the tasks that have been made available to the consumer threads
|
||||
# collect all the tasks after an error else the message may be incomplete
|
||||
while self.error and self.count:
|
||||
self.get_out()
|
||||
|
||||
self.ready.put(None)
|
||||
assert (self.count == 0 or self.stop)
|
627
third_party/waf/waflib/Scripting.py
vendored
Normal file
627
third_party/waf/waflib/Scripting.py
vendored
Normal file
@ -0,0 +1,627 @@
|
||||
#! /usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
|
||||
|
||||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# Thomas Nagy, 2005-2016 (ita)
|
||||
|
||||
"Module called for configuring, compiling and installing targets"
|
||||
|
||||
import os, shlex, shutil, traceback, errno, sys, stat
|
||||
from waflib import Utils, Configure, Logs, Options, ConfigSet, Context, Errors, Build, Node
|
||||
|
||||
build_dir_override = None
|
||||
|
||||
no_climb_commands = ['configure']
|
||||
|
||||
default_cmd = "build"
|
||||
|
||||
def waf_entry_point(current_directory, version, wafdir):
|
||||
"""
|
||||
This is the main entry point, all Waf execution starts here.
|
||||
|
||||
:param current_directory: absolute path representing the current directory
|
||||
:type current_directory: string
|
||||
:param version: version number
|
||||
:type version: string
|
||||
:param wafdir: absolute path representing the directory of the waf library
|
||||
:type wafdir: string
|
||||
"""
|
||||
|
||||
Logs.init_log()
|
||||
|
||||
if Context.WAFVERSION != version:
|
||||
Logs.error('Waf script %r and library %r do not match (directory %r)', version, Context.WAFVERSION, wafdir)
|
||||
sys.exit(1)
|
||||
|
||||
if '--version' in sys.argv:
|
||||
Context.run_dir = current_directory
|
||||
ctx = Context.create_context('options')
|
||||
ctx.curdir = current_directory
|
||||
ctx.parse_args()
|
||||
sys.exit(0)
|
||||
|
||||
if len(sys.argv) > 1:
|
||||
# os.path.join handles absolute paths in sys.argv[1] accordingly (it discards the previous ones)
|
||||
# if sys.argv[1] is not an absolute path, then it is relative to the current working directory
|
||||
potential_wscript = os.path.join(current_directory, sys.argv[1])
|
||||
# maybe check if the file is executable
|
||||
# perhaps extract 'wscript' as a constant
|
||||
if os.path.basename(potential_wscript) == 'wscript' and os.path.isfile(potential_wscript):
|
||||
# need to explicitly normalize the path, as it may contain extra '/.'
|
||||
# TODO abspath?
|
||||
current_directory = os.path.normpath(os.path.dirname(potential_wscript))
|
||||
sys.argv.pop(1)
|
||||
|
||||
Context.waf_dir = wafdir
|
||||
Context.launch_dir = current_directory
|
||||
|
||||
# if 'configure' is in the commands, do not search any further
|
||||
no_climb = os.environ.get('NOCLIMB')
|
||||
if not no_climb:
|
||||
for k in no_climb_commands:
|
||||
for y in sys.argv:
|
||||
if y.startswith(k):
|
||||
no_climb = True
|
||||
break
|
||||
|
||||
# if --top is provided assume the build started in the top directory
|
||||
for i, x in enumerate(sys.argv):
|
||||
# WARNING: this modifies sys.argv
|
||||
if x.startswith('--top='):
|
||||
Context.run_dir = Context.top_dir = Utils.sane_path(x[6:])
|
||||
sys.argv[i] = '--top=' + Context.run_dir
|
||||
if x.startswith('--out='):
|
||||
Context.out_dir = Utils.sane_path(x[6:])
|
||||
sys.argv[i] = '--out=' + Context.out_dir
|
||||
|
||||
# try to find a lock file (if the project was configured)
|
||||
# at the same time, store the first wscript file seen
|
||||
cur = current_directory
|
||||
while cur and not Context.top_dir:
|
||||
try:
|
||||
lst = os.listdir(cur)
|
||||
except OSError:
|
||||
lst = []
|
||||
Logs.error('Directory %r is unreadable!', cur)
|
||||
if Options.lockfile in lst:
|
||||
env = ConfigSet.ConfigSet()
|
||||
try:
|
||||
env.load(os.path.join(cur, Options.lockfile))
|
||||
ino = os.stat(cur)[stat.ST_INO]
|
||||
except EnvironmentError:
|
||||
pass
|
||||
else:
|
||||
# check if the folder was not moved
|
||||
for x in (env.run_dir, env.top_dir, env.out_dir):
|
||||
if not x:
|
||||
continue
|
||||
if Utils.is_win32:
|
||||
if cur == x:
|
||||
load = True
|
||||
break
|
||||
else:
|
||||
# if the filesystem features symlinks, compare the inode numbers
|
||||
try:
|
||||
ino2 = os.stat(x)[stat.ST_INO]
|
||||
except OSError:
|
||||
pass
|
||||
else:
|
||||
if ino == ino2:
|
||||
load = True
|
||||
break
|
||||
else:
|
||||
Logs.warn('invalid lock file in %s', cur)
|
||||
load = False
|
||||
|
||||
if load:
|
||||
Context.run_dir = env.run_dir
|
||||
Context.top_dir = env.top_dir
|
||||
Context.out_dir = env.out_dir
|
||||
break
|
||||
|
||||
if not Context.run_dir:
|
||||
if Context.WSCRIPT_FILE in lst:
|
||||
Context.run_dir = cur
|
||||
|
||||
next = os.path.dirname(cur)
|
||||
if next == cur:
|
||||
break
|
||||
cur = next
|
||||
|
||||
if no_climb:
|
||||
break
|
||||
|
||||
if not Context.run_dir:
|
||||
if '-h' in sys.argv or '--help' in sys.argv:
|
||||
Logs.warn('No wscript file found: the help message may be incomplete')
|
||||
Context.run_dir = current_directory
|
||||
ctx = Context.create_context('options')
|
||||
ctx.curdir = current_directory
|
||||
ctx.parse_args()
|
||||
sys.exit(0)
|
||||
Logs.error('Waf: Run from a directory containing a file named %r', Context.WSCRIPT_FILE)
|
||||
sys.exit(1)
|
||||
|
||||
try:
|
||||
os.chdir(Context.run_dir)
|
||||
except OSError:
|
||||
Logs.error('Waf: The folder %r is unreadable', Context.run_dir)
|
||||
sys.exit(1)
|
||||
|
||||
try:
|
||||
set_main_module(os.path.normpath(os.path.join(Context.run_dir, Context.WSCRIPT_FILE)))
|
||||
except Errors.WafError ,e:
|
||||
Logs.pprint('RED', e.verbose_msg)
|
||||
Logs.error(str(e))
|
||||
sys.exit(1)
|
||||
except Exception ,e:
|
||||
Logs.error('Waf: The wscript in %r is unreadable', Context.run_dir)
|
||||
traceback.print_exc(file=sys.stdout)
|
||||
sys.exit(2)
|
||||
|
||||
if '--profile' in sys.argv:
|
||||
import cProfile, pstats
|
||||
cProfile.runctx('from waflib import Scripting; Scripting.run_commands()', {}, {}, 'profi.txt')
|
||||
p = pstats.Stats('profi.txt')
|
||||
p.sort_stats('time').print_stats(75) # or 'cumulative'
|
||||
else:
|
||||
try:
|
||||
run_commands()
|
||||
except Errors.WafError ,e:
|
||||
if Logs.verbose > 1:
|
||||
Logs.pprint('RED', e.verbose_msg)
|
||||
Logs.error(e.msg)
|
||||
sys.exit(1)
|
||||
except SystemExit:
|
||||
raise
|
||||
except Exception ,e:
|
||||
traceback.print_exc(file=sys.stdout)
|
||||
sys.exit(2)
|
||||
except KeyboardInterrupt:
|
||||
Logs.pprint('RED', 'Interrupted')
|
||||
sys.exit(68)
|
||||
|
||||
def set_main_module(file_path):
|
||||
"""
|
||||
Read the main wscript file into :py:const:`waflib.Context.Context.g_module` and
|
||||
bind default functions such as ``init``, ``dist``, ``distclean`` if not defined.
|
||||
Called by :py:func:`waflib.Scripting.waf_entry_point` during the initialization.
|
||||
|
||||
:param file_path: absolute path representing the top-level wscript file
|
||||
:type file_path: string
|
||||
"""
|
||||
Context.g_module = Context.load_module(file_path)
|
||||
Context.g_module.root_path = file_path
|
||||
|
||||
# note: to register the module globally, use the following:
|
||||
# sys.modules['wscript_main'] = g_module
|
||||
|
||||
def set_def(obj):
|
||||
name = obj.__name__
|
||||
if not name in Context.g_module.__dict__:
|
||||
setattr(Context.g_module, name, obj)
|
||||
for k in (dist, distclean, distcheck):
|
||||
set_def(k)
|
||||
# add dummy init and shutdown functions if they're not defined
|
||||
if not 'init' in Context.g_module.__dict__:
|
||||
Context.g_module.init = Utils.nada
|
||||
if not 'shutdown' in Context.g_module.__dict__:
|
||||
Context.g_module.shutdown = Utils.nada
|
||||
if not 'options' in Context.g_module.__dict__:
|
||||
Context.g_module.options = Utils.nada
|
||||
|
||||
def parse_options():
|
||||
"""
|
||||
Parses the command-line options and initialize the logging system.
|
||||
Called by :py:func:`waflib.Scripting.waf_entry_point` during the initialization.
|
||||
"""
|
||||
Context.create_context('options').execute()
|
||||
|
||||
for var in Options.envvars:
|
||||
(name, value) = var.split('=', 1)
|
||||
os.environ[name.strip()] = value
|
||||
|
||||
if not Options.commands:
|
||||
Options.commands = [default_cmd]
|
||||
Options.commands = [x for x in Options.commands if x != 'options'] # issue 1076
|
||||
|
||||
# process some internal Waf options
|
||||
Logs.verbose = Options.options.verbose
|
||||
#Logs.init_log()
|
||||
|
||||
if Options.options.zones:
|
||||
Logs.zones = Options.options.zones.split(',')
|
||||
if not Logs.verbose:
|
||||
Logs.verbose = 1
|
||||
elif Logs.verbose > 0:
|
||||
Logs.zones = ['runner']
|
||||
|
||||
if Logs.verbose > 2:
|
||||
Logs.zones = ['*']
|
||||
|
||||
def run_command(cmd_name):
|
||||
"""
|
||||
Executes a single Waf command. Called by :py:func:`waflib.Scripting.run_commands`.
|
||||
|
||||
:param cmd_name: command to execute, like ``build``
|
||||
:type cmd_name: string
|
||||
"""
|
||||
ctx = Context.create_context(cmd_name)
|
||||
ctx.log_timer = Utils.Timer()
|
||||
ctx.options = Options.options # provided for convenience
|
||||
ctx.cmd = cmd_name
|
||||
try:
|
||||
ctx.execute()
|
||||
finally:
|
||||
# Issue 1374
|
||||
ctx.finalize()
|
||||
return ctx
|
||||
|
||||
def run_commands():
|
||||
"""
|
||||
Execute the Waf commands that were given on the command-line, and the other options
|
||||
Called by :py:func:`waflib.Scripting.waf_entry_point` during the initialization, and executed
|
||||
after :py:func:`waflib.Scripting.parse_options`.
|
||||
"""
|
||||
parse_options()
|
||||
run_command('init')
|
||||
while Options.commands:
|
||||
cmd_name = Options.commands.pop(0)
|
||||
ctx = run_command(cmd_name)
|
||||
Logs.info('%r finished successfully (%s)', cmd_name, ctx.log_timer)
|
||||
run_command('shutdown')
|
||||
|
||||
###########################################################################################
|
||||
|
||||
def distclean_dir(dirname):
|
||||
"""
|
||||
Distclean function called in the particular case when::
|
||||
|
||||
top == out
|
||||
|
||||
:param dirname: absolute path of the folder to clean
|
||||
:type dirname: string
|
||||
"""
|
||||
for (root, dirs, files) in os.walk(dirname):
|
||||
for f in files:
|
||||
if f.endswith(('.o', '.moc', '.exe')):
|
||||
fname = os.path.join(root, f)
|
||||
try:
|
||||
os.remove(fname)
|
||||
except OSError:
|
||||
Logs.warn('Could not remove %r', fname)
|
||||
|
||||
for x in (Context.DBFILE, 'config.log'):
|
||||
try:
|
||||
os.remove(x)
|
||||
except OSError:
|
||||
pass
|
||||
|
||||
try:
|
||||
shutil.rmtree('c4che')
|
||||
except OSError:
|
||||
pass
|
||||
|
||||
def distclean(ctx):
|
||||
'''removes the build directory'''
|
||||
lst = os.listdir('.')
|
||||
for f in lst:
|
||||
if f == Options.lockfile:
|
||||
try:
|
||||
proj = ConfigSet.ConfigSet(f)
|
||||
except IOError:
|
||||
Logs.warn('Could not read %r', f)
|
||||
continue
|
||||
|
||||
if proj['out_dir'] != proj['top_dir']:
|
||||
try:
|
||||
shutil.rmtree(proj['out_dir'])
|
||||
except EnvironmentError ,e:
|
||||
if e.errno != errno.ENOENT:
|
||||
Logs.warn('Could not remove %r', proj['out_dir'])
|
||||
else:
|
||||
distclean_dir(proj['out_dir'])
|
||||
|
||||
for k in (proj['out_dir'], proj['top_dir'], proj['run_dir']):
|
||||
p = os.path.join(k, Options.lockfile)
|
||||
try:
|
||||
os.remove(p)
|
||||
except OSError ,e:
|
||||
if e.errno != errno.ENOENT:
|
||||
Logs.warn('Could not remove %r', p)
|
||||
|
||||
# remove local waf cache folders
|
||||
if not Options.commands:
|
||||
for x in '.waf-1. waf-1. .waf3-1. waf3-1.'.split():
|
||||
if f.startswith(x):
|
||||
shutil.rmtree(f, ignore_errors=True)
|
||||
|
||||
class Dist(Context.Context):
	'''creates an archive containing the project source code'''
	cmd = 'dist'
	fun = 'dist'
	# default archive format; override via the 'algo' attribute in wscript
	algo = 'tar.bz2'
	# optional mapping of algo -> file extension (see get_arch_name)
	ext_algo = {}

	def execute(self):
		"""
		See :py:func:`waflib.Context.Context.execute`
		"""
		self.recurse([os.path.dirname(Context.g_module.root_path)])
		self.archive()

	def archive(self):
		"""
		Creates the source archive.
		"""
		import tarfile

		arch_name = self.get_arch_name()

		# base_path defaults to the current path unless set by the wscript
		try:
			self.base_path
		except AttributeError:
			self.base_path = self.path

		node = self.base_path.make_node(arch_name)
		# remove any stale archive of the same name before writing
		try:
			node.delete()
		except OSError:
			pass

		files = self.get_files()

		if self.algo.startswith('tar.'):
			# 'tar.bz2' -> mode 'w:bz2', 'tar.gz' -> 'w:gz', etc.
			tar = tarfile.open(node.abspath(), 'w:' + self.algo.replace('tar.', ''))

			for x in files:
				self.add_tar_file(x, tar)
			tar.close()
		elif self.algo == 'zip':
			import zipfile
			zip = zipfile.ZipFile(node.abspath(), 'w', compression=zipfile.ZIP_DEFLATED)

			for x in files:
				archive_name = self.get_base_name() + '/' + x.path_from(self.base_path)
				zip.write(x.abspath(), archive_name, zipfile.ZIP_DEFLATED)
			zip.close()
		else:
			self.fatal('Valid algo types are tar.bz2, tar.gz, tar.xz or zip')

		# compute a sha1 digest of the archive for the log message
		try:
			from hashlib import sha1
		except ImportError:
			digest = ''
		else:
			digest = ' (sha=%r)' % sha1(node.read(flags='rb')).hexdigest()

		Logs.info('New archive created: %s%s', self.arch_name, digest)

	def get_tar_path(self, node):
		"""
		Return the path to use for a node in the tar archive, the purpose of this
		is to let subclases resolve symbolic links or to change file names

		:return: absolute path
		:rtype: string
		"""
		return node.abspath()

	def add_tar_file(self, x, tar):
		"""
		Adds a file to the tar archive. Symlinks are not verified.

		:param x: file path
		:param tar: tar file object
		"""
		p = self.get_tar_path(x)
		tinfo = tar.gettarinfo(name=p, arcname=self.get_tar_prefix() + '/' + x.path_from(self.base_path))
		# normalize ownership so the archive is reproducible across users
		tinfo.uid = 0
		tinfo.gid = 0
		tinfo.uname = 'root'
		tinfo.gname = 'root'

		if os.path.isfile(p):
			fu = open(p, 'rb')
			try:
				tar.addfile(tinfo, fileobj=fu)
			finally:
				fu.close()
		else:
			# directories and symlinks carry no file object
			tar.addfile(tinfo)

	def get_tar_prefix(self):
		"""
		Returns the base path for files added into the archive tar file

		:rtype: string
		"""
		try:
			return self.tar_prefix
		except AttributeError:
			return self.get_base_name()

	def get_arch_name(self):
		"""
		Returns the archive file name.
		Set the attribute *arch_name* to change the default value::

			def dist(ctx):
				ctx.arch_name = 'ctx.tar.bz2'

		:rtype: string
		"""
		try:
			self.arch_name
		except AttributeError:
			self.arch_name = self.get_base_name() + '.' + self.ext_algo.get(self.algo, self.algo)
		return self.arch_name

	def get_base_name(self):
		"""
		Returns the default name of the main directory in the archive, which is set to *appname-version*.
		Set the attribute *base_name* to change the default value::

			def dist(ctx):
				ctx.base_name = 'files'

		:rtype: string
		"""
		try:
			self.base_name
		except AttributeError:
			appname = getattr(Context.g_module, Context.APPNAME, 'noname')
			version = getattr(Context.g_module, Context.VERSION, '1.0')
			self.base_name = appname + '-' + version
		return self.base_name

	def get_excl(self):
		"""
		Returns the patterns to exclude for finding the files in the top-level directory.
		Set the attribute *excl* to change the default value::

			def dist(ctx):
				ctx.excl = 'build **/*.o **/*.class'

		:rtype: string
		"""
		try:
			return self.excl
		except AttributeError:
			self.excl = Node.exclude_regs + ' **/waf-1.8.* **/.waf-1.8* **/waf3-1.8.* **/.waf3-1.8* **/*~ **/*.rej **/*.orig **/*.pyc **/*.pyo **/*.bak **/*.swp **/.lock-w*'
			# also exclude the build directory itself, if known
			if Context.out_dir:
				nd = self.root.find_node(Context.out_dir)
				if nd:
					self.excl += ' ' + nd.path_from(self.base_path)
			return self.excl

	def get_files(self):
		"""
		Files to package are searched automatically by :py:func:`waflib.Node.Node.ant_glob`.
		Set *files* to prevent this behaviour::

			def dist(ctx):
				ctx.files = ctx.path.find_node('wscript')

		Files are also searched from the directory 'base_path', to change it, set::

			def dist(ctx):
				ctx.base_path = path

		:rtype: list of :py:class:`waflib.Node.Node`
		"""
		try:
			files = self.files
		except AttributeError:
			files = self.base_path.ant_glob('**/*', excl=self.get_excl())
		return files
||||
|
||||
def dist(ctx):
	'''makes a tarball for redistributing the sources'''
	# The docstring above is the command help text; the actual work is
	# performed by the Dist context class bound to the 'dist' command.
	pass
|
||||
class DistCheck(Dist):
	"""
	Creates an archive of the project, then attempts to build the project in a temporary directory::

		$ waf distcheck
	"""
	fun = 'distcheck'
	cmd = 'distcheck'

	def execute(self):
		"""
		See :py:func:`waflib.Context.Context.execute`
		"""
		self.recurse([os.path.dirname(Context.g_module.root_path)])
		self.archive()
		self.check()

	def make_distcheck_cmd(self, tmpdir):
		"""
		Returns the command line used to configure/build/install/uninstall
		the extracted archive; extra flags come from ``--distcheck-args``
		or are inherited from the current command line.

		:param tmpdir: installation directory passed as --destdir
		:rtype: list of string
		"""
		cfg = []
		if Options.options.distcheck_args:
			cfg = shlex.split(Options.options.distcheck_args)
		else:
			cfg = [x for x in sys.argv if x.startswith('-')]
		cmd = [sys.executable, sys.argv[0], 'configure', 'build', 'install', 'uninstall', '--destdir=' + tmpdir] + cfg
		return cmd

	def check(self):
		"""
		Creates the archive, uncompresses it and tries to build the project
		"""
		import tempfile, tarfile

		# Open the archive *before* the try block: previously, a failure in
		# tarfile.open() raised NameError on the unbound 't' in the finally
		# clause, masking the original error.
		t = tarfile.open(self.get_arch_name())
		try:
			for x in t:
				t.extract(x)
		finally:
			t.close()

		instdir = tempfile.mkdtemp('.inst', self.get_base_name())
		cmd = self.make_distcheck_cmd(instdir)
		ret = Utils.subprocess.Popen(cmd, cwd=self.get_base_name()).wait()
		if ret:
			raise Errors.WafError('distcheck failed with code %r' % ret)

		# a correct uninstall leaves no file behind under --destdir
		if os.path.exists(instdir):
			raise Errors.WafError('distcheck succeeded, but files were left in %s' % instdir)

		shutil.rmtree(self.get_base_name())
|
||||
def distcheck(ctx):
	'''checks if the project compiles (tarball from 'dist')'''
	# The docstring above is the command help text; the actual work is
	# performed by the DistCheck context class bound to 'distcheck'.
	pass
|
||||
def autoconfigure(execute_method):
	"""
	Decorator that enables context commands to run *configure* as needed.
	"""
	def execute(self):
		"""
		Wraps :py:func:`waflib.Context.Context.execute` on the context class
		"""
		if not Configure.autoconfig:
			return execute_method(self)

		env = ConfigSet.ConfigSet()
		do_config = False
		try:
			env.load(os.path.join(Context.top_dir, Options.lockfile))
		except EnvironmentError:
			# no lock file yet: the project was never configured
			Logs.warn('Configuring the project')
			do_config = True
		else:
			if env.run_dir != Context.run_dir:
				# the project was moved: a reconfiguration is required
				do_config = True
			else:
				# hash the configuration input files; reconfigure when any
				# of them changed or disappeared since the last configure
				h = 0
				for f in env.files:
					try:
						h = Utils.h_list((h, Utils.readf(f, 'rb')))
					except EnvironmentError:
						do_config = True
						break
				else:
					do_config = h != env.hash

		if do_config:
			cmd = env.config_cmd or 'configure'
			if Configure.autoconfig == 'clobber':
				# 'clobber' mode: replay the original configure options,
				# restoring the current ones afterwards
				tmp = Options.options.__dict__
				Options.options.__dict__ = env.options
				try:
					run_command(cmd)
				finally:
					Options.options.__dict__ = tmp
			else:
				run_command(cmd)
			# re-run the original command after configuring
			run_command(self.cmd)
		else:
			return execute_method(self)
	return execute
||||
# Install the autoconfigure wrapper so that running a build command triggers
# 'configure' automatically when required (see Configure.autoconfig).
Build.BuildContext.execute = autoconfigure(Build.BuildContext.execute)
|
1242
third_party/waf/waflib/Task.py
vendored
Normal file
1242
third_party/waf/waflib/Task.py
vendored
Normal file
File diff suppressed because it is too large
Load Diff
891
third_party/waf/waflib/TaskGen.py
vendored
Normal file
891
third_party/waf/waflib/TaskGen.py
vendored
Normal file
@ -0,0 +1,891 @@
|
||||
#! /usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
|
||||
|
||||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# Thomas Nagy, 2005-2016 (ita)
|
||||
|
||||
"""
|
||||
Task generators
|
||||
|
||||
The class :py:class:`waflib.TaskGen.task_gen` encapsulates the creation of task objects (low-level code)
|
||||
The instances can have various parameters, but the creation of task nodes (Task.py)
|
||||
is deferred. To achieve this, various methods are called from the method "apply"
|
||||
"""
|
||||
|
||||
import copy, re, os, functools
|
||||
from waflib import Task, Utils, Logs, Errors, ConfigSet, Node
|
||||
|
||||
feats = Utils.defaultdict(set)
|
||||
"""remember the methods declaring features"""
|
||||
|
||||
HEADER_EXTS = ['.h', '.hpp', '.hxx', '.hh']
|
||||
|
||||
class task_gen(object):
	"""
	Instances of this class create :py:class:`waflib.Task.TaskBase` when
	calling the method :py:meth:`waflib.TaskGen.task_gen.post` from the main thread.
	A few notes:

	* The methods to call (*self.meths*) can be specified dynamically (removing, adding, ..)
	* The 'features' are used to add methods to self.meths and then execute them
	* The attribute 'path' is a node representing the location of the task generator
	* The tasks created are added to the attribute *tasks*
	* The attribute 'idx' is a counter of task generators in the same path
	"""

	mappings = Utils.ordered_iter_dict()
	"""Mappings are global file extension mappings that are retrieved in the order of definition"""

	prec = Utils.defaultdict(list)
	"""Dict that holds the precedence execution rules for task generator methods"""

	def __init__(self, *k, **kw):
		"""
		Task generator objects predefine various attributes (source, target) for possible
		processing by process_rule (make-like rules) or process_source (extensions, misc methods)

		Tasks are stored on the attribute 'tasks'. They are created by calling methods
		listed in ``self.meths`` or referenced in the attribute ``features``
		A topological sort is performed to execute the methods in correct order.

		The extra key/value elements passed in ``kw`` are set as attributes
		"""
		self.source = ''
		self.target = ''

		self.meths = []
		"""
		List of method names to execute (internal)
		"""

		self.features = []
		"""
		List of feature names for bringing new methods in
		"""

		self.tasks = []
		"""
		Tasks created are added to this list
		"""

		if not 'bld' in kw:
			# task generators without a build context :-/
			self.env = ConfigSet.ConfigSet()
			self.idx = 0
			self.path = None
		else:
			self.bld = kw['bld']
			self.env = self.bld.env.derive()
			self.path = self.bld.path # emulate chdir when reading scripts

			# provide a unique id
			try:
				self.idx = self.bld.idx[self.path] = self.bld.idx.get(self.path, 0) + 1
			except AttributeError:
				# first task generator created on this build context
				self.bld.idx = {}
				self.idx = self.bld.idx[self.path] = 1

		# the kw values override the defaults set above
		for key, val in kw.items():
			setattr(self, key, val)

	def __str__(self):
		"""Debugging helper"""
		return "<task_gen %r declared in %s>" % (self.name, self.path.abspath())

	def __repr__(self):
		"""Debugging helper"""
		lst = []
		for x in self.__dict__:
			if x not in ('env', 'bld', 'compiled_tasks', 'tasks'):
				lst.append("%s=%s" % (x, repr(getattr(self, x))))
		return "bld(%s) in %s" % (", ".join(lst), self.path.abspath())

	def get_cwd(self):
		"""
		Current working directory for the task generator, defaults to the build directory.
		This is still used in a few places but it should disappear at some point as the classes
		define their own working directory.

		:rtype: :py:class:`waflib.Node.Node`
		"""
		return self.bld.bldnode

	def get_name(self):
		"""
		If the attribute ``name`` is not set on the instance,
		the name is computed from the target name::

			def build(bld):
				x = bld(name='foo')
				x.get_name() # foo
				y = bld(target='bar')
				y.get_name() # bar

		:rtype: string
		:return: name of this task generator
		"""
		try:
			return self._name
		except AttributeError:
			if isinstance(self.target, list):
				lst = [str(x) for x in self.target]
				name = self._name = ','.join(lst)
			else:
				name = self._name = str(self.target)
			return name
	def set_name(self, name):
		self._name = name

	name = property(get_name, set_name)

	def to_list(self, val):
		"""
		Ensures that a parameter is a list, see :py:func:`waflib.Utils.to_list`

		:type val: string or list of string
		:param val: input to return as a list
		:rtype: list
		"""
		if isinstance(val, str):
			return val.split()
		else:
			return val

	def post(self):
		"""
		Creates tasks for this task generators. The following operations are performed:

		#. The body of this method is called only once and sets the attribute ``posted``
		#. The attribute ``features`` is used to add more methods in ``self.meths``
		#. The methods are sorted by the precedence table ``self.prec`` or `:waflib:attr:waflib.TaskGen.task_gen.prec`
		#. The methods are then executed in order
		#. The tasks created are added to :py:attr:`waflib.TaskGen.task_gen.tasks`
		"""
		if getattr(self, 'posted', None):
			# posting is idempotent: the tasks are only created once
			return False
		self.posted = True

		keys = set(self.meths)
		keys.update(feats['*'])

		# add the methods listed in the features
		self.features = Utils.to_list(self.features)
		for x in self.features:
			st = feats[x]
			if st:
				keys.update(st)
			elif not x in Task.classes:
				Logs.warn('feature %r does not exist - bind at least one method to it?', x)

		# copy the precedence table
		prec = {}
		prec_tbl = self.prec
		for x in prec_tbl:
			if x in keys:
				prec[x] = prec_tbl[x]

		# elements disconnected
		tmp = []
		for a in keys:
			for x in prec.values():
				if a in x: break
			else:
				tmp.append(a)

		# sort so the execution order is deterministic across runs
		tmp.sort()

		# topological sort
		out = []
		while tmp:
			e = tmp.pop()
			if e in keys:
				out.append(e)
			try:
				nlst = prec[e]
			except KeyError:
				pass
			else:
				del prec[e]
				for x in nlst:
					for y in prec:
						if x in prec[y]:
							break
					else:
						tmp.append(x)

		# anything left in prec at this point belongs to a cycle
		if prec:
			buf = ['Cycle detected in the method execution:']
			for k, v in prec.items():
				buf.append('- %s after %s' % (k, [x for x in v if x in prec]))
			raise Errors.WafError('\n'.join(buf))
		out.reverse()
		self.meths = out

		# then we run the methods in order
		Logs.debug('task_gen: posting %s %d', self, id(self))
		for x in out:
			try:
				v = getattr(self, x)
			except AttributeError:
				raise Errors.WafError('%r is not a valid task generator method' % x)
			Logs.debug('task_gen: -> %s (%d)', x, id(self))
			v()

		Logs.debug('task_gen: posted %s', self.name)
		return True

	def get_hook(self, node):
		"""
		Returns the ``@extension`` method to call for a Node of a particular extension.

		:param node: Input file to process
		:type node: :py:class:`waflib.Tools.Node.Node`
		:return: A method able to process the input node by looking at the extension
		:rtype: function
		"""
		name = node.name
		for k in self.mappings:
			try:
				if name.endswith(k):
					return self.mappings[k]
			except TypeError:
				# regexps objects
				if k.match(name):
					return self.mappings[k]
		keys = list(self.mappings.keys())
		raise Errors.WafError("File %r has no mapping in %r (load a waf tool?)" % (node, keys))

	def create_task(self, name, src=None, tgt=None, **kw):
		"""
		Creates task instances.

		:param name: task class name
		:type name: string
		:param src: input nodes
		:type src: list of :py:class:`waflib.Tools.Node.Node`
		:param tgt: output nodes
		:type tgt: list of :py:class:`waflib.Tools.Node.Node`
		:return: A task object
		:rtype: :py:class:`waflib.Task.TaskBase`
		"""
		task = Task.classes[name](env=self.env.derive(), generator=self)
		if src:
			task.set_inputs(src)
		if tgt:
			task.set_outputs(tgt)
		# extra keyword arguments become task attributes
		task.__dict__.update(kw)
		self.tasks.append(task)
		return task

	def clone(self, env):
		"""
		Makes a copy of a task generator. Once the copy is made, it is necessary to ensure that the
		it does not create the same output files as the original, or the same files may
		be compiled several times.

		:param env: A configuration set
		:type env: :py:class:`waflib.ConfigSet.ConfigSet`
		:return: A copy
		:rtype: :py:class:`waflib.TaskGen.task_gen`
		"""
		newobj = self.bld()
		for x in self.__dict__:
			if x in ('env', 'bld'):
				continue
			elif x in ('path', 'features'):
				# shared by reference on purpose
				setattr(newobj, x, getattr(self, x))
			else:
				setattr(newobj, x, copy.copy(getattr(self, x)))

		newobj.posted = False
		if isinstance(env, str):
			newobj.env = self.bld.all_envs[env].derive()
		else:
			newobj.env = env.derive()

		return newobj
||||
|
||||
def declare_chain(name='', rule=None, reentrant=None, color='BLUE',
	ext_in=[], ext_out=[], before=[], after=[], decider=None, scan=None, install_path=None, shell=False):
	"""
	Creates a new mapping and a task class for processing files by extension.
	See Tools/flex.py for an example.

	:param name: name for the task class
	:type name: string
	:param rule: function to execute or string to be compiled in a function
	:type rule: string or function
	:param reentrant: re-inject the output file in the process (done automatically, set to 0 to disable)
	:type reentrant: int
	:param color: color for the task output
	:type color: string
	:param ext_in: execute the task only after the files of such extensions are created
	:type ext_in: list of string
	:param ext_out: execute the task only before files of such extensions are processed
	:type ext_out: list of string
	:param before: execute instances of this task before classes of the given names
	:type before: list of string
	:param after: execute instances of this task after classes of the given names
	:type after: list of string
	:param decider: if present, function that returns a list of output file extensions (overrides ext_out for output files, but not for the build order)
	:type decider: function
	:param scan: scanner function for the task
	:type scan: function
	:param install_path: installation path for the output nodes
	:type install_path: string
	"""
	ext_in = Utils.to_list(ext_in)
	ext_out = Utils.to_list(ext_out)
	if not name:
		name = rule
	cls = Task.task_factory(name, rule, color=color, ext_in=ext_in, ext_out=ext_out, before=before, after=after, scan=scan, shell=shell)

	def x_file(self, node):
		# Fixed: default _ext_in to None so that change_ext falls back to
		# its default suffix handling; previously _ext_in was unbound when
		# ext_in was empty, raising NameError in the loop below.
		_ext_in = ext_in[0] if ext_in else None

		tsk = self.create_task(name, node)
		cnt = 0

		# the decider may compute output extensions per input node
		ext = decider(self, node) if decider else cls.ext_out
		for x in ext:
			k = node.change_ext(x, ext_in=_ext_in)
			tsk.outputs.append(k)

			if reentrant != None:
				# re-inject only the first 'reentrant' outputs
				if cnt < int(reentrant):
					self.source.append(k)
			else:
				# reinject downstream files into the build
				for y in self.mappings: # ~ nfile * nextensions :-/
					if k.name.endswith(y):
						self.source.append(k)
						break
			cnt += 1

		if install_path:
			self.install_task = self.add_install_files(install_to=install_path, install_from=tsk.outputs)
		return tsk

	# register the processing function for each input extension
	for x in cls.ext_in:
		task_gen.mappings[x] = x_file
	return x_file
||||
|
||||
def taskgen_method(func):
	"""
	Decorator that binds a plain function to :py:class:`waflib.TaskGen.task_gen`
	as a new method, so it can be called on any task generator instance::

		from waflib.TaskGen import taskgen_method
		@taskgen_method
		def mymethod(self):
			pass

	:param func: function taking a task generator as its first parameter
	:type func: function
	:rtype: function
	:return: the same function, unchanged
	"""
	method_name = func.__name__
	setattr(task_gen, method_name, func)
	return func
||||
|
||||
def feature(*k):
	"""
	Decorator that binds the function to :py:class:`waflib.TaskGen.task_gen`
	and records it under each given feature name, so that ``post()`` will
	schedule it whenever a task generator lists that feature::

		from waflib.Task import feature
		@feature('myfeature')
		def myfunction(self):
			print('that is my feature!')
		def build(bld):
			bld(features='myfeature')

	:param k: feature names
	:type k: list of string
	"""
	def deco(func):
		method_name = func.__name__
		setattr(task_gen, method_name, func)
		for feature_name in k:
			feats[feature_name].add(method_name)
		return func
	return deco
||||
|
||||
def before_method(*k):
	"""
	Decorator that registers a task generator method which will be executed
	before the functions of given name(s), by recording the constraint in
	the precedence table :py:attr:`waflib.TaskGen.task_gen.prec`::

		from waflib.TaskGen import feature, before
		@feature('myfeature')
		@before_method('fun2')
		def fun1(self):
			print('feature 1!')
		@feature('myfeature')
		def fun2(self):
			print('feature 2!')
		def build(bld):
			bld(features='myfeature')

	:param k: method names
	:type k: list of string
	"""
	def deco(func):
		method_name = func.__name__
		setattr(task_gen, method_name, func)
		for other_name in k:
			# record: method_name must run before other_name
			successors = task_gen.prec[other_name]
			if method_name not in successors:
				successors.append(method_name)
		return func
	return deco
before = before_method
||||
|
||||
def after_method(*k):
	"""
	Decorator that registers a task generator method which will be executed
	after the functions of given name(s), by recording the constraint in
	the precedence table :py:attr:`waflib.TaskGen.task_gen.prec`::

		from waflib.TaskGen import feature, after
		@feature('myfeature')
		@after_method('fun2')
		def fun1(self):
			print('feature 1!')
		@feature('myfeature')
		def fun2(self):
			print('feature 2!')
		def build(bld):
			bld(features='myfeature')

	:param k: method names
	:type k: list of string
	"""
	def deco(func):
		method_name = func.__name__
		setattr(task_gen, method_name, func)
		for other_name in k:
			# record: method_name must run after other_name
			successors = task_gen.prec[method_name]
			if other_name not in successors:
				successors.append(other_name)
		return func
	return deco
after = after_method
||||
|
||||
def extension(*k):
	"""
	Decorator that binds the function to :py:class:`waflib.TaskGen.task_gen`
	and registers it in ``task_gen.mappings`` as the handler invoked during
	the processing of source files with the given extension(s)::

		from waflib import Task
		class mytask(Task):
			run_str = 'cp ${SRC} ${TGT}'
		@extension('.moo')
		def create_maa_file(self, node):
			self.create_task('mytask', node, node.change_ext('.maa'))
		def build(bld):
			bld(source='foo.moo')
	"""
	def deco(func):
		setattr(task_gen, func.__name__, func)
		for ext in k:
			task_gen.mappings[ext] = func
		return func
	return deco
||||
|
||||
# ---------------------------------------------------------------
|
||||
# The following methods are task generator methods commonly used
|
||||
# they are almost examples, the rest of waf core does not depend on them
|
||||
|
||||
@taskgen_method
def to_nodes(self, lst, path=None):
	"""
	Converts the input list into a list of nodes.
	It is used by :py:func:`waflib.TaskGen.process_source` and :py:func:`waflib.TaskGen.process_rule`.
	It is designed for source files, for folders, see :py:func:`waflib.Tools.ccroot.to_incnodes`:

	:param lst: input list
	:type lst: list of string and nodes
	:param path: path from which to search the nodes (by default, :py:attr:`waflib.TaskGen.task_gen.path`)
	:type path: :py:class:`waflib.Tools.Node.Node`
	:rtype: list of :py:class:`waflib.Tools.Node.Node`
	"""
	nodes = []
	base = path or self.path
	find = base.find_resource

	# a single node is wrapped into a list
	if isinstance(lst, Node.Node):
		lst = [lst]

	# strings are resolved against the base path, nodes pass through
	for item in Utils.to_list(lst):
		node = find(item) if isinstance(item, str) else item
		if not node:
			raise Errors.WafError("source not found: %r in %r" % (item, self))
		nodes.append(node)
	return nodes
||||
|
||||
@feature('*')
def process_source(self):
	"""
	Processes each element in the attribute ``source`` by extension.

	#. The *source* list is converted through :py:meth:`waflib.TaskGen.to_nodes` to a list of :py:class:`waflib.Node.Node` first.
	#. File extensions are mapped to methods having the signature: ``def meth(self, node)`` by :py:meth:`waflib.TaskGen.extension`
	#. The method is retrieved through :py:meth:`waflib.TaskGen.task_gen.get_hook`
	#. When called, the methods may modify self.source to append more source to process
	#. The mappings can map an extension or a filename (see the code below)
	"""
	self.source = self.to_nodes(getattr(self, 'source', []))
	for src_node in self.source:
		hook = self.get_hook(src_node)
		hook(self, src_node)
||||
|
||||
@feature('*')
@before_method('process_source')
def process_rule(self):
	"""
	Processes the attribute ``rule``. When present, :py:meth:`waflib.TaskGen.process_source` is disabled::

		def build(bld):
			bld(rule='cp ${SRC} ${TGT}', source='wscript', target='bar.txt')
	"""
	if not getattr(self, 'rule', None):
		return

	# create the task class
	name = str(getattr(self, 'name', None) or self.target or getattr(self.rule, '__name__', self.rule))

	# or we can put the class in a cache for performance reasons
	try:
		cache = self.bld.cache_rule_attr
	except AttributeError:
		cache = self.bld.cache_rule_attr = {}

	chmod = getattr(self, 'chmod', None)
	shell = getattr(self, 'shell', True)
	color = getattr(self, 'color', 'BLUE')
	scan = getattr(self, 'scan', None)
	_vars = getattr(self, 'vars', [])
	cls_str = getattr(self, 'cls_str', None)
	cls_keyword = getattr(self, 'cls_keyword', None)
	# NOTE: the default is the *string* 'True', which is simply truthy;
	# set cache_rule to a falsy value to disable class caching
	use_cache = getattr(self, 'cache_rule', 'True')

	# the cache key must distinguish scanner functions and 'deps' usage
	scan_val = has_deps = hasattr(self, 'deps')
	if scan:
		scan_val = id(scan)

	key = Utils.h_list((name, self.rule, chmod, shell, color, cls_str, cls_keyword, scan_val, _vars))

	cls = None
	if use_cache:
		try:
			cls = cache[key]
		except KeyError:
			pass
	if not cls:
		rule = self.rule
		if chmod is not None:
			# append a step that chmods the outputs after the rule runs
			def chmod_fun(tsk):
				for x in tsk.outputs:
					os.chmod(x.abspath(), tsk.generator.chmod)
			if isinstance(rule, tuple):
				rule = list(rule)
				rule.append(chmod_fun)
				rule = tuple(rule)
			else:
				rule = (rule, chmod_fun)

		cls = Task.task_factory(name, rule, _vars, shell=shell, color=color)

		if cls_str:
			setattr(cls, '__str__', self.cls_str)

		if cls_keyword:
			setattr(cls, 'keyword', self.cls_keyword)

		if scan:
			cls.scan = self.scan
		elif has_deps:
			# synthesize a scanner from the 'deps' attribute: the listed
			# files become additional dependencies of the task
			def scan(self):
				nodes = []
				for x in self.generator.to_list(getattr(self.generator, 'deps', None)):
					node = self.generator.path.find_resource(x)
					if not node:
						self.generator.bld.fatal('Could not find %r (was it declared?)' % x)
					nodes.append(node)
				return [nodes, []]
			cls.scan = scan

		# TODO use these values in the cache key if provided
		# (may cause excessive caching)
		for x in ('after', 'before', 'ext_in', 'ext_out'):
			setattr(cls, x, getattr(self, x, []))

		if use_cache:
			cache[key] = cls

	# now create one instance
	tsk = self.create_task(name)

	if getattr(self, 'timeout', None):
		tsk.timeout = self.timeout

	if getattr(self, 'always', None):
		tsk.always_run = True

	if getattr(self, 'target', None):
		if isinstance(self.target, str):
			self.target = self.target.split()
		if not isinstance(self.target, list):
			self.target = [self.target]
		for x in self.target:
			if isinstance(x, str):
				tsk.outputs.append(self.path.find_or_declare(x))
			else:
				x.parent.mkdir() # if a node was given, create the required folders
				tsk.outputs.append(x)
		if getattr(self, 'install_path', None):
			self.install_task = self.add_install_files(install_to=self.install_path,
				install_from=tsk.outputs, chmod=getattr(self, 'chmod', Utils.O644))

	if getattr(self, 'source', None):
		tsk.inputs = self.to_nodes(self.source)
		# bypass the execution of process_source by setting the source to an empty list
		self.source = []

	if getattr(self, 'cwd', None):
		tsk.cwd = self.cwd

	if isinstance(tsk.run, functools.partial):
		# Python documentation says: "partial objects defined in classes
		# behave like static methods and do not transform into bound
		# methods during instance attribute look-up."
		tsk.run = functools.partial(tsk.run, tsk)
||||
|
||||
|
||||
@feature('seq')
def sequence_order(self):
	"""
	Adds a strict sequential constraint between the tasks generated by task generators.
	It works because task generators are posted in order.
	It will not post objects which belong to other folders.

	Example::

		bld(features='javac seq')
		bld(features='jar seq')

	To start a new sequence, set the attribute seq_start, for example::

		obj = bld(features='seq')
		obj.seq_start = True

	Note that the method is executed in last position. This is more an
	example than a widely-used solution.
	"""
	# re-queue this method so it runs after all other methods of the
	# generator have created their tasks
	if self.meths and self.meths[-1] != 'sequence_order':
		self.meths.append('sequence_order')
		return

	if getattr(self, 'seq_start', None):
		return

	# all the tasks previously declared must be run before these
	if getattr(self.bld, 'prev', None):
		self.bld.prev.post()
		for x in self.bld.prev.tasks:
			for y in self.tasks:
				y.set_run_after(x)

	# remember this generator for the next one in the sequence
	self.bld.prev = self
||||
|
||||
|
||||
# Pattern matching @VAR@ placeholders in *.in template files.
# Raw string avoids the invalid '\w' escape (DeprecationWarning on Python 3.6+).
re_m4 = re.compile(r'@(\w+)@', re.M)
|
||||
|
||||
class subst_pc(Task.Task):
	"""
	Creates *.pc* files from *.pc.in*. The task is executed whenever an input variable used
	in the substitution changes.
	"""

	def force_permissions(self):
		"Private for the time being, we will probably refactor this into run_str=[run1,chmod]"
		if getattr(self.generator, 'chmod', None):
			for x in self.outputs:
				os.chmod(x.abspath(), self.generator.chmod)

	def run(self):
		"Substitutes variables in a .in file"

		# is_copy: plain byte-for-byte copy, no substitution at all
		if getattr(self.generator, 'is_copy', None):
			for i, x in enumerate(self.outputs):
				x.write(self.inputs[i].read('rb'), 'wb')
			self.force_permissions()
			return None

		# fun: the generator supplies the whole task body; non-zero return means failure
		if getattr(self.generator, 'fun', None):
			ret = self.generator.fun(self)
			if not ret:
				self.force_permissions()
			return ret

		code = self.inputs[0].read(encoding=getattr(self.generator, 'encoding', 'ISO8859-1'))
		# subst_fun: the generator transforms the text itself; None means "already written"
		if getattr(self.generator, 'subst_fun', None):
			code = self.generator.subst_fun(self, code)
			if code is not None:
				self.outputs[0].write(code, encoding=getattr(self.generator, 'encoding', 'ISO8859-1'))
			self.force_permissions()
			return None

		# replace all % by %% to prevent errors by % signs
		code = code.replace('%', '%%')

		# extract the vars foo into lst and replace @foo@ by %(foo)s
		lst = []
		def repl(match):
			g = match.group
			if g(1):
				lst.append(g(1))
				return "%%(%s)s" % g(1)
			return ''
		global re_m4
		code = getattr(self.generator, 're_m4', re_m4).sub(repl, code)

		try:
			d = self.generator.dct
		except AttributeError:
			# no explicit dict: resolve each @VAR@ from the generator attributes,
			# then env[VAR], then env[VAR.upper()], coercing lists to strings
			d = {}
			for x in lst:
				tmp = getattr(self.generator, x, '') or self.env[x] or self.env[x.upper()]
				try:
					tmp = ''.join(tmp)
				except TypeError:
					tmp = str(tmp)
				d[x] = tmp

		code = code % d
		self.outputs[0].write(code, encoding=getattr(self.generator, 'encoding', 'ISO8859-1'))
		# persist the variable names so sig_vars() can hash them on later runs
		self.generator.bld.raw_deps[self.uid()] = lst

		# make sure the signature is updated
		try: delattr(self, 'cache_sig')
		except AttributeError: pass

		self.force_permissions()

	def sig_vars(self):
		"""
		Compute a hash (signature) of the variables used in the substitution
		"""
		bld = self.generator.bld
		env = self.env
		upd = self.m.update

		# the substitution may be driven by user callables: hash those too
		if getattr(self.generator, 'fun', None):
			upd(Utils.h_fun(self.generator.fun))
		if getattr(self.generator, 'subst_fun', None):
			upd(Utils.h_fun(self.generator.subst_fun))

		# raw_deps: persistent custom values returned by the scanner
		vars = self.generator.bld.raw_deps.get(self.uid(), [])

		# hash both env vars and task generator attributes
		act_sig = bld.hash_env_vars(env, vars)
		upd(act_sig)

		lst = [getattr(self.generator, x, '') for x in vars]
		upd(Utils.h_list(lst))

		return self.m.digest()
|
||||
|
||||
@extension('.pc.in')
def add_pcfile(self, node):
	"""
	Processes *.pc.in* files to *.pc*. Installs the results to ``${PREFIX}/lib/pkgconfig/`` by default

		def build(bld):
			bld(source='foo.pc.in', install_path='${LIBDIR}/pkgconfig/')
	"""
	output = node.change_ext('.pc', '.pc.in')
	tsk = self.create_task('subst_pc', node, output)
	dest = getattr(self, 'install_path', '${LIBDIR}/pkgconfig/')
	self.install_task = self.add_install_files(install_to=dest, install_from=tsk.outputs)
|
||||
|
||||
class subst(subst_pc):
	# Task class used by the 'subst' feature below; an empty subclass of
	# subst_pc so constraints (before/after) can target it by its own name.
	pass
|
||||
|
||||
@feature('subst')
@before_method('process_source', 'process_rule')
def process_subst(self):
	"""
	Defines a transformation that substitutes the contents of *source* files to *target* files::

		def build(bld):
			bld(
				features='subst',
				source='foo.c.in',
				target='foo.c',
				install_path='${LIBDIR}/pkgconfig',
				VAR = 'val'
			)

	The input files are supposed to contain macros of the form *@VAR@*, where *VAR* is an argument
	of the task generator object.

	This method overrides the processing by :py:meth:`waflib.TaskGen.process_source`.
	"""

	src = Utils.to_list(getattr(self, 'source', []))
	if isinstance(src, Node.Node):
		src = [src]
	tgt = Utils.to_list(getattr(self, 'target', []))
	if isinstance(tgt, Node.Node):
		tgt = [tgt]
	# sources and targets are paired one-to-one
	if len(src) != len(tgt):
		raise Errors.WafError('invalid number of source/target for %r' % self)

	for x, y in zip(src, tgt):
		if not x or not y:
			raise Errors.WafError('null source or target for %r' % self)
		a, b = None, None

		if isinstance(x, str) and isinstance(y, str) and x == y:
			# same name in and out: read from the source dir, write to the build dir
			a = self.path.find_node(x)
			b = self.path.get_bld().make_node(y)
			if not os.path.isfile(b.abspath()):
				b.parent.mkdir()
		else:
			if isinstance(x, str):
				a = self.path.find_resource(x)
			elif isinstance(x, Node.Node):
				a = x
			if isinstance(y, str):
				b = self.path.find_or_declare(y)
			elif isinstance(y, Node.Node):
				b = y

		if not a:
			raise Errors.WafError('could not find %r for %r' % (x, self))

		has_constraints = False
		tsk = self.create_task('subst', a, b)
		# forward user-specified ordering constraints to the task
		for k in ('after', 'before', 'ext_in', 'ext_out'):
			val = getattr(self, k, None)
			if val:
				has_constraints = True
				setattr(tsk, k, val)

		# paranoid safety measure for the general case foo.in->foo.h with ambiguous dependencies
		if not has_constraints:
			# HEADER_EXTS is a module-level list of header suffixes — defined outside this chunk
			global HEADER_EXTS
			for xt in HEADER_EXTS:
				if b.name.endswith(xt):
					tsk.before = [k for k in ('c', 'cxx') if k in Task.classes]
					break

		inst_to = getattr(self, 'install_path', None)
		if inst_to:
			self.install_task = self.add_install_files(install_to=inst_to,
				install_from=b, chmod=getattr(self, 'chmod', Utils.O644))

	# bypass the execution of process_source by setting the source to an empty list
	self.source = []
|
7
third_party/waf/waflib/Tools/__init__.py
vendored
Normal file
7
third_party/waf/waflib/Tools/__init__.py
vendored
Normal file
@ -0,0 +1,7 @@
|
||||
#! /usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
|
||||
|
||||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# Thomas Nagy, 2005-2016 (ita)
|
27
third_party/waf/waflib/Tools/ar.py
vendored
Normal file
27
third_party/waf/waflib/Tools/ar.py
vendored
Normal file
@ -0,0 +1,27 @@
|
||||
#! /usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
|
||||
|
||||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# Thomas Nagy, 2006-2016 (ita)
|
||||
# Ralf Habacker, 2006 (rh)
|
||||
|
||||
"""
|
||||
The **ar** program creates static libraries. This tool is almost always loaded
|
||||
from others (C, C++, D, etc) for static library support.
|
||||
"""
|
||||
|
||||
from waflib.Configure import conf
|
||||
|
||||
@conf
def find_ar(conf):
	"""Configuration helper used by C/C++ tools to enable the support for static libraries"""
	conf.load('ar')
|
||||
|
||||
def configure(conf):
	"""Finds the ar program and sets the default flags in ``conf.env.ARFLAGS``"""
	conf.find_program('ar', var='AR')
	conf.add_os_flags('ARFLAGS')
	if not conf.env.ARFLAGS:
		# default: create/replace members and write an index
		conf.env.ARFLAGS = ['rcs']
|
77
third_party/waf/waflib/Tools/asm.py
vendored
Normal file
77
third_party/waf/waflib/Tools/asm.py
vendored
Normal file
@ -0,0 +1,77 @@
|
||||
#! /usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
|
||||
|
||||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# Thomas Nagy, 2008-2016 (ita)
|
||||
|
||||
"""
|
||||
Assembly support, used by tools such as gas and nasm
|
||||
|
||||
To declare targets using assembly::
|
||||
|
||||
def configure(conf):
|
||||
conf.load('gcc gas')
|
||||
|
||||
def build(bld):
|
||||
bld(
|
||||
features='c cstlib asm',
|
||||
source = 'test.S',
|
||||
target = 'asmtest')
|
||||
|
||||
bld(
|
||||
features='asm asmprogram',
|
||||
source = 'test.S',
|
||||
target = 'asmtest')
|
||||
|
||||
Support for pure asm programs and libraries should also work::
|
||||
|
||||
def configure(conf):
|
||||
conf.load('nasm')
|
||||
conf.find_program('ld', 'ASLINK')
|
||||
|
||||
def build(bld):
|
||||
bld(
|
||||
features='asm asmprogram',
|
||||
source = 'test.S',
|
||||
target = 'asmtest')
|
||||
"""
|
||||
|
||||
from waflib import Task
|
||||
from waflib.Tools.ccroot import link_task, stlink_task
|
||||
from waflib.TaskGen import extension
|
||||
|
||||
class asm(Task.Task):
	"""
	Compiles asm files by gas/nasm/yasm/...
	"""
	color = 'BLUE'
	run_str = '${AS} ${ASFLAGS} ${ASMPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${AS_SRC_F}${SRC} ${AS_TGT_F}${TGT}'
|
||||
|
||||
@extension('.s', '.S', '.asm', '.ASM', '.spp', '.SPP')
def asm_hook(self, node):
	"""
	Binds the asm extension to the asm task

	:param node: input file
	:type node: :py:class:`waflib.Node.Node`
	"""
	return self.create_compiled_task('asm', node)
|
||||
|
||||
class asmprogram(link_task):
	"Links object files into a c program"
	run_str = '${ASLINK} ${ASLINKFLAGS} ${ASLNK_TGT_F}${TGT} ${ASLNK_SRC_F}${SRC}'
	ext_out = ['.bin']
	inst_to = '${BINDIR}'
|
||||
|
||||
class asmshlib(asmprogram):
	"Links object files into a c shared library"
	# same link command as asmprogram, installed to the library directory instead
	inst_to = '${LIBDIR}'
|
||||
|
||||
class asmstlib(stlink_task):
	"Links object files into a c static library"
	pass # do not remove
|
||||
|
||||
def configure(conf):
	# include-path flag format consumed by ${ASMPATH_ST:INCPATHS} in asm.run_str
	conf.env.ASMPATH_ST = '-I%s'
|
52
third_party/waf/waflib/Tools/bison.py
vendored
Normal file
52
third_party/waf/waflib/Tools/bison.py
vendored
Normal file
@ -0,0 +1,52 @@
|
||||
#! /usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
|
||||
|
||||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# John O'Meara, 2006
|
||||
# Thomas Nagy 2009-2016 (ita)
|
||||
|
||||
"""
|
||||
The **bison** program is a code generator which creates C or C++ files.
|
||||
The generated files are compiled into object files.
|
||||
"""
|
||||
|
||||
from waflib import Task
|
||||
from waflib.TaskGen import extension
|
||||
|
||||
class bison(Task.Task):
	"""Compiles bison files"""
	color = 'BLUE'
	run_str = '${BISON} ${BISONFLAGS} ${SRC[0].abspath()} -o ${TGT[0].name}'
	ext_out = ['.h'] # just to make sure
|
||||
|
||||
@extension('.y', '.yc', '.yy')
def big_bison(self, node):
	"""
	Creates a bison task, which must be executed from the directory of the output file.
	"""
	# '-d' asks bison to also emit a header next to the parser source
	wants_header = '-d' in self.env.BISONFLAGS

	# C++ inputs (.yc) yield .cc/.hh outputs, C inputs yield .c/.h
	if node.name.endswith('.yc'):
		src_ext, hdr_ext = '.tab.cc', '.tab.hh'
	else:
		src_ext, hdr_ext = '.tab.c', '.tab.h'

	outputs = [node.change_ext(src_ext)]
	if wants_header:
		outputs.append(node.change_ext(hdr_ext))

	tsk = self.create_task('bison', node, outputs)
	tsk.cwd = node.parent.get_bld()

	# and the c/cxx file must be compiled too
	self.source.append(outputs[0])
|
||||
|
||||
def configure(conf):
	"""
	Detects the *bison* program
	"""
	conf.find_program('bison', var='BISON')
	# emit header files by default (see big_bison, which checks for '-d')
	conf.env.BISONFLAGS = ['-d']
|
42
third_party/waf/waflib/Tools/c.py
vendored
Normal file
42
third_party/waf/waflib/Tools/c.py
vendored
Normal file
@ -0,0 +1,42 @@
|
||||
#! /usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
|
||||
|
||||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# Thomas Nagy, 2006-2016 (ita)
|
||||
|
||||
"Base for c programs/libraries"
|
||||
|
||||
from waflib import TaskGen, Task
|
||||
from waflib.Tools import c_preproc
|
||||
from waflib.Tools.ccroot import link_task, stlink_task
|
||||
|
||||
@TaskGen.extension('.c')
def c_hook(self, node):
	"Binds the c file extensions create :py:class:`waflib.Tools.c.c` instances"
	# when only a C++ compiler is configured, compile .c files with it
	kind = 'cxx' if (self.env.CXX and not self.env.CC) else 'c'
	return self.create_compiled_task(kind, node)
|
||||
|
||||
class c(Task.Task):
	"Compiles C files into object files"
	run_str = '${CC} ${ARCH_ST:ARCH} ${CFLAGS} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${CC_SRC_F}${SRC} ${CC_TGT_F}${TGT[0].abspath()} ${CPPFLAGS}'
	vars = ['CCDEPS'] # unused variable to depend on, just in case
	ext_in = ['.h'] # set the build order easily by using ext_out=['.h']
	scan = c_preproc.scan
|
||||
|
||||
class cprogram(link_task):
	"Links object files into c programs"
	run_str = '${LINK_CC} ${LINKFLAGS} ${CCLNK_SRC_F}${SRC} ${CCLNK_TGT_F}${TGT[0].abspath()} ${RPATH_ST:RPATH} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${FRAMEWORK_ST:FRAMEWORK} ${ARCH_ST:ARCH} ${STLIB_MARKER} ${STLIBPATH_ST:STLIBPATH} ${STLIB_ST:STLIB} ${SHLIB_MARKER} ${LIBPATH_ST:LIBPATH} ${LIB_ST:LIB} ${LDFLAGS}'
	ext_out = ['.bin']
	vars = ['LINKDEPS']
	inst_to = '${BINDIR}'
|
||||
|
||||
class cshlib(cprogram):
	"Links object files into c shared libraries"
	# same link command as cprogram, installed to the library directory instead
	inst_to = '${LIBDIR}'
|
||||
|
||||
class cstlib(stlink_task):
	"Links object files into a c static libraries"
	pass # do not remove
|
147
third_party/waf/waflib/Tools/c_aliases.py
vendored
Normal file
147
third_party/waf/waflib/Tools/c_aliases.py
vendored
Normal file
@ -0,0 +1,147 @@
|
||||
#! /usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
|
||||
|
||||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# Thomas Nagy, 2005-2015 (ita)
|
||||
|
||||
"base for all c/c++ programs and libraries"
|
||||
|
||||
from waflib import Utils, Errors
|
||||
from waflib.Configure import conf
|
||||
|
||||
def get_extensions(lst):
	"""
	Returns the file extensions for the list of files given as input

	:param lst: files to process
	:list lst: list of string or :py:class:`waflib.Node.Node`
	:return: list of file extensions
	:rtype: list of string
	"""
	exts = []
	for item in Utils.to_list(lst):
		name = item if isinstance(item, str) else item.name
		# text after the last dot; the whole name when there is no dot
		exts.append(name[name.rfind('.') + 1:])
	return exts
|
||||
|
||||
def sniff_features(**kw):
	"""
	Computes and returns the features required for a task generator by
	looking at the file extensions. This aimed for C/C++ mainly::

		sniff_features(source=['foo.c', 'foo.cxx'], type='shlib')
		# returns ['cxx', 'c', 'cxxshlib', 'cshlib']

	:param source: source files to process
	:type source: list of string or :py:class:`waflib.Node.Node`
	:param type: object type in *program*, *shlib* or *stlib*
	:type type: string
	:return: the list of features for a task generator processing the source files
	:rtype: list of string
	"""
	exts = get_extensions(kw['source'])
	typ = kw['typ']
	feats = []

	# watch the order, cxx will have the precedence
	for x in 'cxx cpp c++ cc C'.split():
		if x in exts:
			feats.append('cxx')
			break

	if 'c' in exts or 'vala' in exts or 'gs' in exts:
		feats.append('c')

	for x in 'f f90 F F90 for FOR'.split():
		if x in exts:
			feats.append('fc')
			break

	if 'd' in exts:
		feats.append('d')

	if 'java' in exts:
		feats.append('java')
		return 'java'

	if typ in ('program', 'shlib', 'stlib'):
		will_link = False
		# iterate over a snapshot: the original appended 'cprogram'/'cshlib'/...
		# to feats while iterating it, which only terminated because the new
		# items never matched the test below — make the intent explicit
		for x in list(feats):
			if x in ('cxx', 'd', 'fc', 'c'):
				feats.append(x + typ)
				will_link = True
		if not will_link and not kw.get('features', []):
			raise Errors.WafError('Cannot link from %r, try passing eg: features="c cprogram"?' % kw)
	return feats
|
||||
|
||||
def set_features(kw, typ):
	"""
	Inserts data in the input dict *kw* based on existing data and on the type of target
	required (typ).

	:param kw: task generator parameters
	:type kw: dict
	:param typ: type of target
	:type typ: string
	"""
	kw['typ'] = typ
	# merge user-supplied features with the ones sniffed from the file extensions
	kw['features'] = Utils.to_list(kw.get('features', [])) + Utils.to_list(sniff_features(**kw))
|
||||
|
||||
@conf
def program(bld, *k, **kw):
	"""
	Alias for creating programs by looking at the file extensions::

		def build(bld):
			bld.program(source='foo.c', target='app')
			# equivalent to:
			# bld(features='c cprogram', source='foo.c', target='app')
	"""
	set_features(kw, 'program')
	return bld(*k, **kw)
|
||||
|
||||
@conf
def shlib(bld, *k, **kw):
	"""
	Alias for creating shared libraries by looking at the file extensions::

		def build(bld):
			bld.shlib(source='foo.c', target='app')
			# equivalent to:
			# bld(features='c cshlib', source='foo.c', target='app')
	"""
	set_features(kw, 'shlib')
	return bld(*k, **kw)
|
||||
|
||||
@conf
def stlib(bld, *k, **kw):
	"""
	Alias for creating static libraries by looking at the file extensions::

		def build(bld):
			bld.stlib(source='foo.cpp', target='app')
			# equivalent to:
			# bld(features='cxx cxxstlib', source='foo.cpp', target='app')
	"""
	set_features(kw, 'stlib')
	return bld(*k, **kw)
|
||||
|
||||
@conf
def objects(bld, *k, **kw):
	"""
	Alias for creating object files by looking at the file extensions::

		def build(bld):
			bld.objects(source='foo.c', target='app')
			# equivalent to:
			# bld(features='c', source='foo.c', target='app')
	"""
	set_features(kw, 'objects')
	return bld(*k, **kw)
|
1426
third_party/waf/waflib/Tools/c_config.py
vendored
Normal file
1426
third_party/waf/waflib/Tools/c_config.py
vendored
Normal file
File diff suppressed because it is too large
Load Diff
196
third_party/waf/waflib/Tools/c_osx.py
vendored
Normal file
196
third_party/waf/waflib/Tools/c_osx.py
vendored
Normal file
@ -0,0 +1,196 @@
|
||||
#! /usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
|
||||
|
||||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# Thomas Nagy 2008-2016 (ita)
|
||||
|
||||
"""
|
||||
MacOSX related tools
|
||||
"""
|
||||
|
||||
import os, shutil, platform
|
||||
from waflib import Task, Utils
|
||||
from waflib.TaskGen import taskgen_method, feature, after_method, before_method
|
||||
|
||||
app_info = '''
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<!DOCTYPE plist SYSTEM "file://localhost/System/Library/DTDs/PropertyList.dtd">
|
||||
<plist version="0.9">
|
||||
<dict>
|
||||
<key>CFBundlePackageType</key>
|
||||
<string>APPL</string>
|
||||
<key>CFBundleGetInfoString</key>
|
||||
<string>Created by Waf</string>
|
||||
<key>CFBundleSignature</key>
|
||||
<string>????</string>
|
||||
<key>NOTE</key>
|
||||
<string>THIS IS A GENERATED FILE, DO NOT MODIFY</string>
|
||||
<key>CFBundleExecutable</key>
|
||||
<string>{app_name}</string>
|
||||
</dict>
|
||||
</plist>
|
||||
'''
|
||||
"""
|
||||
plist template
|
||||
"""
|
||||
|
||||
@feature('c', 'cxx')
def set_macosx_deployment_target(self):
	"""
	see WAF issue 285 and also http://trac.macports.org/ticket/17059
	"""
	# env setting wins; otherwise, on darwin, derive "major.minor" from mac_ver()
	# unless the user already exported the variable
	if self.env.MACOSX_DEPLOYMENT_TARGET:
		os.environ['MACOSX_DEPLOYMENT_TARGET'] = self.env.MACOSX_DEPLOYMENT_TARGET
	elif 'MACOSX_DEPLOYMENT_TARGET' not in os.environ:
		if Utils.unversioned_sys_platform() == 'darwin':
			os.environ['MACOSX_DEPLOYMENT_TARGET'] = '.'.join(platform.mac_ver()[0].split('.')[:2])
|
||||
|
||||
@taskgen_method
def create_bundle_dirs(self, name, out):
	"""
	Creates bundle folders, used by :py:func:`create_task_macplist` and :py:func:`create_task_macapp`
	"""
	dir = out.parent.find_or_declare(name)
	dir.mkdir()
	macos = dir.find_or_declare(['Contents', 'MacOS'])
	macos.mkdir()
	return dir
|
||||
|
||||
def bundle_name_for_output(out):
	"""Return the ``.app`` bundle name derived from an output node's file name."""
	stem, dot, _ext = out.name.rpartition('.')
	if dot:
		# replace the existing extension with .app
		return stem + '.app'
	# no extension at all: just append .app
	return out.name + '.app'
|
||||
|
||||
@feature('cprogram', 'cxxprogram')
@after_method('apply_link')
def create_task_macapp(self):
	"""
	To compile an executable into a Mac application (a .app), set its *mac_app* attribute::

		def build(bld):
			bld.shlib(source='a.c', target='foo', mac_app=True)

	To force *all* executables to be transformed into Mac applications::

		def build(bld):
			bld.env.MACAPP = True
			bld.shlib(source='a.c', target='foo')
	"""
	if self.env.MACAPP or getattr(self, 'mac_app', False):
		out = self.link_task.outputs[0]

		name = bundle_name_for_output(out)
		dir = self.create_bundle_dirs(name, out)

		n1 = dir.find_or_declare(['Contents', 'MacOS', out.name])

		# copy the linked binary into Contents/MacOS and install it executable
		self.apptask = self.create_task('macapp', self.link_task.outputs, n1)
		inst_to = getattr(self, 'install_path', '/Applications') + '/%s/Contents/MacOS/' % name
		self.add_install_files(install_to=inst_to, install_from=n1, chmod=Utils.O755)

		if getattr(self, 'mac_files', None):
			# this only accepts files; they will be installed as seen from mac_files_root
			mac_files_root = getattr(self, 'mac_files_root', None)
			if isinstance(mac_files_root, str):
				mac_files_root = self.path.find_node(mac_files_root)
				if not mac_files_root:
					self.bld.fatal('Invalid mac_files_root %r' % self.mac_files_root)
			res_dir = n1.parent.parent.make_node('Resources')
			inst_to = getattr(self, 'install_path', '/Applications') + '/%s/Resources' % name
			for node in self.to_nodes(self.mac_files):
				relpath = node.path_from(mac_files_root or node.parent)
				self.create_task('macapp', node, res_dir.make_node(relpath))
				self.add_install_as(install_to=os.path.join(inst_to, relpath), install_from=node)

		if getattr(self.bld, 'is_install', None):
			# disable regular binary installation
			self.install_task.hasrun = Task.SKIP_ME
|
||||
|
||||
@feature('cprogram', 'cxxprogram')
@after_method('apply_link')
def create_task_macplist(self):
	"""
	Creates a :py:class:`waflib.Tools.c_osx.macplist` instance.
	"""
	if self.env.MACAPP or getattr(self, 'mac_app', False):
		out = self.link_task.outputs[0]

		name = bundle_name_for_output(out)

		dir = self.create_bundle_dirs(name, out)
		n1 = dir.find_or_declare(['Contents', 'Info.plist'])
		self.plisttask = plisttask = self.create_task('macplist', [], n1)
		# values substituted into the plist template by macplist.run()
		plisttask.context = {
			'app_name': self.link_task.outputs[0].name,
			'env': self.env
		}

		plist_ctx = getattr(self, 'plist_context', None)
		if (plist_ctx):
			plisttask.context.update(plist_ctx)

		# mac_plist may name a template file, or hold the template text itself
		if getattr(self, 'mac_plist', False):
			node = self.path.find_resource(self.mac_plist)
			if node:
				plisttask.inputs.append(node)
			else:
				plisttask.code = self.mac_plist
		else:
			plisttask.code = app_info

		inst_to = getattr(self, 'install_path', '/Applications') + '/%s/Contents/' % name
		self.add_install_files(install_to=inst_to, install_from=n1)
|
||||
|
||||
@feature('cshlib', 'cxxshlib')
@before_method('apply_link', 'propagate_uselib_vars')
def apply_bundle(self):
	"""
	To make a bundled shared library (a ``.bundle``), set the *mac_bundle* attribute::

		def build(bld):
			bld.shlib(source='a.c', target='foo', mac_bundle = True)

	To force *all* executables to be transformed into bundles::

		def build(bld):
			bld.env.MACBUNDLE = True
			bld.shlib(source='a.c', target='foo')
	"""
	if self.env.MACBUNDLE or getattr(self, 'mac_bundle', False):
		self.env.LINKFLAGS_cshlib = self.env.LINKFLAGS_cxxshlib = [] # disable the '-dynamiclib' flag
		self.env.cshlib_PATTERN = self.env.cxxshlib_PATTERN = self.env.macbundle_PATTERN
		use = self.use = self.to_list(getattr(self, 'use', []))
		if not 'MACBUNDLE' in use:
			use.append('MACBUNDLE')
|
||||
|
||||
# standard folder layout inside a .app bundle
app_dirs = ['Contents', 'Contents/MacOS', 'Contents/Resources']
|
||||
|
||||
class macapp(Task.Task):
	"""
	Creates mac applications
	"""
	color = 'PINK'
	def run(self):
		# copy the input (binary or resource) into the bundle, preserving metadata
		self.outputs[0].parent.mkdir()
		shutil.copy2(self.inputs[0].srcpath(), self.outputs[0].abspath())
|
||||
|
||||
class macplist(Task.Task):
	"""
	Creates plist files
	"""
	color = 'PINK'
	ext_in = ['.bin'] # NOTE(review): ordering hint — presumably runs after binaries are produced; confirm
	def run(self):
		# template text comes either from self.code or from the input node
		if getattr(self, 'code', None):
			txt = self.code
		else:
			txt = self.inputs[0].read()
		# substitute {app_name}, {env}, ... set by create_task_macplist
		context = getattr(self, 'context', {})
		txt = txt.format(**context)
		self.outputs[0].write(txt)
|
1058
third_party/waf/waflib/Tools/c_preproc.py
vendored
Normal file
1058
third_party/waf/waflib/Tools/c_preproc.py
vendored
Normal file
File diff suppressed because it is too large
Load Diff
232
third_party/waf/waflib/Tools/c_tests.py
vendored
Normal file
232
third_party/waf/waflib/Tools/c_tests.py
vendored
Normal file
@ -0,0 +1,232 @@
|
||||
#! /usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
|
||||
|
||||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# Thomas Nagy, 2016 (ita)
|
||||
|
||||
"""
|
||||
Various configuration tests.
|
||||
"""
|
||||
|
||||
from waflib import Task
|
||||
from waflib.Configure import conf
|
||||
from waflib.TaskGen import feature, before_method, after_method
|
||||
|
||||
LIB_CODE = '''
|
||||
#ifdef _MSC_VER
|
||||
#define testEXPORT __declspec(dllexport)
|
||||
#else
|
||||
#define testEXPORT
|
||||
#endif
|
||||
testEXPORT int lib_func(void) { return 9; }
|
||||
'''
|
||||
|
||||
MAIN_CODE = '''
|
||||
#ifdef _MSC_VER
|
||||
#define testEXPORT __declspec(dllimport)
|
||||
#else
|
||||
#define testEXPORT
|
||||
#endif
|
||||
testEXPORT int lib_func(void);
|
||||
int main(int argc, char **argv) {
|
||||
(void)argc; (void)argv;
|
||||
return !(lib_func() == 9);
|
||||
}
|
||||
'''
|
||||
|
||||
@feature('link_lib_test')
@before_method('process_source')
def link_lib_test_fun(self):
	"""
	The configuration test :py:func:`waflib.Configure.run_build` declares a unique task generator,
	so we need to create other task generators from here to check if the linker is able to link libraries.
	"""
	def write_test_file(task):
		# dump the C fragment attached to the generator into the target file
		task.outputs[0].write(task.generator.code)

	rpath = []
	if getattr(self, 'add_rpath', False):
		rpath = [self.bld.path.get_bld().abspath()]

	mode = self.mode
	m = '%s %s' % (mode, mode)
	ex = self.test_exec and 'test_exec' or ''
	bld = self.bld
	# a shared library built from LIB_CODE, and a program linking against it
	bld(rule=write_test_file, target='test.' + mode, code=LIB_CODE)
	bld(rule=write_test_file, target='main.' + mode, code=MAIN_CODE)
	bld(features='%sshlib' % m, source='test.' + mode, target='test')
	bld(features='%sprogram %s' % (m, ex), source='main.' + mode, target='app', use='test', rpath=rpath)
|
||||
|
||||
@conf
def check_library(self, mode=None, test_exec=True):
	"""
	Checks if libraries can be linked with the current linker. Uses :py:func:`waflib.Tools.c_tests.link_lib_test_fun`.

	:param mode: c or cxx or d
	:type mode: string
	"""
	if not mode:
		# default to the configured compiler: cxx when present, else c
		mode = 'c'
		if self.env.CXX:
			mode = 'cxx'
	self.check(
		compile_filename = [],
		features = 'link_lib_test',
		msg = 'Checking for libraries',
		mode = mode,
		test_exec = test_exec)
|
||||
|
||||
########################################################################################
|
||||
|
||||
INLINE_CODE = '''
|
||||
typedef int foo_t;
|
||||
static %s foo_t static_foo () {return 0; }
|
||||
%s foo_t foo () {
|
||||
return 0;
|
||||
}
|
||||
'''
|
||||
# candidate spellings tried in order by check_inline()
INLINE_VALUES = ['inline', '__inline__', '__inline']
|
||||
|
||||
@conf
def check_inline(self, **kw):
	"""
	Checks for the right value for inline macro.
	Define INLINE_MACRO to 1 if the define is found.
	If the inline macro is not 'inline', add a define to the ``config.h`` (#define inline __inline__)

	:param define_name: define INLINE_MACRO by default to 1 if the macro is defined
	:type define_name: string
	:param features: by default *c* or *cxx* depending on the compiler present
	:type features: list of string
	"""
	self.start_msg('Checking for inline')

	if not 'define_name' in kw:
		kw['define_name'] = 'INLINE_MACRO'
	if not 'features' in kw:
		if self.env.CXX:
			kw['features'] = ['cxx']
		else:
			kw['features'] = ['c']

	# try each candidate keyword until one compiles
	for x in INLINE_VALUES:
		kw['fragment'] = INLINE_CODE % (x, x)

		try:
			self.check(**kw)
		except self.errors.ConfigurationError:
			continue
		else:
			self.end_msg(x)
			# non-standard spelling: map 'inline' onto it in config.h
			if x != 'inline':
				self.define('inline', x, quote=False)
			return x
	self.fatal('could not use inline functions')
|
||||
|
||||
########################################################################################
|
||||
|
||||
LARGE_FRAGMENT = '''#include <unistd.h>
|
||||
int main(int argc, char **argv) {
|
||||
(void)argc; (void)argv;
|
||||
return !(sizeof(off_t) >= 8);
|
||||
}
|
||||
'''
|
||||
|
||||
@conf
def check_large_file(self, **kw):
	"""
	Checks for large file support and define the macro HAVE_LARGEFILE
	The test is skipped on win32 systems (DEST_BINFMT == pe).

	:param define_name: define to set, by default *HAVE_LARGEFILE*
	:type define_name: string
	:param execute: execute the test (yes by default)
	:type execute: bool
	"""
	if not 'define_name' in kw:
		kw['define_name'] = 'HAVE_LARGEFILE'
	if not 'execute' in kw:
		kw['execute'] = True

	if not 'features' in kw:
		if self.env.CXX:
			kw['features'] = ['cxx', 'cxxprogram']
		else:
			kw['features'] = ['c', 'cprogram']

	kw['fragment'] = LARGE_FRAGMENT

	# first attempt: large files work out of the box (skipped for PE targets)
	kw['msg'] = 'Checking for large file support'
	ret = True
	try:
		if self.env.DEST_BINFMT != 'pe':
			ret = self.check(**kw)
	except self.errors.ConfigurationError:
		pass
	else:
		if ret:
			return True

	# second attempt: enable 64-bit offsets explicitly
	kw['msg'] = 'Checking for -D_FILE_OFFSET_BITS=64'
	kw['defines'] = ['_FILE_OFFSET_BITS=64']
	try:
		ret = self.check(**kw)
	except self.errors.ConfigurationError:
		pass
	else:
		self.define('_FILE_OFFSET_BITS', 64)
		return ret

	self.fatal('There is no support for large files')
|
||||
|
||||
########################################################################################
|
||||
|
||||
ENDIAN_FRAGMENT = '''
|
||||
short int ascii_mm[] = { 0x4249, 0x4765, 0x6E44, 0x6961, 0x6E53, 0x7953, 0 };
|
||||
short int ascii_ii[] = { 0x694C, 0x5454, 0x656C, 0x6E45, 0x6944, 0x6E61, 0 };
|
||||
int use_ascii (int i) {
|
||||
return ascii_mm[i] + ascii_ii[i];
|
||||
}
|
||||
short int ebcdic_ii[] = { 0x89D3, 0xE3E3, 0x8593, 0x95C5, 0x89C4, 0x9581, 0 };
|
||||
short int ebcdic_mm[] = { 0xC2C9, 0xC785, 0x95C4, 0x8981, 0x95E2, 0xA8E2, 0 };
|
||||
int use_ebcdic (int i) {
|
||||
return ebcdic_mm[i] + ebcdic_ii[i];
|
||||
}
|
||||
extern int foo;
|
||||
'''
|
||||
|
||||
class grep_for_endianness(Task.Task):
	"""
	Scans a compiled binary for the marker strings planted by ENDIAN_FRAGMENT
	and records the detected byte order on the task generator
	"""
	color = 'PINK'
	def run(self):
		data = self.inputs[0].read(flags='rb').decode('iso8859-1')
		if 'LiTTleEnDian' in data:
			self.generator.tmp.append('little')
		elif 'BIGenDianSyS' in data:
			self.generator.tmp.append('big')
		else:
			# neither marker present: signal failure to the scheduler
			return -1
|
||||
|
||||
@feature('grep_for_endianness')
@after_method('process_source')
def grep_for_endianness_fun(self):
	"""
	Used by the endianness configuration test: attaches a
	grep_for_endianness task to the first object file produced.
	"""
	self.create_task('grep_for_endianness', self.compiled_tasks[0].outputs[0])
|
||||
|
||||
@conf
def check_endianness(self):
	"""
	Executes a configuration test to determine the endianness.

	:return: either 'little' or 'big', as appended to *tmp* by the
		grep_for_endianness task
	"""
	tmp = []
	def check_msg(self):
		# displayed as the result of the check; tmp is filled in by the
		# grep_for_endianness task before okmsg is evaluated
		return tmp[0]
	self.check(fragment=ENDIAN_FRAGMENT, features='c grep_for_endianness',
		msg='Checking for endianness', define='ENDIANNESS', tmp=tmp, okmsg=check_msg)
	return tmp[0]
|
772
third_party/waf/waflib/Tools/ccroot.py
vendored
Normal file
772
third_party/waf/waflib/Tools/ccroot.py
vendored
Normal file
@ -0,0 +1,772 @@
|
||||
#! /usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
|
||||
|
||||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# Thomas Nagy, 2005-2016 (ita)
|
||||
|
||||
"""
|
||||
Classes and methods shared by tools providing support for C-like language such
|
||||
as C/C++/D/Assembly/Go (this support module is almost never used alone).
|
||||
"""
|
||||
|
||||
import os, re
|
||||
from waflib import Task, Utils, Node, Errors, Logs
|
||||
from waflib.TaskGen import after_method, before_method, feature, taskgen_method, extension
|
||||
from waflib.Tools import c_aliases, c_preproc, c_config, c_osx, c_tests
|
||||
from waflib.Configure import conf
|
||||
|
||||
# Default folders searched for system libraries by process_lib()
SYSTEM_LIB_PATHS = ['/usr/lib64', '/usr/lib', '/usr/local/lib64', '/usr/local/lib']

USELIB_VARS = Utils.defaultdict(set)
"""
Mapping for features to :py:class:`waflib.ConfigSet.ConfigSet` variables. See :py:func:`waflib.Tools.ccroot.propagate_uselib_vars`.
"""

# compilation flags, per language
USELIB_VARS['c'] = set(['INCLUDES', 'FRAMEWORKPATH', 'DEFINES', 'CPPFLAGS', 'CCDEPS', 'CFLAGS', 'ARCH'])
USELIB_VARS['cxx'] = set(['INCLUDES', 'FRAMEWORKPATH', 'DEFINES', 'CPPFLAGS', 'CXXDEPS', 'CXXFLAGS', 'ARCH'])
USELIB_VARS['d'] = set(['INCLUDES', 'DFLAGS'])
USELIB_VARS['includes'] = set(['INCLUDES', 'FRAMEWORKPATH', 'ARCH'])

# link flags, per target kind (program / shared lib / static lib)
USELIB_VARS['cprogram'] = USELIB_VARS['cxxprogram'] = set(['LIB', 'STLIB', 'LIBPATH', 'STLIBPATH', 'LINKFLAGS', 'RPATH', 'LINKDEPS', 'FRAMEWORK', 'FRAMEWORKPATH', 'ARCH', 'LDFLAGS'])
USELIB_VARS['cshlib'] = USELIB_VARS['cxxshlib'] = set(['LIB', 'STLIB', 'LIBPATH', 'STLIBPATH', 'LINKFLAGS', 'RPATH', 'LINKDEPS', 'FRAMEWORK', 'FRAMEWORKPATH', 'ARCH', 'LDFLAGS'])
USELIB_VARS['cstlib'] = USELIB_VARS['cxxstlib'] = set(['ARFLAGS', 'LINKDEPS'])

USELIB_VARS['dprogram'] = set(['LIB', 'STLIB', 'LIBPATH', 'STLIBPATH', 'LINKFLAGS', 'RPATH', 'LINKDEPS'])
USELIB_VARS['dshlib'] = set(['LIB', 'STLIB', 'LIBPATH', 'STLIBPATH', 'LINKFLAGS', 'RPATH', 'LINKDEPS'])
USELIB_VARS['dstlib'] = set(['ARFLAGS', 'LINKDEPS'])

USELIB_VARS['asm'] = set(['ASFLAGS'])
|
||||
|
||||
# =================================================================================================
|
||||
|
||||
@taskgen_method
def create_compiled_task(self, name, node):
	"""
	Create the compilation task: c, cxx, asm, etc. The output node is created automatically (object file with a typical **.o** extension).
	The task is appended to the list *compiled_tasks* which is then used by :py:func:`waflib.Tools.ccroot.apply_link`

	:param name: name of the task class
	:type name: string
	:param node: the file to compile
	:type node: :py:class:`waflib.Node.Node`
	:return: The task created
	:rtype: :py:class:`waflib.Task.Task`
	"""
	# self.idx makes object names unique when one source appears in
	# several task generators
	out_name = '%s.%d.o' % (node.name, self.idx)
	task = self.create_task(name, node, node.parent.find_or_declare(out_name))
	tasks = getattr(self, 'compiled_tasks', None)
	if tasks is None:
		self.compiled_tasks = tasks = []
	tasks.append(task)
	return task
|
||||
|
||||
@taskgen_method
def to_incnodes(self, inlst):
	"""
	Task generator method provided to convert a list of string/nodes into a list of includes folders.

	The paths are assumed to be relative to the task generator path, except if they begin by **#**
	in which case they are searched from the top-level directory (``bld.srcnode``).
	The folders are simply assumed to be existing.

	The node objects in the list are returned in the output list. The strings are converted
	into node objects if possible. The node is searched from the source directory, and if a match is found,
	the equivalent build directory is created and added to the returned list too. When a folder cannot be found, it is ignored.

	:param inlst: list of folders
	:type inlst: space-delimited string or a list of string/nodes
	:rtype: list of :py:class:`waflib.Node.Node`
	:return: list of include folders as nodes
	"""
	nodes = []
	done = set()
	for item in self.to_list(inlst):
		# skip empty entries and duplicates
		if not item or item in done:
			continue
		done.add(item)

		# with a real lot of targets, it is sometimes interesting to cache the results below
		if isinstance(item, Node.Node):
			nodes.append(item)
		elif os.path.isabs(item):
			nodes.append(self.bld.root.make_node(item) or item)
		else:
			if item[0] == '#':
				# '#' prefix: resolve from the top-level directory
				bld_node = self.bld.bldnode.make_node(item[1:])
				src_node = self.bld.srcnode.make_node(item[1:])
			else:
				bld_node = self.path.get_bld().make_node(item)
				src_node = self.path.make_node(item)
			if bld_node.is_child_of(self.bld.bldnode):
				# ensure the build counterpart exists (generated headers)
				bld_node.mkdir()
			nodes.append(bld_node)
			nodes.append(src_node)
	return nodes
|
||||
|
||||
@feature('c', 'cxx', 'd', 'asm', 'fc', 'includes')
@after_method('propagate_uselib_vars', 'process_source')
def apply_incpaths(self):
	"""
	Task generator method that processes the attribute *includes*::

		tg = bld(features='includes', includes='.')

	The folders only need to be relative to the current directory, the equivalent build directory is
	added automatically (for headers created in the build directory). This enable using a build directory
	or not (``top == out``).

	This method will add a list of nodes read by :py:func:`waflib.Tools.ccroot.to_incnodes` in ``tg.env.INCPATHS``,
	and the list of include paths in ``tg.env.INCLUDES``.
	"""
	nodes = self.to_incnodes(self.to_list(getattr(self, 'includes', [])) + self.env.INCLUDES)
	self.includes_nodes = nodes
	cwd = self.get_cwd()
	# compiler flags use paths relative to the launch directory
	self.env.INCPATHS = [node.path_from(cwd) for node in nodes]
|
||||
|
||||
class link_task(Task.Task):
	"""
	Base class for all link tasks. A task generator is supposed to have at most one link task bound in the attribute *link_task*. See :py:func:`waflib.Tools.ccroot.apply_link`.

	.. inheritance-diagram:: waflib.Tools.ccroot.stlink_task waflib.Tools.c.cprogram waflib.Tools.c.cshlib waflib.Tools.cxx.cxxstlib waflib.Tools.cxx.cxxprogram waflib.Tools.cxx.cxxshlib waflib.Tools.d.dprogram waflib.Tools.d.dshlib waflib.Tools.d.dstlib waflib.Tools.ccroot.fake_shlib waflib.Tools.ccroot.fake_stlib waflib.Tools.asm.asmprogram waflib.Tools.asm.asmshlib waflib.Tools.asm.asmstlib
	"""
	color = 'YELLOW'

	inst_to = None
	"""Default installation path for the link task outputs, or None to disable"""

	chmod = Utils.O755
	"""Default installation mode for the link task outputs"""

	def add_target(self, target):
		"""
		Process the *target* attribute to add the platform-specific prefix/suffix such as *.so* or *.exe*.
		The settings are retrieved from ``env.clsname_PATTERN``
		"""
		if isinstance(target, str):
			base = self.generator.path
			if target.startswith('#'):
				# for those who like flat structures
				target = target[1:]
				base = self.generator.bld.bldnode

			# e.g. env.cxxshlib_PATTERN == 'lib%s.so' on elf platforms
			pattern = self.env[self.__class__.__name__ + '_PATTERN']
			if not pattern:
				pattern = '%s'
			folder, name = os.path.split(target)

			if self.__class__.__name__.find('shlib') > 0 and getattr(self.generator, 'vnum', None):
				nums = self.generator.vnum.split('.')
				if self.env.DEST_BINFMT == 'pe':
					# include the version in the dll file name,
					# the import lib file name stays unversionned.
					name = name + '-' + nums[0]
				elif self.env.DEST_OS == 'openbsd':
					# OpenBSD embeds major[.minor] in the library file name
					pattern = '%s.%s' % (pattern, nums[0])
					if len(nums) >= 2:
						pattern += '.%s' % nums[1]

			if folder:
				tmp = folder + os.sep + pattern % name
			else:
				tmp = pattern % name
			target = base.find_or_declare(tmp)
		self.set_outputs(target)

	def exec_command(self, *k, **kw):
		# run the linker, then embed the manifest if requested (msvc-like)
		ret = super(link_task, self).exec_command(*k, **kw)
		if not ret and self.env.DO_MANIFEST:
			ret = self.exec_mf()
		return ret

	def exec_mf(self):
		"""
		Create manifest files for VS-like compilers (msvc, ifort, ...)
		"""
		if not self.env.MT:
			return 0

		manifest = None
		for out_node in self.outputs:
			if out_node.name.endswith('.manifest'):
				manifest = out_node.abspath()
				break
		else:
			# Should never get here. If we do, it means the manifest file was
			# never added to the outputs list, thus we don't have a manifest file
			# to embed, so we just return.
			return 0

		# embedding mode. Different for EXE's and DLL's.
		# see: http://msdn2.microsoft.com/en-us/library/ms235591(VS.80).aspx
		mode = ''
		for x in Utils.to_list(self.generator.features):
			if x in ('cprogram', 'cxxprogram', 'fcprogram', 'fcprogram_test'):
				mode = 1
			elif x in ('cshlib', 'cxxshlib', 'fcshlib'):
				mode = 2

		Logs.debug('msvc: embedding manifest in mode %r', mode)

		# '[] +' forces a copy so the env list is not mutated
		lst = [] + self.env.MT
		lst.extend(Utils.to_list(self.env.MTFLAGS))
		lst.extend(['-manifest', manifest])
		lst.append('-outputresource:%s;%s' % (self.outputs[0].abspath(), mode))

		return super(link_task, self).exec_command(lst)
|
||||
|
||||
class stlink_task(link_task):
	"""
	Base for static link tasks, which use *ar* most of the time.
	The target is always removed before being written.
	"""
	run_str = '${AR} ${ARFLAGS} ${AR_TGT_F}${TGT} ${AR_SRC_F}${SRC}'

	chmod = Utils.O644
	"""Default installation mode for the static libraries"""

def rm_tgt(cls):
	# Wrap cls.run so the archive is deleted before ar is invoked;
	# appending to an existing archive would keep stale members around.
	old = cls.run
	def wrap(self):
		try:
			os.remove(self.outputs[0].abspath())
		except OSError:
			pass
		return old(self)
	setattr(cls, 'run', wrap)
rm_tgt(stlink_task)
|
||||
|
||||
@feature('c', 'cxx', 'd', 'fc', 'asm')
@after_method('process_source')
def apply_link(self):
	"""
	Collect the tasks stored in ``compiled_tasks`` (created by :py:func:`waflib.Tools.ccroot.create_compiled_task`), and
	use the outputs for a new instance of :py:class:`waflib.Tools.ccroot.link_task`. The class to use is the first link task
	matching a name from the attribute *features*, for example::

		def build(bld):
			tg = bld(features='cxx cxxprogram cprogram', source='main.c', target='app')

	will create the task ``tg.link_task`` as a new instance of :py:class:`waflib.Tools.cxx.cxxprogram`
	"""
	for feat in self.features:
		# limited compat: c link features on mixed c/c++ targets map to c++
		if feat == 'cprogram' and 'cxx' in self.features:
			feat = 'cxxprogram'
		elif feat == 'cshlib' and 'cxx' in self.features:
			feat = 'cxxshlib'

		cls = Task.classes.get(feat)
		if cls is not None and issubclass(cls, link_task):
			link = feat
			break
	else:
		# no link-task feature found: nothing to link
		return

	objs = [t.outputs[0] for t in getattr(self, 'compiled_tasks', [])]
	self.link_task = self.create_task(link, objs)
	self.link_task.add_target(self.target)

	# remember that the install paths are given by the task generators
	try:
		inst_to = self.install_path
	except AttributeError:
		inst_to = self.link_task.__class__.inst_to
	if inst_to:
		# install a copy of the node list we have at this moment (implib not added)
		self.install_task = self.add_install_files(
			install_to=inst_to, install_from=self.link_task.outputs[:],
			chmod=self.link_task.chmod, task=self.link_task)
|
||||
|
||||
@taskgen_method
def use_rec(self, name, **kw):
	"""
	Processes the ``use`` keyword recursively. This method is kind of private and only meant to be used from ``process_use``
	"""
	if name in self.tmp_use_not or name in self.tmp_use_seen:
		return

	try:
		tg = self.bld.get_tgen_by_name(name)
	except Errors.WafError:
		# not a task generator name: treat it as a plain uselib entry
		self.uselib.append(name)
		self.tmp_use_not.add(name)
		return

	self.tmp_use_seen.append(name)
	tg.post()

	# bind temporary attributes on the task generator
	tg.tmp_use_objects = objects = kw.get('objects', True)
	tg.tmp_use_stlib = stlib = kw.get('stlib', True)
	try:
		link_task = tg.link_task
	except AttributeError:
		# object-only generator: nothing to link against
		tg.tmp_use_var = ''
	else:
		objects = False
		if not isinstance(link_task, stlink_task):
			stlib = False
			tg.tmp_use_var = 'LIB'
		else:
			tg.tmp_use_var = 'STLIB'

	prec = self.tmp_use_prec
	for dep in self.to_list(getattr(tg, 'use', [])):
		# system libraries declared through STLIB_xxx need no recursion
		if self.env["STLIB_" + dep]:
			continue
		prec.setdefault(dep, []).append(name)
		self.use_rec(dep, objects=objects, stlib=stlib)
|
||||
|
||||
@feature('c', 'cxx', 'd', 'use', 'fc')
@before_method('apply_incpaths', 'propagate_uselib_vars')
@after_method('apply_link', 'process_source')
def process_use(self):
	"""
	Process the ``use`` attribute which contains a list of task generator names::

		def build(bld):
			bld.shlib(source='a.c', target='lib1')
			bld.program(source='main.c', target='app', use='lib1')

	See :py:func:`waflib.Tools.ccroot.use_rec`.
	"""
	use_not = self.tmp_use_not = set()
	self.tmp_use_seen = [] # we would like an ordered set
	use_prec = self.tmp_use_prec = {}
	self.uselib = self.to_list(getattr(self, 'uselib', []))
	self.includes = self.to_list(getattr(self, 'includes', []))
	names = self.to_list(getattr(self, 'use', []))

	# explore the dependency graph; use_rec fills tmp_use_seen and the
	# precedence constraints in use_prec
	for x in names:
		self.use_rec(x)

	# names that turned out not to be task generators impose no ordering
	for x in use_not:
		if x in use_prec:
			del use_prec[x]

	# topological sort
	out = self.tmp_use_sorted = []
	tmp = []
	for x in self.tmp_use_seen:
		# seed with the nodes that nothing must precede
		for k in use_prec.values():
			if x in k:
				break
		else:
			tmp.append(x)

	while tmp:
		e = tmp.pop()
		out.append(e)
		try:
			nlst = use_prec[e]
		except KeyError:
			pass
		else:
			del use_prec[e]
			# release successors whose last constraint was just removed
			for x in nlst:
				for y in use_prec:
					if x in use_prec[y]:
						break
				else:
					tmp.append(x)
	if use_prec:
		# leftover constraints mean the graph was cyclic
		raise Errors.WafError('Cycle detected in the use processing %r' % use_prec)
	out.reverse()

	link_task = getattr(self, 'link_task', None)
	for x in out:
		y = self.bld.get_tgen_by_name(x)
		var = y.tmp_use_var
		if var and link_task:
			# direct uses and static libs propagate; shared libs always do
			if var == 'LIB' or y.tmp_use_stlib or x in names:
				# strip any leading folder from the target name
				self.env.append_value(var, [y.target[y.target.rfind(os.sep) + 1:]])
				self.link_task.dep_nodes.extend(y.link_task.outputs)
				tmp_path = y.link_task.outputs[0].parent.path_from(self.get_cwd())
				self.env.append_unique(var + 'PATH', [tmp_path])
		else:
			if y.tmp_use_objects:
				self.add_objects_from_tgen(y)

		if getattr(y, 'export_includes', None):
			self.includes.extend(y.to_incnodes(y.export_includes))

		if getattr(y, 'export_defines', None):
			self.env.append_value('DEFINES', self.to_list(y.export_defines))


	# and finally, add the use variables (no recursion needed)
	for x in names:
		try:
			y = self.bld.get_tgen_by_name(x)
		except Errors.WafError:
			# not a task generator: propagate as a uselib name
			if not self.env['STLIB_' + x] and not x in self.uselib:
				self.uselib.append(x)
		else:
			# propagate the uselib names of direct dependencies
			for k in self.to_list(getattr(y, 'use', [])):
				if not self.env['STLIB_' + k] and not k in self.uselib:
					self.uselib.append(k)
|
||||
|
||||
@taskgen_method
def accept_node_to_link(self, node):
	"""
	PRIVATE INTERNAL USE ONLY

	:return: whether *node* may be passed to the linker
		(.pdb debug files are compiler outputs, not linker inputs)
	"""
	return not node.name.endswith('.pdb')
|
||||
|
||||
@taskgen_method
def add_objects_from_tgen(self, tg):
	"""
	Add the objects from the depending compiled tasks as link task inputs.

	Some objects are filtered: for instance, .pdb files are added
	to the compiled tasks but not to the link tasks (to avoid errors)
	PRIVATE INTERNAL USE ONLY
	"""
	try:
		link_task = self.link_task
	except AttributeError:
		# no link task on this generator: nothing to feed
		return
	for ctask in getattr(tg, 'compiled_tasks', []):
		for out in ctask.outputs:
			if self.accept_node_to_link(out):
				link_task.inputs.append(out)
|
||||
|
||||
@taskgen_method
def get_uselib_vars(self):
	"""
	:return: the *uselib* variables associated to the *features* attribute (see :py:attr:`waflib.Tools.ccroot.USELIB_VARS`)
	:rtype: set of string
	"""
	_vars = set()
	for feat in self.features:
		# .get avoids inserting empty sets into the defaultdict
		_vars.update(USELIB_VARS.get(feat, ()))
	return _vars
|
||||
|
||||
@feature('c', 'cxx', 'd', 'fc', 'javac', 'cs', 'uselib', 'asm')
@after_method('process_use')
def propagate_uselib_vars(self):
	"""
	Process uselib variables for adding flags. For example, the following target::

		def build(bld):
			bld.env.AFLAGS_aaa = ['bar']
			from waflib.Tools.ccroot import USELIB_VARS
			USELIB_VARS['aaa'] = ['AFLAGS']

			tg = bld(features='aaa', aflags='test')

	The *aflags* attribute will be processed and this method will set::

		tg.env.AFLAGS = ['bar', 'test']
	"""
	env = self.env
	app = env.append_value
	feature_uselib = self.features + self.to_list(getattr(self, 'uselib', []))
	for var in self.get_uselib_vars():
		# 1. values set directly on the task generator (tg.cflags, ...)
		attr_val = getattr(self, var.lower(), [])
		if attr_val:
			app(var, self.to_list(attr_val))

		# 2. values registered per feature/uselib name (env.CFLAGS_xyz, ...)
		for feat in feature_uselib:
			val = env['%s_%s' % (var, feat)]
			if val:
				app(var, val)
|
||||
|
||||
# ============ the code above must not know anything about import libs ==========
|
||||
|
||||
@feature('cshlib', 'cxxshlib', 'fcshlib')
@after_method('apply_link')
def apply_implib(self):
	"""
	Handle dlls and their import libs on Windows-like systems.

	A ``.dll.a`` file called *import library* is generated.
	It must be installed as it is required for linking the library.
	"""
	if not self.env.DEST_BINFMT == 'pe':
		return

	dll = self.link_task.outputs[0]
	if isinstance(self.target, Node.Node):
		name = self.target.name
	else:
		name = os.path.split(self.target)[1]
	# e.g. env.implib_PATTERN == 'lib%s.dll.a' with gcc
	implib = self.env.implib_PATTERN % name
	implib = dll.parent.find_or_declare(implib)
	self.env.append_value('LINKFLAGS', self.env.IMPLIB_ST % implib.bldpath())
	self.link_task.outputs.append(implib)

	if getattr(self, 'defs', None) and self.env.DEST_BINFMT == 'pe':
		node = self.path.find_resource(self.defs)
		if not node:
			raise Errors.WafError('invalid def file %r' % self.defs)
		if 'msvc' in (self.env.CC_NAME, self.env.CXX_NAME):
			self.env.append_value('LINKFLAGS', '/def:%s' % node.path_from(self.get_cwd()))
			self.link_task.dep_nodes.append(node)
		else:
			# gcc for windows takes a *.def file as an input without any special flag
			self.link_task.inputs.append(node)

	# where to put the import library
	if getattr(self, 'install_task', None):
		try:
			# user has given a specific installation path for the import library
			inst_to = self.install_path_implib
		except AttributeError:
			try:
				# user has given an installation path for the main library, put the import library in it
				inst_to = self.install_path
			except AttributeError:
				# else, put the library in BINDIR and the import library in LIBDIR
				inst_to = '${IMPLIBDIR}'
				self.install_task.install_to = '${BINDIR}'
				if not self.env.IMPLIBDIR:
					self.env.IMPLIBDIR = self.env.LIBDIR
		self.implib_install_task = self.add_install_files(install_to=inst_to, install_from=implib,
			chmod=self.link_task.chmod, task=self.link_task)
|
||||
|
||||
# ============ the code above must not know anything about vnum processing on unix platforms =========
|
||||
|
||||
# valid vnum values: '1', '1.2' or '1.2.3' with no leading zeroes
re_vnum = re.compile('^([1-9]\\d*|0)([.]([1-9]\\d*|0)){0,2}?$')
@feature('cshlib', 'cxxshlib', 'dshlib', 'fcshlib', 'vnum')
@after_method('apply_link', 'propagate_uselib_vars')
def apply_vnum(self):
	"""
	Enforce version numbering on shared libraries. The valid version numbers must have either zero or two dots::

		def build(bld):
			bld.shlib(source='a.c', target='foo', vnum='14.15.16')

	In this example on Linux platform, ``libfoo.so`` is installed as ``libfoo.so.14.15.16``, and the following symbolic links are created:

	* ``libfoo.so → libfoo.so.14.15.16``
	* ``libfoo.so.14 → libfoo.so.14.15.16``

	By default, the library will be assigned SONAME ``libfoo.so.14``, effectively declaring ABI compatibility between all minor and patch releases for the major version of the library. When necessary, the compatibility can be explicitly defined using `cnum` parameter:

		def build(bld):
			bld.shlib(source='a.c', target='foo', vnum='14.15.16', cnum='14.15')

	In this case, the assigned SONAME will be ``libfoo.so.14.15`` with ABI compatibility only between path releases for a specific major and minor version of the library.

	On OS X platform, install-name parameter will follow the above logic for SONAME with exception that it also specifies an absolute path (based on install_path) of the library.
	"""
	# versioned libraries only make sense for elf/mach-o on posix systems
	if not getattr(self, 'vnum', '') or os.name != 'posix' or self.env.DEST_BINFMT not in ('elf', 'mac-o'):
		return

	link = self.link_task
	if not re_vnum.match(self.vnum):
		raise Errors.WafError('Invalid vnum %r for target %r' % (self.vnum, getattr(self, 'name', self)))
	nums = self.vnum.split('.')
	node = link.outputs[0]

	# the compatibility version (cnum) defaults to the major version and
	# must be a prefix of vnum
	cnum = getattr(self, 'cnum', str(nums[0]))
	cnums = cnum.split('.')
	if len(cnums)>len(nums) or nums[0:len(cnums)] != cnums:
		raise Errors.WafError('invalid compatibility version %s' % cnum)

	libname = node.name
	# name3 carries the full version, name2 the compatibility version
	if libname.endswith('.dylib'):
		name3 = libname.replace('.dylib', '.%s.dylib' % self.vnum)
		name2 = libname.replace('.dylib', '.%s.dylib' % cnum)
	else:
		name3 = libname + '.' + self.vnum
		name2 = libname + '.' + cnum

	# add the so name for the ld linker - to disable, just unset env.SONAME_ST
	if self.env.SONAME_ST:
		v = self.env.SONAME_ST % name2
		self.env.append_value('LINKFLAGS', v.split())

	# the following task is just to enable execution from the build dir :-/
	if self.env.DEST_OS != 'openbsd':
		outs = [node.parent.make_node(name3)]
		if name2 != name3:
			outs.append(node.parent.make_node(name2))
		self.create_task('vnum', node, outs)

	if getattr(self, 'install_task', None):
		# replace the plain install with versioned copies plus symlinks
		self.install_task.hasrun = Task.SKIP_ME
		path = self.install_task.install_to
		if self.env.DEST_OS == 'openbsd':
			# OpenBSD: the versioned name is the library file itself
			libname = self.link_task.outputs[0].name
			t1 = self.add_install_as(install_to='%s/%s' % (path, libname), install_from=node, chmod=self.link_task.chmod)
			self.vnum_install_task = (t1,)
		else:
			t1 = self.add_install_as(install_to=path + os.sep + name3, install_from=node, chmod=self.link_task.chmod)
			t3 = self.add_symlink_as(install_to=path + os.sep + libname, install_from=name3)
			if name2 != name3:
				t2 = self.add_symlink_as(install_to=path + os.sep + name2, install_from=name3)
				self.vnum_install_task = (t1, t2, t3)
			else:
				self.vnum_install_task = (t1, t3)

	if '-dynamiclib' in self.env.LINKFLAGS:
		# this requires after(propagate_uselib_vars)
		try:
			inst_to = self.install_path
		except AttributeError:
			inst_to = self.link_task.__class__.inst_to
		if inst_to:
			p = Utils.subst_vars(inst_to, self.env)
			path = os.path.join(p, name2)
			self.env.append_value('LINKFLAGS', ['-install_name', path])
			self.env.append_value('LINKFLAGS', '-Wl,-compatibility_version,%s' % cnum)
			self.env.append_value('LINKFLAGS', '-Wl,-current_version,%s' % self.vnum)
|
||||
|
||||
class vnum(Task.Task):
	"""
	Create the symbolic links for a versioned shared library. Instances are created by :py:func:`waflib.Tools.ccroot.apply_vnum`
	"""
	color = 'CYAN'
	ext_in = ['.bin']
	def keyword(self):
		return 'Symlinking'
	def run(self):
		target = self.inputs[0].name
		for out in self.outputs:
			path = out.abspath()
			# remove any stale link before re-creating it
			try:
				os.remove(path)
			except OSError:
				pass

			try:
				os.symlink(target, path)
			except OSError:
				# e.g. filesystem without symlink support
				return 1
|
||||
|
||||
class fake_shlib(link_task):
	"""
	Task used for reading a system library and adding the dependency on it
	"""
	def runnable_status(self):
		# wait until every predecessor has run, then skip (nothing to build)
		if any(not t.hasrun for t in self.run_after):
			return Task.ASK_LATER
		return Task.SKIP_ME
|
||||
|
||||
class fake_stlib(stlink_task):
	"""
	Task used for reading a system library and adding the dependency on it
	"""
	def runnable_status(self):
		# wait until every predecessor has run, then skip (nothing to build)
		if any(not t.hasrun for t in self.run_after):
			return Task.ASK_LATER
		return Task.SKIP_ME
|
||||
|
||||
@conf
def read_shlib(self, name, paths=None, export_includes=None, export_defines=None):
	"""
	Read a system shared library, enabling its use as a local library. Will trigger a rebuild if the file changes::

		def build(bld):
			bld.read_shlib('m')
			bld.program(source='main.c', use='m')

	:param name: library name (without platform prefix/suffix)
	:param paths: extra folders to search, in addition to the system paths
	:param export_includes: include paths propagated to users of the library
	:param export_defines: defines propagated to users of the library
	"""
	# None sentinels instead of mutable [] defaults: a shared default list
	# could be mutated by one caller and leak into all subsequent calls
	return self(name=name, features='fake_lib', lib_paths=paths or [], lib_type='shlib',
		export_includes=export_includes or [], export_defines=export_defines or [])
|
||||
|
||||
@conf
def read_stlib(self, name, paths=None, export_includes=None, export_defines=None):
	"""
	Read a system static library, enabling a use as a local library. Will trigger a rebuild if the file changes.

	:param name: library name (without platform prefix/suffix)
	:param paths: extra folders to search, in addition to the system paths
	:param export_includes: include paths propagated to users of the library
	:param export_defines: defines propagated to users of the library
	"""
	# None sentinels instead of mutable [] defaults: a shared default list
	# could be mutated by one caller and leak into all subsequent calls
	return self(name=name, features='fake_lib', lib_paths=paths or [], lib_type='stlib',
		export_includes=export_includes or [], export_defines=export_defines or [])
|
||||
|
||||
# File name patterns tried by process_lib() for each library type,
# covering elf/mach-o/pe naming conventions
lib_patterns = {
	'shlib' : ['lib%s.so', '%s.so', 'lib%s.dylib', 'lib%s.dll', '%s.dll'],
	'stlib' : ['lib%s.a', '%s.a', 'lib%s.dll', '%s.dll', 'lib%s.lib', '%s.lib'],
}
|
||||
|
||||
@feature('fake_lib')
def process_lib(self):
	"""
	Find the location of a foreign library. Used by :py:class:`waflib.Tools.ccroot.read_shlib` and :py:class:`waflib.Tools.ccroot.read_stlib`.
	"""
	node = None

	candidates = [pat % self.name for pat in lib_patterns[self.lib_type]]
	for folder in self.lib_paths + [self.path] + SYSTEM_LIB_PATHS:
		if not isinstance(folder, Node.Node):
			folder = self.bld.root.find_node(folder) or self.path.find_node(folder)
			if not folder:
				continue

		for filename in candidates:
			node = folder.find_node(filename)
			if node:
				try:
					# hash the file so a change triggers a rebuild
					Utils.h_file(node.abspath())
				except EnvironmentError:
					raise ValueError('Could not read %r' % filename)
				break
		else:
			continue
		break
	else:
		raise Errors.WafError('could not find library %r' % self.name)
	self.link_task = self.create_task('fake_%s' % self.lib_type, [], [node])
	self.target = self.name
|
||||
|
||||
|
||||
class fake_o(Task.Task):
	# Placeholder task for a pre-built object file: nothing to execute
	def runnable_status(self):
		return Task.SKIP_ME
|
||||
|
||||
@extension('.o', '.obj')
def add_those_o_files(self, node):
	# Register a pre-built object file so that apply_link picks it up
	tsk = self.create_task('fake_o', [], node)
	tasks = getattr(self, 'compiled_tasks', None)
	if tasks is None:
		self.compiled_tasks = [tsk]
	else:
		tasks.append(tsk)
|
||||
|
||||
@feature('fake_obj')
@before_method('process_source')
def process_objs(self):
	"""
	Puts object files in the task generator outputs
	"""
	for node in self.to_nodes(self.source):
		self.add_those_o_files(node)
	# the objects are handled above; prevent normal source processing
	self.source = []
|
||||
|
||||
@conf
def read_object(self, obj):
	"""
	Read an object file, enabling injection in libs/programs. Will trigger a rebuild if the file changes.

	:param obj: object file path, as string or Node
	"""
	if not isinstance(obj, self.path.__class__):
		# convert a string path into a Node relative to the current path
		obj = self.path.find_resource(obj)
	return self(features='fake_obj', source=obj, name=obj.name)
|
||||
|
||||
@feature('cxxprogram', 'cprogram')
@after_method('apply_link', 'process_use')
def set_full_paths_hpux(self):
	"""
	On hp-ux, extend the libpaths and static library paths to absolute paths
	"""
	if self.env.DEST_OS != 'hp-ux':
		return
	base = self.bld.bldnode.abspath()
	for var in ('LIBPATH', 'STLIBPATH'):
		# keep absolute entries, anchor relative ones at the build directory
		self.env[var] = [
			entry if entry.startswith('/') else os.path.normpath(os.path.join(base, entry))
			for entry in self.env[var]
		]
|
33
third_party/waf/waflib/Tools/clang.py
vendored
Normal file
33
third_party/waf/waflib/Tools/clang.py
vendored
Normal file
@ -0,0 +1,33 @@
|
||||
#! /usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
|
||||
|
||||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
# Krzysztof Kosiński 2014
|
||||
|
||||
"""
|
||||
Detect the Clang C compiler
|
||||
"""
|
||||
|
||||
from waflib.Tools import ccroot, ar, gcc
|
||||
from waflib.Configure import conf
|
||||
|
||||
@conf
def find_clang(conf):
	"""
	Finds the program clang and executes it to ensure it really is clang
	"""
	cc = conf.find_program('clang', var='CC')
	# clang=True: version detection uses clang-specific predefined macros
	conf.get_cc_version(cc, clang=True)
	conf.env.CC_NAME = 'clang'
|
||||
|
||||
def configure(conf):
	"""
	Configuration for the clang tool: detect the compiler and load the
	common gcc-style flags and helpers
	"""
	conf.find_clang()
	# clang toolchains usually ship llvm-ar; fall back to the system ar
	conf.find_program(['llvm-ar', 'ar'], var='AR')
	conf.find_ar()
	conf.gcc_common_flags()
	conf.gcc_modifier_platform()
	conf.cc_load_tools()
	conf.cc_add_flags()
	conf.link_add_flags()
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
x
Reference in New Issue
Block a user