
Upgrade WAF to 1.5.14

v0.7.4-release
Ryan Dahl, 15 years ago
parent commit e898c1f2e3
37 changed files:

  1. bin/node-waf (2)
  2. tools/waf-light (8)
  3. tools/wafadmin/3rdparty/boost.py (17)
  4. tools/wafadmin/Build.py (60)
  5. tools/wafadmin/Configure.py (27)
  6. tools/wafadmin/Constants.py (6)
  7. tools/wafadmin/Environment.py (13)
  8. tools/wafadmin/Logs.py (25)
  9. tools/wafadmin/Node.py (45)
  10. tools/wafadmin/Options.py (2)
  11. tools/wafadmin/Runner.py (28)
  12. tools/wafadmin/Scripting.py (20)
  13. tools/wafadmin/Task.py (153)
  14. tools/wafadmin/TaskGen.py (27)
  15. tools/wafadmin/Tools/ar.py (2)
  16. tools/wafadmin/Tools/ccroot.py (11)
  17. tools/wafadmin/Tools/compiler_cc.py (17)
  18. tools/wafadmin/Tools/compiler_cxx.py (16)
  19. tools/wafadmin/Tools/config_c.py (94)
  20. tools/wafadmin/Tools/gas.py (7)
  21. tools/wafadmin/Tools/gnome.py (17)
  22. tools/wafadmin/Tools/intltool.py (5)
  23. tools/wafadmin/Tools/javaw.py (11)
  24. tools/wafadmin/Tools/lua.py (2)
  25. tools/wafadmin/Tools/misc.py (1)
  26. tools/wafadmin/Tools/msvc.py (138)
  27. tools/wafadmin/Tools/perl.py (72)
  28. tools/wafadmin/Tools/preproc.py (12)
  29. tools/wafadmin/Tools/python.py (4)
  30. tools/wafadmin/Tools/qt4.py (45)
  31. tools/wafadmin/Tools/ruby.py (120)
  32. tools/wafadmin/Tools/unittestw.py (64)
  33. tools/wafadmin/Tools/xlc.py (77)
  34. tools/wafadmin/Tools/xlcxx.py (77)
  35. tools/wafadmin/Utils.py (67)
  36. tools/wafadmin/ansiterm.py (31)
  37. tools/wafadmin/py3kfixes.py (14)

bin/node-waf (2)

@ -12,6 +12,6 @@ t = join(w, 'Tools')
sys.path = [w, t] + sys.path
import Scripting
VERSION="1.5.10"
VERSION="1.5.14"
Scripting.prepare(t, os.getcwd(), VERSION, wafdir)
sys.exit(0)

tools/waf-light (8)

@ -1,6 +1,6 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2005-2009
# encoding: ISO8859-1
# Thomas Nagy, 2005-2010
"""
Redistribution and use in source and binary forms, with or without
@ -37,7 +37,7 @@ if 'PSYCOWAF' in os.environ:
try:import psyco;psyco.full()
except:pass
VERSION="1.5.10"
VERSION="1.5.14"
REVISION="x"
INSTALL="x"
C1='x'
@ -149,7 +149,7 @@ t = join(w, 'Tools')
f = join(w, '3rdparty')
sys.path = [w, t, f] + sys.path
if __name__ == '__main__':
import Scripting
Scripting.prepare(t, cwd, VERSION, wafdir)
sys.exit(0)

tools/wafadmin/3rdparty/boost.py (17)

@ -17,7 +17,7 @@
# conf.check_boost(lib='signals filesystem', static='onlystatic', score_version=(-1000, 1000), tag_minscore=1000)
#
#def build(bld):
# bld.new_task_gen(source='main.c', target='bar', uselib="BOOST BOOST_SYSTEM")
# bld(source='main.c', target='bar', uselib="BOOST BOOST_SYSTEM")
#
#ISSUES:
# * find_includes should be called only once!
@ -28,7 +28,7 @@
## * the rest of the code has not really been tried
# * make certain a demo is provided (in demos/adv for example)
# TODO: boost.py will be removed in waf 1.6
# TODO: bad and underdocumented code -> boost.py will be removed in waf 1.6 to be rewritten later
import os.path, glob, types, re, sys
import Configure, config_c, Options, Utils, Logs
@ -52,6 +52,7 @@ is_versiontag = re.compile('^\d+_\d+_?\d*$')
is_threadingtag = re.compile('^mt$')
is_abitag = re.compile('^[sgydpn]+$')
is_toolsettag = re.compile('^(acc|borland|como|cw|dmc|darwin|gcc|hp_cxx|intel|kylix|vc|mgw|qcc|sun|vacpp)\d*$')
is_pythontag=re.compile('^py[0-9]{2}$')
def set_options(opt):
opt.add_option('--boost-includes', type='string', default='', dest='boostincludes', help='path to the boost directory where the includes are e.g. /usr/local/include/boost-1_35')
@ -74,8 +75,8 @@ def version_string(version):
def libfiles(lib, pattern, lib_paths):
result = []
for lib_path in lib_paths:
libname = pattern % ('boost_' + lib + '*')
result += glob.glob(lib_path + '/' + libname)
libname = pattern % ('boost_%s[!_]*' % lib)
result += glob.glob(os.path.join(lib_path, libname))
return result
@conf
@ -101,7 +102,8 @@ def tags_score(tags, kw):
'threading': kw['tag_threading'],
'abi': kw['tag_abi'],
'toolset': kw['tag_toolset'],
'version': kw['tag_version']
'version': kw['tag_version'],
'python': kw['tag_python']
}
if kw['tag_toolset'] is None:
@ -120,6 +122,7 @@ def tags_score(tags, kw):
if is_threadingtag.match(tag): found_tags['threading'] = tag
if is_abitag.match(tag): found_tags['abi'] = tag
if is_toolsettag.match(tag): found_tags['toolset'] = tag
if is_pythontag.match(tag): found_tags['python'] = tag
for tagname in needed_tags.iterkeys():
if needed_tags[tagname] is not None and tagname in found_tags:
@ -148,8 +151,12 @@ def validate_boost(self, kw):
set_default(kw, x, None)
set_default(kw, 'tag_abi', '^[^d]*$')
set_default(kw, 'python', str(sys.version_info[0]) + str(sys.version_info[1]) )
set_default(kw, 'tag_python', '^py' + kw['python'] + '$')
set_default(kw, 'score_threading', (10, -10))
set_default(kw, 'score_abi', (10, -10))
set_default(kw, 'score_python', (10,-10))
set_default(kw, 'score_toolset', (1, -1))
set_default(kw, 'score_version', (100, -100))
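
A note on the new python tag: tag_python defaults to the running interpreter (e.g. '^py26$'), and score_python only adjusts how a matching or mismatching tag is weighted. A minimal configure sketch, not part of the commit; the library name and scores are illustrative, and the boost tool is assumed to be reachable on the tool path (waf-light adds the 3rdparty directory to sys.path):

def configure(conf):
    conf.check_tool('boost')
    # prefer libraries tagged for the running Python, penalize the others
    conf.check_boost(lib='python', score_python=(100, -100), tag_minscore=1000)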

tools/wafadmin/Build.py (60)

@ -33,11 +33,14 @@ class BuildError(Utils.WafError):
Utils.WafError.__init__(self, self.format_error())
def format_error(self):
lst = ['Build failed']
lst = ['Build failed:']
for tsk in self.tasks:
txt = tsk.format_error()
if txt: lst.append(txt)
return '\n'.join(lst)
sep = ' '
if len(lst) > 2:
sep = '\n'
return sep.join(lst)
def group_method(fun):
"""
@ -62,6 +65,7 @@ def group_method(fun):
m = k[0].task_manager
if not m.groups: m.add_group()
m.groups[m.current_group].post_funs.append((fun, k, kw))
if not 'cwd' in kw:
kw['cwd'] = k[0].path
else:
fun(*k, **kw)
@ -269,7 +273,7 @@ class BuildContext(Utils.Context):
self.generator.start()
except KeyboardInterrupt:
dw()
if self.generator.consumers:
if Runner.TaskConsumer.consumers:
self.save()
raise
except Exception:
@ -278,7 +282,7 @@ class BuildContext(Utils.Context):
raise
else:
dw()
if self.generator.consumers:
if Runner.TaskConsumer.consumers:
self.save()
if self.generator.error:
@ -316,6 +320,9 @@ class BuildContext(Utils.Context):
except OSError: pass
def new_task_gen(self, *k, **kw):
if self.task_gen_cache_names:
self.task_gen_cache_names = {}
kw['bld'] = self
if len(k) == 0:
ret = TaskGen.task_gen(*k, **kw)
@ -328,6 +335,13 @@ class BuildContext(Utils.Context):
ret = cls(*k, **kw)
return ret
def __call__(self, *k, **kw):
if self.task_gen_cache_names:
self.task_gen_cache_names = {}
kw['bld'] = self
return TaskGen.task_gen(*k, **kw)
def load_envs(self):
try:
lst = Utils.listdir(self.cachedir)
@ -384,7 +398,7 @@ class BuildContext(Utils.Context):
lstvariants.append(env.variant())
self.lst_variants = lstvariants
debug('build: list of variants is %r' % lstvariants)
debug('build: list of variants is %r', lstvariants)
for name in lstvariants+[0]:
for v in 'node_sigs cache_node_abspath'.split():
@ -418,7 +432,7 @@ class BuildContext(Utils.Context):
if not self.srcnode:
self.srcnode = self.root.ensure_dir_node_from_path(srcdir)
debug('build: srcnode is %s and srcdir %s' % (self.srcnode.name, srcdir))
debug('build: srcnode is %s and srcdir %s', self.srcnode.name, srcdir)
self.path = self.srcnode
@ -498,24 +512,30 @@ class BuildContext(Utils.Context):
lst.reverse()
# list the files in the build dirs
# remove the existing timestamps if the build files are removed
try:
for variant in self.lst_variants:
sub_path = os.path.join(self.bldnode.abspath(), variant , *lst)
try:
self.listdir_bld(src_dir_node, sub_path, variant)
except OSError:
#debug('build: osError on ' + sub_path)
# listdir failed, remove all sigs of nodes
# TODO more things to remove?
dict = self.node_sigs[variant]
# listdir failed, remove the build node signatures for all variants
for node in src_dir_node.childs.values():
if node.id in dict:
if node.id & 3 != Node.BUILD:
continue
for dct in self.node_sigs:
if node.id in dct:
dict.__delitem__(node.id)
# avoid deleting the build dir node
if node.id != self.bldnode.id:
# the policy is to avoid removing nodes representing directories
src_dir_node.childs.__delitem__(node.name)
for variant in self.lst_variants:
sub_path = os.path.join(self.bldnode.abspath(), variant , *lst)
try:
os.makedirs(sub_path)
except OSError:
pass
# ======================================= #
def listdir_src(self, parent_node):
@ -599,7 +619,7 @@ class BuildContext(Utils.Context):
lst = [str(env[a]) for a in vars_lst]
ret = Utils.h_list(lst)
debug("envhash: %r %r" % (ret, lst))
debug('envhash: %r %r', ret, lst)
# next time
self.cache_sig_vars[idx] = ret
@ -769,6 +789,7 @@ class BuildContext(Utils.Context):
Logs.warn('could not remove %s (error code %r)' % (e.filename, e.errno))
return True
red = re.compile(r"^([A-Za-z]:)?[/\\\\]*")
def get_install_path(self, path, env=None):
"installation path prefixed by the destdir, the variables like in '${PREFIX}/bin' are substituted"
if not env: env = self.env
@ -776,7 +797,7 @@ class BuildContext(Utils.Context):
path = path.replace('/', os.sep)
destpath = Utils.subst_vars(path, env)
if destdir:
destpath = os.path.join(destdir, destpath.lstrip(os.sep))
destpath = os.path.join(destdir, self.red.sub('', destpath))
return destpath
def install_files(self, path, files, env=None, chmod=O644, relative_trick=False, cwd=None):
@ -891,10 +912,11 @@ class BuildContext(Utils.Context):
link = True
elif os.readlink(tgt) != src:
link = True
if link:
try: os.remove(tgt)
except OSError: pass
if link:
info('* symlink %s (-> %s)' % (tgt, src))
os.symlink(src, tgt)
return 0
@ -909,7 +931,7 @@ class BuildContext(Utils.Context):
def exec_command(self, cmd, **kw):
# 'runner' zone is printed out for waf -v, see wafadmin/Options.py
debug('runner: system command -> %s' % cmd)
debug('runner: system command -> %s', cmd)
if self.log:
self.log.write('%s\n' % cmd)
kw['log'] = self.log
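
The new BuildContext.__call__ is what lets wscripts write bld(...) instead of bld.new_task_gen(...), the style the updated docstrings in this commit use. A minimal build sketch; the feature string and file names are illustrative:

def build(bld):
    # equivalent to bld.new_task_gen(...), which remains available
    bld(features='cc cprogram', source='main.c', target='app')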

tools/wafadmin/Configure.py (27)

@ -157,6 +157,7 @@ class ConfigurationContext(Utils.Context):
for tool in tools:
tool = tool.replace('++', 'xx')
if tool == 'java': tool = 'javaw'
if tool.lower() == 'unittest': tool = 'unittestw'
# avoid loading the same tool more than once with the same functions
# used by composite projects
@ -166,10 +167,14 @@ class ConfigurationContext(Utils.Context):
self.tool_cache.append(mag)
module = Utils.load_tool(tool, tooldir)
if funs:
self.eval_rules(funs)
else:
func = getattr(module, 'detect', None)
if func:
if type(func) is type(find_file): func(self)
else: self.eval_rules(funs or func)
else: self.eval_rules(func)
self.tools.append({'tool':tool, 'tooldir':tooldir, 'funs':funs})
@ -232,17 +237,20 @@ class ConfigurationContext(Utils.Context):
def check_message_1(self, sr):
self.line_just = max(self.line_just, len(sr))
self.log.write(sr + '\n\n')
for x in ('\n', self.line_just * '-', '\n', sr, '\n'):
self.log.write(x)
Utils.pprint('NORMAL', "%s :" % sr.ljust(self.line_just), sep='')
def check_message_2(self, sr, color='GREEN'):
self.log.write(sr)
self.log.write('\n')
Utils.pprint(color, sr)
def check_message(self, th, msg, state, option=''):
sr = 'Checking for %s %s' % (th, msg)
self.check_message_1(sr)
p = self.check_message_2
if state: p('ok ' + option)
if state: p('ok ' + str(option))
else: p('not found', 'YELLOW')
# FIXME remove in waf 1.6
@ -268,10 +276,15 @@ class ConfigurationContext(Utils.Context):
ret = find_program_impl(self.env, x, path_list, var, environ=self.environ)
if ret: break
self.check_message('program', ','.join(filename), ret, ret)
self.log.write('find program=%r paths=%r var=%r -> %r\n\n' % (filename, path_list, var, ret))
if not ret and mandatory:
self.fatal('The program %r could not be found' % filename)
self.check_message_1('Check for program %s' % ' or '.join(filename))
self.log.write(' find program=%r paths=%r var=%r\n -> %r\n' % (filename, path_list, var, ret))
if ret:
Utils.pprint('GREEN', str(ret))
else:
Utils.pprint('YELLOW', 'not found')
if mandatory:
self.fatal('The program %r is required' % filename)
if var:
self.env[var] = ret
return ret
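
find_program now writes its search to the configuration log and, when mandatory=True, aborts with 'The program %r is required'. A small sketch; the program and variable names are made up:

def configure(conf):
    # stores the result in conf.env.NODE and stops the configuration if nothing is found
    conf.find_program('node', var='NODE', mandatory=True)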

tools/wafadmin/Constants.py (6)

@ -9,9 +9,9 @@ maintainer: the version number is updated from the top-level wscript file
"""
# do not touch these three lines, they are updated automatically
HEXVERSION = 0x105010
WAFVERSION="1.5.10"
WAFREVISION = "6794M"
HEXVERSION = 0x105014
WAFVERSION="1.5.14"
WAFREVISION = "7363M"
ABI = 7
# permissions

tools/wafadmin/Environment.py (13)

@ -53,9 +53,14 @@ class Environment(object):
def __setitem__(self, key, value):
self.table[key] = value
def __delitem__(self, key, value):
def __delitem__(self, key):
del self.table[key]
def pop(self, key, *args):
if len(args):
return self.table.pop(key, *args)
return self.table.pop(key)
def set_variant(self, name):
self.table[VARIANT] = name
@ -173,7 +178,7 @@ class Environment(object):
for m in re_imp.finditer(code):
g = m.group
tbl[g(2)] = eval(g(3))
Logs.debug('env: %s' % str(self.table))
Logs.debug('env: %s', self.table)
def get_destdir(self):
"return the destdir, useful for installing"
@ -197,9 +202,9 @@ class Environment(object):
else:
self[name] = value
def __detattr__(self, name):
def __delattr__(self, name):
if name in self.__slots__:
object.__detattr__(self, name)
object.__delattr__(self, name)
else:
del self[name]

tools/wafadmin/Logs.py (25)

@ -23,12 +23,13 @@ colors_lst = {
'cursor_off' :'\x1b[?25l',
}
got_tty = not os.environ.get('TERM', 'dumb') in ['dumb', 'emacs']
if got_tty:
got_tty = False
term = os.environ.get('TERM', 'dumb')
if not term in ['dumb', 'emacs']:
try:
got_tty = sys.stderr.isatty()
got_tty = sys.stderr.isatty() or (sys.platform == 'win32' and term in ['xterm', 'msys'])
except AttributeError:
got_tty = False
pass
import Utils
@ -93,17 +94,17 @@ class formatter(logging.Formatter):
return rec.c1+rec.msg+rec.c2
return logging.Formatter.format(self, rec)
def debug(msg):
def debug(*k, **kw):
if verbose:
# FIXME why does it eat the newlines????
msg = msg.replace('\n', ' ')
logging.debug(msg)
k = list(k)
k[0] = k[0].replace('\n', ' ')
logging.debug(*k, **kw)
def error(msg):
logging.error(msg)
def error(*k, **kw):
logging.error(*k, **kw)
if verbose > 1:
if isinstance(msg, Utils.WafError):
st = msg.stack
if isinstance(k[0], Utils.WafError):
st = k[0].stack
else:
st = traceback.extract_stack()
if st:
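
debug() and error() now take printf-style arguments the way the logging module does, and the rest of the commit converts callers from 'msg % args' to 'msg, args'. A sketch of the pattern, assuming wafadmin is on sys.path; the values are placeholders:

from Logs import debug

name, srcdir = 'src', '/tmp/project'
# old style formatted eagerly, even with verbosity off:
#   debug('build: srcnode is %s and srcdir %s' % (name, srcdir))
# new style defers formatting to the logging module:
debug('build: srcnode is %s and srcdir %s', name, srcdir)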

tools/wafadmin/Node.py (45)

@ -30,8 +30,8 @@ ${TGT[0].abspath(env)} -> /path/to/dir/to/file.ext
"""
import os, sys, fnmatch, re
import Utils
import os, sys, fnmatch, re, stat
import Utils, Constants
UNDEFINED = 0
DIR = 1
@ -256,7 +256,6 @@ class Node(object):
return None
return current
# FIXME: remove in waf 1.6 ?
def ensure_dir_node_from_path(self, lst):
"used very rarely, force the construction of a branch of node instance for representing folders"
@ -278,7 +277,6 @@ class Node(object):
current = self.__class__(name, prev, DIR)
return current
# FIXME: remove in waf 1.6
def exclusive_build_node(self, path):
"""
create a hierarchy in the build dir (no source folders) for ill-behaving compilers
@ -495,7 +493,7 @@ class Node(object):
return self.name[k:]
def find_iter_impl(self, src=True, bld=True, dir=True, accept_name=None, is_prune=None, maxdepth=25):
"find nodes in the filesystem hierarchy, try to instanciate the nodes passively"
"""find nodes in the filesystem hierarchy, try to instanciate the nodes passively; same gotcha as ant_glob"""
bld_ctx = self.__class__.bld
bld_ctx.rescan(self)
for name in bld_ctx.cache_dir_contents[self.id]:
@ -534,7 +532,7 @@ class Node(object):
raise StopIteration
def find_iter(self, in_pat=['*'], ex_pat=exclude_pats, prune_pat=prune_pats, src=True, bld=True, dir=False, maxdepth=25, flat=False):
"find nodes recursively, this returns everything but folders by default"
"""find nodes recursively, this returns everything but folders by default; same gotcha as ant_glob"""
if not (src or bld or dir):
raise StopIteration
@ -568,9 +566,12 @@ class Node(object):
return ret
def ant_glob(self, *k, **kw):
"""
known gotcha: will enumerate the files, but only if the folder exists in the source directory
"""
src=kw.get('src', 1)
bld=kw.get('bld', 1)
bld=kw.get('bld', 0)
dir=kw.get('dir', 0)
excl = kw.get('excl', exclude_regs)
incl = k and k[0] or kw.get('incl', '**')
@ -655,6 +656,36 @@ class Node(object):
return ret
def update_build_dir(self, env=None):
if not env:
for env in bld.all_envs:
self.update_build_dir(env)
return
path = self.abspath(env)
lst = Utils.listdir(path)
try:
self.__class__.bld.cache_dir_contents[self.id].update(lst)
except KeyError:
self.__class__.bld.cache_dir_contents[self.id] = set(lst)
self.__class__.bld.cache_scanned_folders[self.id] = True
for k in lst:
npath = path + os.sep + k
st = os.stat(npath)
if stat.S_ISREG(st[stat.ST_MODE]):
ick = self.find_or_declare(k)
if not (ick.id in self.__class__.bld.node_sigs[env.variant()]):
self.__class__.bld.node_sigs[env.variant()][ick.id] = Constants.SIG_NIL
elif stat.S_ISDIR(st[stat.ST_MODE]):
child = self.find_dir(k)
if not child:
child = self.ensure_dir_node_from_path(k)
child.update_build_dir(env)
class Nodu(Node):
pass
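
ant_glob now defaults to bld=0 and documents its main gotcha: only folders that exist in the source tree are enumerated. A typical wscript use; the paths are illustrative, and the flat, space-separated return value is assumed from the 1.5 defaults rather than shown in this hunk:

def build(bld):
    # finds nothing under folders that exist only in the build directory
    sources = bld.path.ant_glob('src/**/*.c')
    bld(features='cc cprogram', source=sources, target='app')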

tools/wafadmin/Options.py (2)

@ -260,6 +260,8 @@ class Handler(Utils.Context):
for tool in tools:
tool = tool.replace('++', 'xx')
if tool == 'java': tool = 'javaw'
if tool.lower() == 'unittest': tool = 'unittestw'
module = Utils.load_tool(tool, path)
try:
fun = module.set_options

tools/wafadmin/Runner.py (28)

@ -24,10 +24,12 @@ def run(*args, **kwargs):
threading.Thread.run = run
class TaskConsumer(threading.Thread):
def __init__(self, m):
ready = Queue(0)
consumers = []
def __init__(self):
threading.Thread.__init__(self)
self.setDaemon(1)
self.master = m
self.start()
def run(self):
@ -37,9 +39,9 @@ class TaskConsumer(threading.Thread):
pass
def loop(self):
m = self.master
while 1:
tsk = m.ready.get()
tsk = TaskConsumer.ready.get()
m = tsk.master
if m.stop:
m.out.put(tsk)
continue
@ -98,16 +100,13 @@ class Parallel(object):
# tasks that are awaiting for another task to complete
self.frozen = []
# tasks waiting to be run by the consumers
self.ready = Queue(0)
# tasks returned by the consumers
self.out = Queue(0)
self.count = 0 # tasks not in the producer area
self.processed = 1 # progress indicator
self.consumers = None # the consumer threads, created lazily
self.stop = False # error condition to stop the build
self.error = False # error flag
@ -162,6 +161,12 @@ class Parallel(object):
def start(self):
"execute the tasks"
if TaskConsumer.consumers:
# the worker pool is usually loaded lazily (see below)
# in case it is re-used with a different value of numjobs:
while len(TaskConsumer.consumers) < self.numjobs:
TaskConsumer.consumers.append(TaskConsumer())
while not self.stop:
self.refill_task_list()
@ -202,12 +207,13 @@ class Parallel(object):
# run me: put the task in ready queue
tsk.position = (self.processed, self.total)
self.count += 1
self.ready.put(tsk)
tsk.master = self
TaskConsumer.ready.put(tsk)
self.processed += 1
# create the consumer threads only if there is something to consume
if not self.consumers:
self.consumers = [TaskConsumer(self) for i in xrange(self.numjobs)]
if not TaskConsumer.consumers:
TaskConsumer.consumers = [TaskConsumer() for i in xrange(self.numjobs)]
# self.count represents the tasks that have been made available to the consumer threads
# collect all the tasks after an error else the message may be incomplete

tools/wafadmin/Scripting.py (20)

@ -201,19 +201,20 @@ def configure(conf):
src = getattr(Options.options, SRCDIR, None)
if not src: src = getattr(Utils.g_module, SRCDIR, None)
if not src: src = getattr(Utils.g_module, 'top', None)
if not src:
src = '.'
incomplete_src = 1
src = os.path.abspath(src)
bld = getattr(Options.options, BLDDIR, None)
if not bld:
bld = getattr(Utils.g_module, BLDDIR, None)
if bld == '.':
raise Utils.WafError('Setting blddir="." may cause distclean problems')
if not bld: bld = getattr(Utils.g_module, BLDDIR, None)
if not bld: bld = getattr(Utils.g_module, 'out', None)
if not bld:
bld = 'build'
incomplete_bld = 1
if bld == '.':
raise Utils.WafError('Setting blddir="." may cause distclean problems')
bld = os.path.abspath(bld)
try: os.makedirs(bld)
@ -418,7 +419,7 @@ def dont_dist(name, src, build_dir):
if (name.startswith(',,')
or name.startswith('++')
or name.startswith('.waf-1.')
or name.startswith('.waf')
or (src == '.' and name == Options.lockfile)
or name in excludes
or name == build_dir
@ -451,6 +452,7 @@ def copytree(src, dst, build_dir):
# TODO in waf 1.6, change this method if "srcdir == blddir" is allowed
def distclean(ctx=None):
'''removes the build directory'''
global commands
lst = os.listdir('.')
for f in lst:
if f == Options.lockfile:
@ -475,7 +477,7 @@ def distclean(ctx=None):
Logs.warn('file %r cannot be removed' % f)
# remove the local waf cache
if f.startswith('.waf-'):
if not commands and f.startswith('.waf'):
shutil.rmtree(f, ignore_errors=True)
# FIXME waf 1.6 a unique ctx parameter, and remove the optional appname and version
@ -488,7 +490,10 @@ def dist(appname='', version=''):
if not version: version = getattr(Utils.g_module, VERSION, '1.0')
tmp_folder = appname + '-' + version
if g_gz in ['gz', 'bz2']:
arch_name = tmp_folder + '.tar.' + g_gz
else:
arch_name = tmp_folder + '.' + 'zip'
# remove the previous dir
try:
@ -516,9 +521,12 @@ def dist(appname='', version=''):
# go back to the root directory
os.chdir(back)
if g_gz in ['gz', 'bz2']:
tar = tarfile.open(arch_name, 'w:' + g_gz)
tar.add(tmp_folder)
tar.close()
else:
Utils.zip_folder(tmp_folder, arch_name, tmp_folder)
try: from hashlib import sha1 as sha
except ImportError: from sha import sha
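
configure() now falls back to the module-level names 'top' and 'out' (the future waf 1.6 spelling) when srcdir/blddir are not given, and still rejects blddir='.'. A minimal wscript header using the new names:

top = '.'
out = 'build'   # any name except '.', which configure rejects because of distclean

def configure(conf):
    conf.check_tool('compiler_cc')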

tools/wafadmin/Task.py (153)

@ -42,7 +42,7 @@ The role of the Task Manager is to give the tasks in order (groups of task that
"""
import os, shutil, sys, re, random, datetime
import os, shutil, sys, re, random, datetime, tempfile
from Utils import md5
import Build, Runner, Utils, Node, Logs, Options
from Logs import debug, warn, error
@ -346,6 +346,7 @@ class store_task_type(type):
if name.endswith('_task'):
name = name.replace('_task', '')
if name != 'TaskBase':
TaskBase.classes[name] = cls
class TaskBase(object):
@ -612,18 +613,14 @@ class Task(TaskBase):
bld = self.generator.bld
# first compute the signature
try:
new_sig = self.signature()
except KeyError:
debug("task: something is wrong, computing the task %r signature failed" % self)
return RUN_ME
# compare the signature to a signature computed previously
key = self.unique_id()
try:
prev_sig = bld.task_sigs[key][0]
except KeyError:
debug("task: task %r must run as it was never run before or the task code changed" % self)
debug("task: task %r must run as it was never run before or the task code changed", self)
return RUN_ME
# compare the signatures of the outputs
@ -633,7 +630,7 @@ class Task(TaskBase):
if bld.node_sigs[variant][node.id] != new_sig:
return RUN_ME
except KeyError:
debug("task: task %r must run as the output nodes do not exist" % self)
debug("task: task %r must run as the output nodes do not exist", self)
return RUN_ME
# debug if asked to
@ -648,8 +645,8 @@ class Task(TaskBase):
bld = self.generator.bld
env = self.env
sig = self.signature()
ssig = sig.encode('hex')
cnt = 0
variant = env.variant()
for node in self.outputs:
# check if the node exists ..
@ -662,35 +659,78 @@ class Task(TaskBase):
# important, store the signature for the next run
bld.node_sigs[variant][node.id] = sig
bld.task_sigs[self.unique_id()] = self.cache_sig
# We could re-create the signature of the task with the signature of the outputs
# in practice, this means hashing the output files
# this is unnecessary
if Options.cache_global:
ssig = sig.encode('hex')
dest = os.path.join(Options.cache_global, '%s_%d_%s' % (ssig, cnt, node.name))
try: shutil.copy2(node.abspath(env), dest)
except IOError: warn('Could not write the file to the cache')
cnt += 1
# file caching, if possible
# try to avoid data corruption as much as possible
if not Options.cache_global or Options.options.nocache or not self.outputs:
return None
bld.task_sigs[self.unique_id()] = self.cache_sig
if getattr(self, 'cached', None):
return None
dname = os.path.join(Options.cache_global, ssig)
tmpdir = tempfile.mkdtemp(prefix=Options.cache_global)
try:
shutil.rmtree(dname)
except:
pass
try:
for node in self.outputs:
variant = node.variant(env)
dest = os.path.join(tmpdir, node.name)
shutil.copy2(node.abspath(env), dest)
except (OSError, IOError):
try:
shutil.rmtree(tmpdir)
except:
pass
else:
try:
os.rename(tmpdir, dname)
except OSError:
try:
shutil.rmtree(tmpdir)
except:
pass
else:
try:
os.chmod(dname, O755)
except:
pass
def can_retrieve_cache(self):
"""Retrieve build nodes from the cache - the file time stamps are updated
for cleaning the least used files from the cache dir - be careful when overridding"""
if not Options.cache_global: return None
if Options.options.nocache: return None
if not self.outputs: return None
"""
Retrieve build nodes from the cache
update the file timestamps to help cleaning the least used entries from the cache
additionally, set an attribute 'cached' to avoid re-creating the same cache files
suppose there are files in cache/dir1/file1 and cache/dir2/file2
first, read the timestamp of dir1
then try to copy the files
then look at the timestamp again, if it has changed, the data may have been corrupt (cache update by another process)
should an exception occur, ignore the data
"""
if not Options.cache_global or Options.options.nocache or not self.outputs:
return None
env = self.env
sig = self.signature()
ssig = sig.encode('hex')
# first try to access the cache folder for the task
dname = os.path.join(Options.cache_global, ssig)
try:
t1 = os.stat(dname).st_mtime
except OSError:
return None
cnt = 0
for node in self.outputs:
variant = node.variant(env)
ssig = sig.encode('hex')
orig = os.path.join(Options.cache_global, '%s_%d_%s' % (ssig, cnt, node.name))
orig = os.path.join(dname, node.name)
try:
shutil.copy2(orig, node.abspath(env))
# mark the cache file as used recently (modified)
@ -698,13 +738,21 @@ class Task(TaskBase):
except (OSError, IOError):
debug('task: failed retrieving file')
return None
else:
cnt += 1
# is it the same folder?
try:
t2 = os.stat(dname).st_mtime
except OSError:
return None
if t1 != t2:
return None
for node in self.outputs:
self.generator.bld.node_sigs[variant][node.id] = sig
self.generator.bld.printout('restoring from cache %r\n' % node.bldpath(env))
self.cached = True
return 1
def debug_why(self, old_sigs):
@ -714,12 +762,12 @@ class Task(TaskBase):
def v(x):
return x.encode('hex')
debug("Task %r" % self)
debug("Task %r", self)
msgs = ['Task must run', '* Source file or manual dependency', '* Implicit dependency', '* Environment variable']
tmp = 'task: -> %s: %s %s'
for x in xrange(len(msgs)):
if (new_sigs[x] != old_sigs[x]):
debug(tmp % (msgs[x], v(old_sigs[x]), v(new_sigs[x])))
debug(tmp, msgs[x], v(old_sigs[x]), v(new_sigs[x]))
def sig_explicit_deps(self):
bld = self.generator.bld
@ -731,7 +779,10 @@ class Task(TaskBase):
bld.rescan(x.parent)
variant = x.variant(self.env)
try:
m.update(bld.node_sigs[variant][x.id])
except KeyError:
raise Utils.WafError('Missing node signature for %r (required by %r)' % (x, self))
# manual dependencies, they can slow down the builds
if bld.deps_man:
@ -748,8 +799,8 @@ class Task(TaskBase):
variant = v.variant(self.env)
try:
v = bld.node_sigs[variant][v.id]
except KeyError: # make it fatal?
v = ''
except KeyError:
raise Utils.WafError('Missing node signature for %r (required by %r)' % (v, self))
elif hasattr(v, '__call__'):
v = v() # dependency is a function, call it
m.update(v)
@ -805,14 +856,21 @@ class Task(TaskBase):
# no previous run or the signature of the dependencies has changed, rescan the dependencies
(nodes, names) = self.scan()
if Logs.verbose:
debug('deps: scanner for %s returned %s %s' % (str(self), str(nodes), str(names)))
debug('deps: scanner for %s returned %s %s', str(self), str(nodes), str(names))
# store the dependencies in the cache
bld.node_deps[key] = nodes
bld.raw_deps[key] = names
# recompute the signature and return it
try:
sig = self.compute_sig_implicit_deps()
except KeyError:
try:
nodes = bld.node_deps.get(self.unique_id(), [])
except:
nodes = '?'
raise Utils.WafError('Missing node signature for %r (for implicit dependencies %r)' % (nodes, self))
return sig
@ -855,7 +913,7 @@ def compile_fun_shell(name, line):
The reserved keywords TGT and SRC represent the task input and output nodes
quick test:
bld.new_task_gen(source='wscript', rule='echo "foo\\${SRC[0].name}\\bar"')
bld(source='wscript', rule='echo "foo\\${SRC[0].name}\\bar"')
"""
extr = []
@ -886,7 +944,7 @@ def compile_fun_shell(name, line):
c = COMPILE_TEMPLATE_SHELL % (line, parm)
debug('action: %s' % c)
debug('action: %s', c)
return (funex(c), dvars)
def compile_fun_noshell(name, line):
@ -924,7 +982,7 @@ def compile_fun_noshell(name, line):
app("lst.extend(%r)" % params[-1].split())
fun = COMPILE_TEMPLATE_NOSHELL % "\n\t".join(buf)
debug('action: %s' % fun)
debug('action: %s', fun)
return (funex(fun), dvars)
def compile_fun(name, line, shell=None):
@ -992,10 +1050,29 @@ def update_outputs(cls):
def post_run(self):
old_post_run(self)
bld = self.outputs[0].__class__.bld
bld.node_sigs[self.env.variant()][self.outputs[0].id] = \
Utils.h_file(self.outputs[0].abspath(self.env))
for output in self.outputs:
bld.node_sigs[self.env.variant()][output.id] = Utils.h_file(output.abspath(self.env))
cls.post_run = post_run
old_runnable_status = cls.runnable_status
def runnable_status(self):
status = old_runnable_status(self)
if status != RUN_ME:
return status
try:
bld = self.outputs[0].__class__.bld
new_sig = self.signature()
prev_sig = bld.task_sigs[self.unique_id()][0]
if prev_sig == new_sig:
return SKIP_ME
except KeyError:
pass
except IndexError:
pass
return RUN_ME
cls.runnable_status = runnable_status
def extract_outputs(tasks):
"""file_deps: Infer additional dependencies from task input and output nodes
"""

tools/wafadmin/TaskGen.py (27)

@ -196,13 +196,13 @@ class task_gen(object):
self.meths = out
# then we run the methods in order
debug('task_gen: posting %s %d' % (self, id(self)))
debug('task_gen: posting %s %d', self, id(self))
for x in out:
try:
v = getattr(self, x)
except AttributeError:
raise Utils.WafError("tried to retrieve %s which is not a valid method" % x)
debug('task_gen: -> %s (%d)' % (x, id(self)))
debug('task_gen: -> %s (%d)', x, id(self))
v()
def post(self):
@ -217,7 +217,7 @@ class task_gen(object):
#error("OBJECT ALREADY POSTED" + str( self))
return
self.apply()
debug('task_gen: posted %s' % self.name)
debug('task_gen: posted %s', self.name)
self.posted = True
def get_hook(self, ext):
@ -342,7 +342,7 @@ def declare_order(*k):
if not f1 in task_gen.prec[f2]:
task_gen.prec[f2].append(f1)
def declare_chain(name='', action='', ext_in='', ext_out='', reentrant=1, color='BLUE',
def declare_chain(name='', action='', ext_in='', ext_out='', reentrant=True, color='BLUE',
install=0, before=[], after=[], decider=None, rule=None, scan=None):
"""
see Tools/flex.py for an example
@ -363,7 +363,7 @@ def declare_chain(name='', action='', ext_in='', ext_out='', reentrant=1, color=
def x_file(self, node):
if decider:
ext = decider(self, node)
elif isinstance(ext_out, str):
else:
ext = ext_out
if isinstance(ext, str):
@ -373,7 +373,7 @@ def declare_chain(name='', action='', ext_in='', ext_out='', reentrant=1, color=
elif isinstance(ext, list):
out_source = [node.change_ext(x) for x in ext]
if reentrant:
for i in xrange(reentrant):
for i in xrange((reentrant is True) and len(out_source) or reentrant):
self.allnodes.append(out_source[i])
else:
# XXX: useless: it will fail on Utils.to_list above...
@ -405,6 +405,7 @@ Intelligent compilers binding aspect-oriented programming and parallelization, w
"""
def taskgen(func):
setattr(task_gen, func.__name__, func)
return func
def feature(*k):
def deco(func):
@ -502,6 +503,8 @@ def exec_rule(self):
# create the task class
name = getattr(self, 'name', None) or self.target or self.rule
if not isinstance(name, str):
name = str(self.idx)
cls = Task.task_type_from_func(name, func, vars)
# now create one instance
@ -524,9 +527,6 @@ def exec_rule(self):
raise Utils.WafError('input file %r could not be found (%r)' % (x, self.path.abspath()))
tsk.inputs.append(y)
if getattr(self, 'always', None):
Task.always_run(cls)
if getattr(self, 'scan', None):
cls.scan = self.scan
@ -539,7 +539,10 @@ def exec_rule(self):
if getattr(self, 'on_results', None):
Task.update_outputs(cls)
for x in ['after', 'before']:
if getattr(self, 'always', None):
Task.always_run(cls)
for x in ['after', 'before', 'ext_in', 'ext_out']:
setattr(cls, x, getattr(self, x, []))
feature('*')(exec_rule)
before('apply_core')(exec_rule)
@ -552,8 +555,8 @@ def sequence_order(self):
there is also an awesome trick for executing the method in last position
to use:
bld.new_task_gen(features='javac seq')
bld.new_task_gen(features='jar seq')
bld(features='javac seq')
bld(features='jar seq')
to start a new sequence, set the attribute seq_start, for example:
obj.seq_start = True
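
exec_rule now falls back to str(self.idx) for the task class name when neither name, target nor rule is a string, and applies the always/on_results attributes to that class. A rule-based generator sketch; the command and file names are illustrative:

def build(bld):
    bld(rule='cp ${SRC} ${TGT}',
        source='input.txt',
        target='output.txt',
        on_results=True)   # Task.update_outputs: record a hash of each output file
    # setting always=True instead would wrap the class with Task.always_run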

tools/wafadmin/Tools/ar.py (2)

@ -10,7 +10,7 @@ import Task, Utils
from Configure import conftest
ar_str = '${AR} ${ARFLAGS} ${AR_TGT_F}${TGT} ${AR_SRC_F}${SRC}'
cls = Task.simple_task_type('static_link', ar_str, color='YELLOW', ext_in='.o', shell=False)
cls = Task.simple_task_type('static_link', ar_str, color='YELLOW', ext_in='.o', ext_out='.bin', shell=False)
cls.maxjobs = 1
cls.install = Utils.nada

tools/wafadmin/Tools/ccroot.py (11)

@ -132,7 +132,7 @@ def scan(self):
node = self.inputs[0]
(nodes, names) = preproc.get_deps(node, self.env, nodepaths = self.env['INC_PATHS'])
if Logs.verbose:
debug('deps: deps for %s: %r; unresolved %r' % (str(node), nodes, names))
debug('deps: deps for %s: %r; unresolved %r', str(node), nodes, names)
return (nodes, names)
all_nodes = []
@ -141,7 +141,7 @@ def scan(self):
for node in self.inputs:
(nodes, names) = preproc.get_deps(node, self.env, nodepaths = self.env['INC_PATHS'])
if Logs.verbose:
debug('deps: deps for %s: %r; unresolved %r' % (str(node), nodes, names))
debug('deps: deps for %s: %r; unresolved %r', str(node), nodes, names)
for x in nodes:
if id(x) in seen: continue
seen.add(id(x))
@ -209,7 +209,7 @@ def default_cc(self):
@feature('cprogram', 'dprogram', 'cstaticlib', 'dstaticlib', 'cshlib', 'dshlib')
def apply_verif(self):
"""no particular order, used for diagnostic"""
if not (self.source or getattr(self, 'add_objects', None)):
if not (self.source or getattr(self, 'add_objects', None) or getattr(self, 'uselib_local', None)):
raise Utils.WafError('no source files specified for %s' % self)
if not self.target:
raise Utils.WafError('no target for %s' % self)
@ -329,10 +329,13 @@ def apply_link(self):
self.link_task = tsk
@feature('cc', 'cxx')
@after('apply_link', 'init_cc', 'init_cxx')
@after('apply_link', 'init_cc', 'init_cxx', 'apply_core')
def apply_lib_vars(self):
"""after apply_link because of 'link_task'
after default_cc because of the attribute 'uselib'"""
# after 'apply_core' in case if 'cc' if there is no link
env = self.env
# 1. the case of the libs defined in the project (visit ancestors first)

tools/wafadmin/Tools/compiler_cc.py (17)

@ -11,7 +11,7 @@ c_compiler = {
'win32': ['msvc', 'gcc'],
'cygwin': ['gcc'],
'darwin': ['gcc'],
'aix5': ['gcc'],
'aix': ['xlc', 'gcc'],
'linux': ['gcc', 'icc', 'suncc'],
'sunos': ['gcc', 'suncc'],
'irix': ['gcc'],
@ -33,17 +33,24 @@ def detect(conf):
"""
try: test_for_compiler = Options.options.check_c_compiler
except AttributeError: conf.fatal("Add set_options(opt): opt.tool_options('compiler_cc')")
orig = conf.env
for compiler in test_for_compiler.split():
conf.env = orig.copy()
try:
conf.check_tool(compiler)
except Configure.ConfigurationError, e:
debug('compiler_cc: %r' % e)
else:
if conf.env['CC']:
orig.table = conf.env.get_merged_dict()
conf.env = orig
conf.check_message(compiler, '', True)
conf.env['COMPILER_CC'] = compiler
break
conf.check_message(compiler, '', False)
break
else:
conf.fatal('could not configure a c compiler!')
def set_options(opt):
build_platform = Utils.unversioned_sys_platform()
@ -57,11 +64,3 @@ def set_options(opt):
for c_compiler in test_for_compiler.split():
opt.tool_options('%s' % c_compiler, option_group=cc_compiler_opts)
"""opt.add_option('-d', '--debug-level',
action = 'store',
default = ccroot.DEBUG_LEVELS.RELEASE,
help = "Specify the debug level, does nothing if CFLAGS is set in the environment. [Allowed Values: '%s']" % "', '".join(ccroot.DEBUG_LEVELS.ALL),
choices = ccroot.DEBUG_LEVELS.ALL,
dest = 'debug_level')"""

tools/wafadmin/Tools/compiler_cxx.py (16)

@ -11,7 +11,7 @@ cxx_compiler = {
'win32': ['msvc', 'g++'],
'cygwin': ['g++'],
'darwin': ['g++'],
'aix': ['g++'],
'aix': ['xlc++', 'g++'],
'linux': ['g++', 'icpc', 'sunc++'],
'sunos': ['g++', 'sunc++'],
'irix': ['g++'],
@ -28,17 +28,24 @@ def __list_possible_compiler(platform):
def detect(conf):
try: test_for_compiler = Options.options.check_cxx_compiler
except AttributeError: raise Configure.ConfigurationError("Add set_options(opt): opt.tool_options('compiler_cxx')")
orig = conf.env
for compiler in test_for_compiler.split():
try:
conf.env = orig.copy()
conf.check_tool(compiler)
except Configure.ConfigurationError, e:
debug('compiler_cxx: %r' % e)
else:
if conf.env['CXX']:
orig.table = conf.env.get_merged_dict()
conf.env = orig
conf.check_message(compiler, '', True)
conf.env['COMPILER_CXX'] = compiler
break
conf.check_message(compiler, '', False)
break
else:
conf.fatal('could not configure a cxx compiler!')
def set_options(opt):
build_platform = Utils.unversioned_sys_platform()
@ -52,10 +59,3 @@ def set_options(opt):
for cxx_compiler in test_for_compiler.split():
opt.tool_options('%s' % cxx_compiler, option_group=cxx_compiler_opts)
"""opt.add_option('-d', '--debug-level',
action = 'store',
default = ccroot.DEBUG_LEVELS.RELEASE,
help = "Specify the debug level, does nothing if CXXFLAGS is set in the environment. [Allowed Values: '%s']" % "', '".join(ccroot.DEBUG_LEVELS.ALL),
choices = ccroot.DEBUG_LEVELS.ALL,
dest = 'debug_level')"""

tools/wafadmin/Tools/config_c.py (94)

@ -100,6 +100,11 @@ def validate_cfg(self, kw):
if 'modversion' in kw:
return
if 'variables' in kw:
if not 'msg' in kw:
kw['msg'] = 'Checking for %s variables' % kw['package']
return
# checking for the version of a module, for the moment, one thing at a time
for x in cfg_ver.keys():
y = x.replace('-', '_')
@ -112,7 +117,7 @@ def validate_cfg(self, kw):
return
if not 'msg' in kw:
kw['msg'] = 'Checking for %s' % kw['package']
kw['msg'] = 'Checking for %s' % (kw['package'] or kw['path'])
if not 'okmsg' in kw:
kw['okmsg'] = 'ok'
if not 'errmsg' in kw:
@ -121,22 +126,31 @@ def validate_cfg(self, kw):
@conf
def cmd_and_log(self, cmd, kw):
Logs.debug('runner: %s\n' % cmd)
if self.log: self.log.write('%s\n' % cmd)
if self.log:
self.log.write('%s\n' % cmd)
try:
p = Utils.pproc.Popen(cmd, stdout=Utils.pproc.PIPE, shell=True)
output = p.communicate()[0]
except OSError:
self.fatal('fail')
p = Utils.pproc.Popen(cmd, stdout=Utils.pproc.PIPE, stderr=Utils.pproc.PIPE, shell=True)
(out, err) = p.communicate()
except OSError, e:
self.log.write('error %r' % e)
self.fatal(str(e))
out = str(out)
err = str(err)
if self.log:
self.log.write(out)
self.log.write(err)
if p.returncode:
if not kw.get('errmsg', ''):
if kw.get('mandatory', False):
kw['errmsg'] = output.strip()
kw['errmsg'] = out.strip()
else:
kw['errmsg'] = 'fail'
self.fatal('fail')
return output
return out
@conf
def exec_cfg(self, kw):
@ -165,6 +179,18 @@ def exec_cfg(self, kw):
self.define('%s_VERSION' % Utils.quote_define_name(kw.get('uselib_store', kw['modversion'])), version)
return version
# retrieving variables of a module
if 'variables' in kw:
env = kw.get('env', self.env)
uselib = kw.get('uselib_store', kw['package'].upper())
vars = Utils.to_list(kw['variables'])
for v in vars:
val = self.cmd_and_log('%s --variable=%s %s' % (kw['path'], v, kw['package']), kw).strip()
env.append_unique('%s_%s' % (uselib, v), val)
if not 'okmsg' in kw:
kw['okmsg'] = 'ok'
return
lst = [kw['path']]
for key, val in kw.get('define_variable', {}).iteritems():
lst.append('--define-variable=%s=%s' % (key, val))
@ -184,6 +210,12 @@ def exec_cfg(self, kw):
@conf
def check_cfg(self, *k, **kw):
"""
for pkg-config mostly, but also all the -config tools
conf.check_cfg(path='mpicc', args='--showme:compile --showme:link', package='', uselib_store='OPEN_MPI')
conf.check_cfg(package='dbus-1', variables='system_bus_default_address session_bus_services_dir')
"""
self.validate_cfg(kw)
if 'msg' in kw:
self.check_message_1(kw['msg'])
@ -213,7 +245,7 @@ def check_cfg(self, *k, **kw):
# env: an optional environment (modified -> provide a copy)
# compiler: cc or cxx - it tries to guess what is best
# type: program, shlib, staticlib, objects
# type: cprogram, cshlib, cstaticlib
# code: a c code to execute
# uselib_store: where to add the variables
# uselib: parameters to use for building
@ -364,23 +396,26 @@ def validate_c(self, kw):
def post_check(self, *k, **kw):
"set the variables after a test was run successfully"
is_success = 0
is_success = False
if kw['execute']:
if kw['success']:
is_success = kw['success']
is_success = True
else:
is_success = (kw['success'] == 0)
def define_or_stuff():
nm = kw['define_name']
if kw['execute'] and kw.get('define_ret', None) and isinstance(is_success, str):
self.define(kw['define_name'], is_success, quote=kw.get('quote', 1))
else:
self.define_cond(kw['define_name'], is_success)
if 'define_name' in kw:
if 'header_name' in kw or 'function_name' in kw or 'type_name' in kw or 'fragment' in kw:
define_or_stuff()
if kw['execute']:
key = kw['success']
if isinstance(key, str):
if key:
self.define(kw['define_name'], key, quote=kw.get('quote', 1))
else:
self.define_cond(kw['define_name'], True)
else:
self.define_cond(kw['define_name'], False)
else:
self.define_cond(kw['define_name'], is_success)
if is_success and 'uselib_store' in kw:
import cc, cxx
@ -478,7 +513,7 @@ def run_c_code(self, *k, **kw):
bld.rescan(bld.srcnode)
o = bld.new_task_gen(features=[kw['compile_mode'], kw['type']], source=test_f_name, target='testprog')
o = bld(features=[kw['compile_mode'], kw['type']], source=test_f_name, target='testprog')
for k, v in kw.iteritems():
setattr(o, k, v)
@ -507,11 +542,18 @@ def run_c_code(self, *k, **kw):
# if we need to run the program, try to get its result
if kw['execute']:
args = Utils.to_list(kw.get('exec_args', []))
try:
data = Utils.cmd_output([lastprog] + args).strip()
except ValueError, e:
proc = Utils.pproc.Popen([lastprog], *args, stdout=Utils.pproc.PIPE, stderr=Utils.pproc.PIPE)
(out, err) = proc.communicate()
w = self.log.write
w(str(out))
w('\n')
w(str(err))
w('\n')
w('returncode %r' % proc.returncode)
w('\n')
if proc.returncode:
self.fatal(Utils.ex_stack())
ret = data
ret = out
return ret
@ -540,7 +582,7 @@ def define(self, define, value, quote=1):
# the user forgot to tell if the value is quoted or not
if isinstance(value, str):
if quote:
tbl[define] = '"%s"' % str(value)
tbl[define] = '"%s"' % repr('"'+value)[2:-1].replace('"', '\\"')
else:
tbl[define] = value
elif isinstance(value, int):
@ -643,8 +685,6 @@ def get_config_header(self):
config_header.append('#define %s' % key)
elif value is UNDEFINED:
config_header.append('/* #undef %s */' % key)
elif isinstance(value, str):
config_header.append('#define %s %s' % (key, repr(value)[1:-1]))
else:
config_header.append('#define %s %s' % (key, value))
return "\n".join(config_header)

tools/wafadmin/Tools/gas.py (7)

@ -23,14 +23,13 @@ def asm_hook(self, node):
self.compiled_tasks.append(task)
self.meths.append('asm_incflags')
@taskgen
@after('apply_obj_vars_cc')
@after('apply_obj_vars_cxx')
@before('apply_link')
def asm_incflags(self):
if self.env['ASINCFLAGS']: self.env['_ASINCFLAGS'] = self.env['ASINCFLAGS']
if 'cxx' in self.features: self.env['_ASINCFLAGS'] = self.env['_CXXINCFLAGS']
else: self.env['_ASINCFLAGS'] = self.env['_CCINCFLAGS']
self.env.append_value('_ASINCFLAGS', self.env.ASINCFLAGS)
var = ('cxx' in self.features) and 'CXX' or 'CC'
self.env.append_value('_ASINCFLAGS', self.env['_%sINCFLAGS' % var])
def detect(conf):
conf.find_program(['gas', 'as'], var='AS')

tools/wafadmin/Tools/gnome.py (17)

@ -66,13 +66,18 @@ def apply_gnome_doc(self):
self.env['APPNAME'] = self.doc_module
lst = self.to_list(self.doc_linguas)
bld = self.bld
lst.append('C')
for x in lst:
if not x == 'C':
tsk = self.create_task('xml2po')
node = self.path.find_resource(x+'/'+x+'.po')
src = self.path.find_resource('C/%s.xml' % self.doc_module)
out = self.path.find_or_declare('%s/%s.xml' % (x, self.doc_module))
tsk.set_inputs([node, src])
tsk.set_outputs(out)
else:
out = self.path.find_resource('%s/%s.xml' % (x, self.doc_module))
tsk2 = self.create_task('xsltproc2po')
out2 = self.path.find_or_declare('%s/%s-%s.omf' % (x, self.doc_module, x))
@ -83,8 +88,8 @@ def apply_gnome_doc(self):
tsk2.run_after.append(tsk)
if bld.is_install:
path = self.install_path + 'gnome/help/%s/%s' % (self.doc_module, x)
bld.install_files(self.install_path + 'omf', out2, env=self.env)
path = self.install_path + '/gnome/help/%s/%s' % (self.doc_module, x)
bld.install_files(self.install_path + '/omf', out2, env=self.env)
for y in self.to_list(self.doc_figures):
try:
os.stat(self.path.abspath() + '/' + x + '/' + y)
@ -92,6 +97,12 @@ def apply_gnome_doc(self):
except:
bld.install_as(path + '/' + y, self.path.abspath() + '/C/' + y)
bld.install_as(path + '/%s.xml' % self.doc_module, out.abspath(self.env))
if x == 'C':
xmls = self.to_list(self.doc_includes)
xmls.append(self.doc_entities)
for z in xmls:
out = self.path.find_resource('%s/%s' % (x, z))
bld.install_as(path + '/%s' % z, out.abspath(self.env))
# OBSOLETE
class xml_to_taskgen(TaskGen.task_gen):
@ -180,7 +191,7 @@ Task.simple_task_type('xml2po', '${XML2PO} ${XML2POFLAGS} ${SRC} > ${TGT}', colo
xslt_magic = """${XSLTPROC2PO} -o ${TGT[0].abspath(env)} \
--stringparam db2omf.basename ${APPNAME} \
--stringparam db2omf.format docbook \
--stringparam db2omf.lang C \
--stringparam db2omf.lang ${TGT[0].abspath(env)[:-4].split('-')[-1]} \
--stringparam db2omf.dtd '-//OASIS//DTD DocBook XML V4.3//EN' \
--stringparam db2omf.omf_dir ${PREFIX}/share/omf \
--stringparam db2omf.help_dir ${PREFIX}/share/gnome/help \

tools/wafadmin/Tools/intltool.py (5)

@ -12,11 +12,10 @@ from Logs import error
"""
Usage:
bld.new_task_gen(features='intltool_in', source='a.po b.po', podir='po', cache='.intlcache', flags='')
bld(features='intltool_in', source='a.po b.po', podir='po', cache='.intlcache', flags='')
"""
class intltool_in_taskgen(TaskGen.task_gen):
"""deprecated"""
def __init__(self, *k, **kw):

tools/wafadmin/Tools/javaw.py (11)

@ -123,16 +123,15 @@ def apply_java(self):
tsk.env.append_value('JAVACFLAGS', ['-sourcepath', names])
if self.jarname:
tsk = self.create_task('jar_create')
tsk.set_inputs(bld_nodes)
tsk.set_outputs(self.path.find_or_declare(self.jarname))
jtsk = self.create_task('jar_create', bld_nodes, self.path.find_or_declare(self.jarname))
jtsk.set_run_after(tsk)
if not self.env['JAROPTS']:
if not self.env.JAROPTS:
if self.jaropts:
self.env['JAROPTS'] = self.jaropts
self.env.JAROPTS = self.jaropts
else:
dirs = '.'
self.env['JAROPTS'] = ['-C', ''.join(self.env['OUTDIR']), dirs]
self.env.JAROPTS = ['-C', ''.join(self.env['OUTDIR']), dirs]
Task.simple_task_type('jar_create', '${JAR} ${JARCREATE} ${TGT} ${JAROPTS}', color='GREEN')
cls = Task.simple_task_type('javac', '${JAVAC} -classpath ${CLASSPATH} -d ${OUTDIR} ${JAVACFLAGS} ${SRC}')

tools/wafadmin/Tools/lua.py (2)

@ -12,7 +12,7 @@ TaskGen.declare_chain(
rule = '${LUAC} -s -o ${TGT} ${SRC}',
ext_in = '.lua',
ext_out = '.luac',
reentrant = 0,
reentrant = False,
install = 'LUADIR', # env variable
)

tools/wafadmin/Tools/misc.py (1)

@ -72,6 +72,7 @@ def apply_copy(self):
tsk = self.create_task('copy', node, newnode)
tsk.fun = self.fun
tsk.chmod = self.chmod
tsk.install_path = self.install_path
if not tsk.env:
tsk.debug()

tools/wafadmin/Tools/msvc.py (138)

@ -80,15 +80,15 @@ def setup_msvc(conf, versions):
for target in platforms:
try:
arch,(p1,p2,p3) = targets[target]
compiler,version = version.split()
return compiler,p1,p2,p3
compiler,revision = version.split()
return compiler,revision,p1,p2,p3
except KeyError: continue
except KeyError: continue
conf.fatal('msvc: Impossible to find a valid architecture for building (in setup_msvc)')
@conf
def get_msvc_version(conf, compiler, version, target, vcvars):
debug('msvc: get_msvc_version: ' + compiler + ' ' + version + ' ' + target + ' ...')
debug('msvc: get_msvc_version: %r %r %r', compiler, version, target)
batfile = os.path.join(conf.blddir, 'waf-print-msvc.bat')
f = open(batfile, 'w')
f.write("""@echo off
@ -107,7 +107,7 @@ echo LIB=%%LIB%%
if lines[0].find(x) != -1:
break
else:
debug('msvc: get_msvc_version: %r %r %r -> not found' % (compiler, version, target))
debug('msvc: get_msvc_version: %r %r %r -> not found', compiler, version, target)
conf.fatal('msvc: Impossible to find a valid architecture for building (in get_msvc_version)')
for line in lines[1:]:
@ -136,11 +136,11 @@ echo LIB=%%LIB%%
if p.returncode != 0:
raise Exception('return code: %r: %r' % (p.returncode, err))
except Exception, e:
debug('msvc: get_msvc_version: %r %r %r -> failure' % (compiler, version, target))
debug('msvc: get_msvc_version: %r %r %r -> failure', compiler, version, target)
debug(str(e))
conf.fatal('msvc: cannot run the compiler (in get_msvc_version)')
else:
debug('msvc: get_msvc_version: %r %r %r -> OK' % (compiler, version, target))
debug('msvc: get_msvc_version: %r %r %r -> OK', compiler, version, target)
return (MSVC_PATH, MSVC_INCDIR, MSVC_LIBDIR)
@ -297,12 +297,13 @@ def gather_icl_versions(conf, versions):
@conf
def get_msvc_versions(conf):
if not conf.env['MSVC_INSTALLED_VERSIONS']:
conf.env['MSVC_INSTALLED_VERSIONS'] = []
conf.gather_msvc_versions(conf.env['MSVC_INSTALLED_VERSIONS'])
conf.gather_wsdk_versions(conf.env['MSVC_INSTALLED_VERSIONS'])
conf.gather_icl_versions(conf.env['MSVC_INSTALLED_VERSIONS'])
return conf.env['MSVC_INSTALLED_VERSIONS']
if not conf.env.MSVC_INSTALLED_VERSIONS:
lst = []
conf.gather_msvc_versions(lst)
conf.gather_wsdk_versions(lst)
conf.gather_icl_versions(lst)
conf.env.MSVC_INSTALLED_VERSIONS = lst
return conf.env.MSVC_INSTALLED_VERSIONS
@conf
def print_all_msvc_detected(conf):
@ -401,7 +402,7 @@ def libname_msvc(self, libname, is_static=False, mandatory=False):
for path in _libpaths:
for libn in libnames:
if os.path.exists(os.path.join(path, libn)):
debug('msvc: lib found: %s' % os.path.join(path,libn))
debug('msvc: lib found: %s', os.path.join(path,libn))
return re.sub('\.lib$', '',libn)
#if no lib can be found, just return the libname as msvc expects it
@ -449,7 +450,7 @@ link_add_flags
@conftest
def autodetect(conf):
v = conf.env
compiler, path, includes, libdirs = detect_msvc(conf)
compiler, version, path, includes, libdirs = detect_msvc(conf)
v['PATH'] = path
v['CPPPATH'] = includes
v['LIBPATH'] = libdirs
@ -475,19 +476,16 @@ def find_msvc(conf):
v = conf.env
compiler, path, includes, libdirs = detect_msvc(conf)
v['PATH'] = path
v['CPPPATH'] = includes
v['LIBPATH'] = libdirs
compiler, version, path, includes, libdirs = detect_msvc(conf)
compiler_name, linker_name, lib_name = _get_prog_names(conf, compiler)
has_msvc_manifest = (compiler == 'msvc' and float(version) >= 8) or (compiler == 'wsdk' and float(version) >= 6) or (compiler == 'intel' and float(version) >= 11)
# compiler
cxx = None
if v['CXX']: cxx = v['CXX']
if v.CXX: cxx = v.CXX
elif 'CXX' in conf.environ: cxx = conf.environ['CXX']
if not cxx: cxx = conf.find_program(compiler_name, var='CXX', path_list=path)
if not cxx: conf.fatal('%s was not found (compiler)' % compiler_name)
if not cxx: cxx = conf.find_program(compiler_name, var='CXX', path_list=path, mandatory=True)
cxx = conf.cmd_to_list(cxx)
# before setting anything, check if the compiler is really msvc
@ -496,43 +494,50 @@ def find_msvc(conf):
if not Utils.cmd_output([cxx, '/nologo', '/?'], silent=True, env=env):
conf.fatal('the msvc compiler could not be identified')
# c/c++ compiler
v['CC'] = v['CXX'] = cxx
v['CC_NAME'] = v['CXX_NAME'] = 'msvc'
link = v.LINK_CXX
if not link:
link = conf.find_program(linker_name, path_list=path, mandatory=True)
ar = v.AR
if not ar:
ar = conf.find_program(lib_name, path_list=path, mandatory=True)
# environment flags
try: v.prepend_value('CPPPATH', conf.environ['INCLUDE'])
except KeyError: pass
try: v.prepend_value('LIBPATH', conf.environ['LIB'])
except KeyError: pass
# manifest tool. Not required for VS 2003 and below. Must have for VS 2005 and later
mt = v.MT
if has_msvc_manifest:
mt = conf.find_program('MT', path_list=path, mandatory=True)
# linker
if not v['LINK_CXX']:
link = conf.find_program(linker_name, path_list=path)
if link: v['LINK_CXX'] = link
else: conf.fatal('%s was not found (linker)' % linker_name)
v['LINK'] = link
# no more possibility of failure means the data state will be consistent
# we may store the data safely now
if not v['LINK_CC']: v['LINK_CC'] = v['LINK_CXX']
v.MSVC_MANIFEST = has_msvc_manifest
v.PATH = path
v.CPPPATH = includes
v.LIBPATH = libdirs
# staticlib linker
if not v['AR']:
stliblink = conf.find_program(lib_name, path_list=path)
if not stliblink: return
v['AR'] = stliblink
v['ARFLAGS'] = ['/NOLOGO']
# c/c++ compiler
v.CC = v.CXX = cxx
v.CC_NAME = v.CXX_NAME = 'msvc'
v.LINK = v.LINK_CXX = link
if not v.LINK_CC:
v.LINK_CC = v.LINK_CXX
v.AR = ar
v.MT = mt
v.MTFLAGS = v.ARFLAGS = ['/NOLOGO']
# manifest tool. Not required for VS 2003 and below. Must have for VS 2005 and later
manifesttool = conf.find_program('MT', path_list=path)
if manifesttool:
v['MT'] = manifesttool
v['MTFLAGS'] = ['/NOLOGO']
conf.check_tool('winres')
if not conf.env['WINRC']:
if not conf.env.WINRC:
warn('Resource compiler not found. Compiling resource file is disabled')
# environment flags
try: v.prepend_value('CPPPATH', conf.environ['INCLUDE'])
except KeyError: pass
try: v.prepend_value('LIBPATH', conf.environ['LIB'])
except KeyError: pass
@conftest
def msvc_common_flags(conf):
v = conf.env
@ -702,28 +707,28 @@ def apply_manifest(self):
"""Special linker for MSVC with support for embedding manifests into DLL's
and executables compiled by Visual Studio 2005 or probably later. Without
the manifest file, the binaries are unusable.
See: http://msdn2.microsoft.com/en-us/library/ms235542(VS.80).aspx
Problems with this tool: it is always called whether MSVC creates manifests or not."""
if self.env.CC_NAME != 'msvc':
return
See: http://msdn2.microsoft.com/en-us/library/ms235542(VS.80).aspx"""
tsk = self.create_task('msvc_manifest')
tsk.set_inputs(self.link_task.outputs[0])
if self.env.CC_NAME == 'msvc' and self.env.MSVC_MANIFEST:
out_node = self.link_task.outputs[0]
man_node = out_node.parent.find_or_declare(out_node.name + '.manifest')
self.link_task.outputs.append(man_node)
self.link_task.do_manifest = True
def exec_mf(self):
env = self.env
outfile = self.inputs[0].bldpath(env)
manifest = outfile + '.manifest'
if os.path.exists(manifest):
debug('msvc: manifesttool')
mtool = env['MT']
if not mtool:
return 0
mode = ''
self.do_manifest = False
outfile = self.outputs[0].bldpath(env)
manifest = self.outputs[-1].bldpath(env)
# embedding mode. Different for EXE's and DLL's.
# see: http://msdn2.microsoft.com/en-us/library/ms235591(VS.80).aspx
mode = ''
if 'cprogram' in self.generator.features:
mode = '1'
elif 'cshlib' in self.generator.features:
@ -742,12 +747,7 @@ def exec_mf(self):
#cmd='%s %s -manifest "%s" -outputresource:"%s";#%s' % (mtool, flags,
# manifest, outfile, mode)
lst = [lst]
ret = self.exec_command(*lst)
return ret
cls = Task.task_type_from_func('msvc_manifest', vars=['MT', 'MTFLAGS'], color='BLUE', func=exec_mf, ext_in='.bin')
cls.quiet = 1
return self.exec_command(*lst)
########## stupid evil command modification: concatenate the tokens /Fx, /doc, and /x: with the next token
@ -769,7 +769,11 @@ def exec_command_msvc(self, *k, **kw):
env.update(PATH = ';'.join(self.env['PATH']))
kw['env'] = env
return self.generator.bld.exec_command(*k, **kw)
ret = self.generator.bld.exec_command(*k, **kw)
if ret: return ret
if getattr(self, 'do_manifest', None):
ret = exec_mf(self)
return ret
for k in 'cc cxx winrc cc_link cxx_link static_link qxx'.split():
cls = Task.TaskBase.classes.get(k, None)

tools/wafadmin/Tools/perl.py (72)

@ -36,31 +36,29 @@ def check_perl_version(conf, minver=None):
Perl binary can be overridden by --with-perl-binary config variable
"""
res = True
if not getattr(Options.options, 'perlbinary', None):
perl = conf.find_program("perl", var="PERL")
if not perl:
return False
if getattr(Options.options, 'perlbinary', None):
conf.env.PERL = Options.options.perlbinary
else:
perl = Options.options.perlbinary
conf.env['PERL'] = perl
version = Utils.cmd_output(perl + " -e'printf \"%vd\", $^V'")
if not version:
res = False
version = "Unknown"
elif not minver is None:
ver = tuple(map(int, version.split(".")))
conf.find_program('perl', var='PERL', mandatory=True)
try:
version = Utils.cmd_output([conf.env.PERL, '-e', 'printf "%vd",$^V'])
except:
conf.fatal('could not determine the perl version')
conf.env.PERL_VERSION = version
cver = ''
if minver:
try:
ver = tuple(map(int, version.split('.')))
except:
conf.fatal('unsupported perl version %r' % version)
if ver < minver:
res = False
conf.fatal('perl is too old')
if minver is None:
cver = ""
else:
cver = ".".join(map(str,minver))
conf.check_message("perl", cver, res, version)
return res
cver = '.'.join(map(str,minver))
conf.check_message('perl', cver, True, version)
@conf
def check_perl_module(conf, module):
@ -85,31 +83,25 @@ def check_perl_ext_devel(conf):
Sets different xxx_PERLEXT variables in the environment.
Also sets the ARCHDIR_PERL variable useful as installation path,
which can be overridden by --with-perl-archdir option.
which can be overridden by --with-perl-archdir
"""
if not conf.env['PERL']:
return False
perl = conf.env['PERL']
if not conf.env.PERL:
conf.fatal('perl detection is required first')
def read_out(cmd):
return Utils.to_list(Utils.cmd_output(perl + cmd))
conf.env["LINKFLAGS_PERLEXT"] = read_out(" -MConfig -e'print $Config{lddlflags}'")
conf.env["CPPPATH_PERLEXT"] = read_out(" -MConfig -e'print \"$Config{archlib}/CORE\"'")
conf.env["CCFLAGS_PERLEXT"] = read_out(" -MConfig -e'print \"$Config{ccflags} $Config{cccdlflags}\"'")
return Utils.to_list(Utils.cmd_output([conf.env.PERL, '-MConfig', '-e', cmd]))
conf.env["XSUBPP"] = read_out(" -MConfig -e'print \"$Config{privlib}/ExtUtils/xsubpp$Config{exe_ext}\"'")
conf.env["EXTUTILS_TYPEMAP"] = read_out(" -MConfig -e'print \"$Config{privlib}/ExtUtils/typemap\"'")
conf.env.LINKFLAGS_PERLEXT = read_out('print $Config{lddlflags}')
conf.env.CPPPATH_PERLEXT = read_out('print "$Config{archlib}/CORE"')
conf.env.CCFLAGS_PERLEXT = read_out('print "$Config{ccflags} $Config{cccdlflags}"')
conf.env.XSUBPP = read_out('print "$Config{privlib}/ExtUtils/xsubpp$Config{exe_ext}"')
conf.env.EXTUTILS_TYPEMAP = read_out('print "$Config{privlib}/ExtUtils/typemap"')
conf.env.perlext_PATTERN = '%s.' + read_out('print $Config{dlext}')[0]
if not getattr(Options.options, 'perlarchdir', None):
conf.env["ARCHDIR_PERL"] = Utils.cmd_output(perl + " -MConfig -e'print $Config{sitearch}'")
if getattr(Options.options, 'perlarchdir', None):
conf.env.ARCHDIR_PERL = Options.options.perlarchdir
else:
conf.env["ARCHDIR_PERL"] = getattr(Options.options, 'perlarchdir')
conf.env['perlext_PATTERN'] = '%s.' + Utils.cmd_output(perl + " -MConfig -e'print $Config{dlext}'")
return True
conf.env.ARCHDIR_PERL = read_out('print $Config{sitearch}')[0]
def set_options(opt):
opt.add_option("--with-perl-binary", type="string", dest="perlbinary", help = 'Specify alternate perl binary', default=None)

12
tools/wafadmin/Tools/preproc.py

@ -638,7 +638,7 @@ class c_parser(object):
self.count_files += 1
if self.count_files > 30000: raise PreprocError("recursion limit exceeded")
pc = self.parse_cache
debug('preproc: reading file %r' % filepath)
debug('preproc: reading file %r', filepath)
try:
lns = pc[filepath]
except KeyError:
@ -660,7 +660,7 @@ class c_parser(object):
traceback.print_exc()
def start(self, node, env):
debug('preproc: scanning %s (in %s)' % (node.name, node.parent.name))
debug('preproc: scanning %s (in %s)', node.name, node.parent.name)
self.env = env
variant = node.variant(env)
@ -684,11 +684,11 @@ class c_parser(object):
self.process_line(kind, line)
except Exception, e:
if Logs.verbose:
debug('preproc: line parsing failed (%s): %s %s' % (e, line, Utils.ex_stack()))
debug('preproc: line parsing failed (%s): %s %s', e, line, Utils.ex_stack())
def process_line(self, token, line):
ve = Logs.verbose
if ve: debug('preproc: line is %s - %s state is %s' % (token, line, self.state))
if ve: debug('preproc: line is %s - %s state is %s', token, line, self.state)
state = self.state
# make certain we define the state if we are about to enter an if block
@ -718,7 +718,7 @@ class c_parser(object):
(kind, inc) = extract_include(line, self.defs)
if inc in self.ban_includes: return
if token == 'import': self.ban_includes.append(inc)
if ve: debug('preproc: include found %s (%s) ' % (inc, kind))
if ve: debug('preproc: include found %s (%s) ', inc, kind)
if kind == '"' or not strict_quotes:
self.tryfind(inc)
elif token == 'elif':
@ -734,7 +734,7 @@ class c_parser(object):
m = re_mac.search(line)
if m:
name = m.group(0)
if ve: debug('preproc: define %s %s' % (name, line))
if ve: debug('preproc: define %s %s', name, line)
self.defs[name] = line
else:
raise PreprocError("invalid define line %s" % line)
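The debug() changes above switch from eager '%'-interpolation to passing the arguments through, so the string is only formatted when the message is actually emitted. The same pattern with the standard logging module, for comparison:

import logging
logging.basicConfig(level=logging.WARNING)
log = logging.getLogger('preproc-example')

filepath = '/tmp/example.h'                         # hypothetical value
log.debug('preproc: reading file %r', filepath)     # formatted only if DEBUG is enabled
log.debug('preproc: reading file %r' % filepath)    # old style: always pays the formatting cost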

4
tools/wafadmin/Tools/python.py

@ -360,10 +360,10 @@ def check_python_version(conf, minver=None):
conf.check_message_custom('Python version', '', pyver_full)
else:
minver_str = '.'.join(map(str, minver))
conf.check_message('Python version', ">= %s" % (minver_str,), result, option=pyver_full)
conf.check_message('Python version', ">= %s" % minver_str, result, option=pyver_full)
if not result:
conf.fatal('The python version is too old (%r)' % minver)
conf.fatal('The python version is too old (%r)' % pyver_full)
@conf
def check_python_module(conf, module_name):

45
tools/wafadmin/Tools/qt4.py

@ -21,7 +21,7 @@ else:
import os, sys
import ccroot, cxx
import TaskGen, Task, Utils, Runner, Options, Node
import TaskGen, Task, Utils, Runner, Options, Node, Configure
from TaskGen import taskgen, feature, after, extension
from Logs import error
from Constants import *
@ -261,12 +261,8 @@ def apply_qt4(self):
k = create_rcc_task(self, t.outputs[0])
self.link_task.inputs.append(k.outputs[0])
lst = []
for flag in self.to_list(self.env['CXXFLAGS']):
if len(flag) < 2: continue
if flag[0:2] == '-D' or flag[0:2] == '-I':
lst.append(flag)
self.env['MOC_FLAGS'] = lst
self.env.append_value('MOC_FLAGS', self.env._CXXDEFFLAGS)
self.env.append_value('MOC_FLAGS', self.env._CXXINCFLAGS)
@extension(EXT_QT4)
def cxx_hook(self, node):
@ -366,7 +362,7 @@ def detect_qt4(conf):
if not qtlibs:
try:
qtlibs = Utils.cmd_output([qmake, '-query', 'QT_LIBRARIES']).strip() + os.sep
qtlibs = Utils.cmd_output([qmake, '-query', 'QT_INSTALL_LIBS']).strip() + os.sep
except ValueError:
qtlibs = os.path.join(qtdir, 'lib')
@ -409,10 +405,39 @@ def detect_qt4(conf):
vars_debug = [a+'_debug' for a in vars]
pkgconfig = env['pkg-config'] or 'PKG_CONFIG_PATH=%s:%s/pkgconfig:/usr/lib/qt4/lib/pkgconfig:/opt/qt4/lib/pkgconfig:/usr/lib/qt4/lib:/opt/qt4/lib pkg-config --silence-errors' % (qtlibs, qtlibs)
try:
conf.find_program('pkg-config', var='pkgconfig', path_list=paths, mandatory=True)
except Configure.ConfigurationError:
for lib in vars_debug+vars:
uselib = lib.upper()
d = (lib.find('_debug') > 0) and 'd' or ''
# original author seems to prefer static to shared libraries
for (pat, kind) in ((conf.env.staticlib_PATTERN, 'STATIC'), (conf.env.shlib_PATTERN, '')):
conf.check_message_1('Checking for %s %s' % (lib, kind))
for ext in ['', '4']:
path = os.path.join(qtlibs, pat % (lib + d + ext))
if os.path.exists(path):
env.append_unique(kind + 'LIB_' + uselib, lib + d + ext)
conf.check_message_2('ok ' + path, 'GREEN')
break
else:
conf.check_message_2('not found', 'YELLOW')
continue
break
env.append_unique('LIBPATH_' + uselib, qtlibs)
env.append_unique('CPPPATH_' + uselib, qtincludes)
env.append_unique('CPPPATH_' + uselib, qtincludes + os.sep + lib)
else:
for i in vars_debug+vars:
try:
conf.check_cfg(package=i, args='--cflags --libs', path=pkgconfig)
conf.check_cfg(package=i, args='--cflags --libs --silence-errors', path=conf.env.pkgconfig)
except ValueError:
pass
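The pkg-config fallback above simply probes the Qt library directory for a matching file name, static pattern first, optionally with a '4' suffix. A rough standalone sketch (directory and patterns are illustrative, not taken from the diff):

import os

def find_qt_lib(qtlibs, lib, patterns=('lib%s.a', 'lib%s.so')):
    for pat in patterns:              # prefer static, then shared
        for ext in ('', '4'):         # e.g. QtCore vs QtCore4
            path = os.path.join(qtlibs, pat % (lib + ext))
            if os.path.exists(path):
                return path
    return None

# find_qt_lib('/usr/lib/qt4/lib', 'QtCore')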

120
tools/wafadmin/Tools/ruby.py

@ -0,0 +1,120 @@
#!/usr/bin/env python
# encoding: utf-8
# daniel.svensson at purplescout.se 2008
import os
import Task, Options, Utils
from TaskGen import before, feature, after
from Configure import conf
@feature('rubyext')
@before('apply_incpaths', 'apply_type_vars', 'apply_lib_vars', 'apply_bundle')
@after('default_cc', 'vars_target_cshlib')
def init_rubyext(self):
self.default_install_path = '${ARCHDIR_RUBY}'
self.uselib = self.to_list(getattr(self, 'uselib', ''))
if not 'RUBY' in self.uselib:
self.uselib.append('RUBY')
if not 'RUBYEXT' in self.uselib:
self.uselib.append('RUBYEXT')
@feature('rubyext')
@before('apply_link')
def apply_ruby_so_name(self):
self.env['shlib_PATTERN'] = self.env['rubyext_PATTERN']
@conf
def check_ruby_version(conf, minver=()):
"""
Checks if ruby is installed.
If installed the variable RUBY will be set in environment.
Ruby binary can be overridden by --with-ruby-binary config variable
"""
if Options.options.rubybinary:
conf.env.RUBY = Options.options.rubybinary
else:
conf.find_program("ruby", var="RUBY", mandatory=True)
ruby = conf.env.RUBY
try:
version = Utils.cmd_output([ruby, '-e', 'puts defined?(VERSION) ? VERSION : RUBY_VERSION']).strip()
except:
conf.fatal('could not determine ruby version')
conf.env.RUBY_VERSION = version
try:
ver = tuple(map(int, version.split(".")))
except:
conf.fatal('unsupported ruby version %r' % version)
cver = ''
if minver:
if ver < minver:
conf.fatal('ruby is too old')
cver = ".".join(str(x) for x in minver)
conf.check_message('ruby', cver, True, version)
@conf
def check_ruby_ext_devel(conf):
if not conf.env.RUBY:
conf.fatal('ruby detection is required first')
if not conf.env.CC_NAME and not conf.env.CXX_NAME:
conf.fatal('load a c/c++ compiler first')
version = tuple(map(int, conf.env.RUBY_VERSION.split(".")))
def read_out(cmd):
return Utils.to_list(Utils.cmd_output([conf.env.RUBY, '-rrbconfig', '-e', cmd]))
def read_config(key):
return read_out('puts Config::CONFIG[%r]' % key)
ruby = conf.env['RUBY']
archdir = read_config('archdir')
cpppath = archdir
if version >= (1, 9, 0):
ruby_hdrdir = read_config('rubyhdrdir')
cpppath += ruby_hdrdir
cpppath += [os.path.join(ruby_hdrdir[0], read_config('arch')[0])]
conf.check(header_name='ruby.h', includes=cpppath, mandatory=True, errmsg='could not find ruby header file')
conf.env.LIBPATH_RUBYEXT = read_config('libdir')
conf.env.LIBPATH_RUBYEXT += archdir
conf.env.CPPPATH_RUBYEXT = cpppath
conf.env.CCFLAGS_RUBYEXT = read_config("CCDLFLAGS")
conf.env.rubyext_PATTERN = '%s.' + read_config('DLEXT')[0]
# ok this is really stupid, but the command and flags are combined.
# so we try to find the first argument...
flags = read_config('LDSHARED')
while flags and flags[0][0] != '-':
flags = flags[1:]
# we also want to strip out the deprecated ppc flags
if len(flags) > 1 and flags[1] == "ppc":
flags = flags[2:]
conf.env.LINKFLAGS_RUBYEXT = flags
conf.env.LINKFLAGS_RUBYEXT += read_config("LIBS")
conf.env.LINKFLAGS_RUBYEXT += read_config("LIBRUBYARG_SHARED")
if Options.options.rubyarchdir:
conf.env.ARCHDIR_RUBY = Options.options.rubyarchdir
else:
conf.env.ARCHDIR_RUBY = read_config('sitearchdir')[0]
if Options.options.rubylibdir:
conf.env.LIBDIR_RUBY = Options.options.rubylibdir
else:
conf.env.LIBDIR_RUBY = read_config('sitelibdir')[0]
def set_options(opt):
opt.add_option('--with-ruby-archdir', type='string', dest='rubyarchdir', help='Specify directory where to install arch specific files')
opt.add_option('--with-ruby-libdir', type='string', dest='rubylibdir', help='Specify alternate ruby library path')
opt.add_option('--with-ruby-binary', type='string', dest='rubybinary', help='Specify alternate ruby binary')
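check_ruby_ext_devel above works by asking the ruby interpreter for its own build configuration. A standalone sketch of the read_config() pattern (assumes a 'ruby' binary on PATH; Config::CONFIG matches the 1.8/1.9-era rubies this tool targets):

import subprocess

def ruby_config(key):
    # same as: ruby -rrbconfig -e 'puts Config::CONFIG["<key>"]'
    cmd = ['ruby', '-rrbconfig', '-e', 'puts Config::CONFIG[%r]' % key]
    p = subprocess.Popen(cmd, stdout=subprocess.PIPE)
    return p.communicate()[0].split()

# ruby_config('archdir'), ruby_config('LDSHARED'), ruby_config('sitearchdir')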

64
tools/wafadmin/Tools/UnitTest.py → tools/wafadmin/Tools/unittestw.py

@ -201,7 +201,7 @@ Total number of tests: %i
New unit test system
The targets with feature 'test' are executed after they are built
bld.new_task_gen(features='cprogram cc test', ...)
bld(features='cprogram cc test', ...)
To display the results:
import UnitTest
@ -211,6 +211,9 @@ bld.add_post_fun(UnitTest.summary)
import threading
testlock = threading.Lock()
def set_options(opt):
opt.add_option('--alltests', action='store_true', default=True, help='Exec all unit tests', dest='all_tests')
@feature('test')
@after('apply_link', 'vars_target_cprogram')
def make_test(self):
@ -219,13 +222,13 @@ def make_test(self):
return
self.default_install_path = None
tsk = self.create_task('utest')
tsk.set_inputs(self.link_task.outputs)
self.create_task('utest', self.link_task.outputs)
def exec_test(self):
testlock.acquire()
fail = False
try:
status = 0
variant = self.env.variant()
filename = self.inputs[0].abspath(self.env)
try:
@ -237,11 +240,12 @@ def exec_test(self):
lst = []
for obj in self.generator.bld.all_task_gen:
link_task = getattr(obj, 'link_task', None)
if link_task:
if link_task and link_task.env.variant() == variant:
lst.append(link_task.outputs[0].parent.abspath(obj.env))
def add_path(dct, path, var):
dct[var] = os.pathsep.join(Utils.to_list(path) + [os.environ.get(var, '')])
if sys.platform == 'win32':
add_path(fu, lst, 'PATH')
elif sys.platform == 'darwin':
@ -250,21 +254,26 @@ def exec_test(self):
else:
add_path(fu, lst, 'LD_LIBRARY_PATH')
try:
ret = Utils.cmd_output(filename, cwd=self.inputs[0].parent.abspath(self.env), env=fu)
except Exception, e:
fail = True
ret = '' + str(e)
else:
pass
stats = getattr(self.generator.bld, 'utest_results', [])
stats.append((filename, fail, ret))
self.generator.bld.utest_results = stats
cwd = getattr(self.generator, 'ut_cwd', '') or self.inputs[0].parent.abspath(self.env)
proc = Utils.pproc.Popen(filename, cwd=cwd, env=fu, stderr=Utils.pproc.PIPE, stdout=Utils.pproc.PIPE)
(stdout, stderr) = proc.communicate()
tup = (filename, proc.returncode, stdout, stderr)
self.generator.utest_result = tup
testlock.acquire()
try:
bld = self.generator.bld
Logs.debug("ut: %r", tup)
try:
bld.utest_results.append(tup)
except AttributeError:
bld.utest_results = [tup]
finally:
testlock.release()
cls = Task.task_type_from_func('utest', func=exec_test, color='RED', ext_in='.bin')
cls = Task.task_type_from_func('utest', func=exec_test, color='PINK', ext_in='.bin')
old = cls.runnable_status
def test_status(self):
@ -279,11 +288,18 @@ def summary(bld):
lst = getattr(bld, 'utest_results', [])
if lst:
Utils.pprint('CYAN', 'execution summary')
for (f, fail, ret) in lst:
col = fail and 'RED' or 'GREEN'
Utils.pprint(col, (fail and 'FAIL' or 'ok') + " " + f)
if fail: Utils.pprint('NORMAL', ret.replace('\\n', '\n'))
def set_options(opt):
opt.add_option('--alltests', action='store_true', default=False, help='Exec all unit tests', dest='all_tests')
total = len(lst)
tfail = len([x for x in lst if x[1]])
Utils.pprint('CYAN', ' tests that pass %d/%d' % (total-tfail, total))
for (f, code, out, err) in lst:
if not code:
Utils.pprint('CYAN', ' %s' % f)
Utils.pprint('CYAN', ' tests that fail %d/%d' % (tfail, total))
for (f, code, out, err) in lst:
if code:
Utils.pprint('CYAN', ' %s' % f)
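exec_test above amounts to running each built test binary with the build directories prepended to the dynamic-linker search path and recording (filename, returncode, stdout, stderr). A simplified standalone sketch (Linux/Windows only; the real code also handles darwin and locking):

import os, sys, subprocess

def run_test(filename, lib_dirs):
    env = dict(os.environ)
    var = sys.platform == 'win32' and 'PATH' or 'LD_LIBRARY_PATH'
    env[var] = os.pathsep.join(lib_dirs + [env.get(var, '')])
    proc = subprocess.Popen(filename, cwd=os.path.dirname(filename), env=env,
                            stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    out, err = proc.communicate()
    return (filename, proc.returncode, out, err)

# run_test('/path/to/build/default/test_foo', ['/path/to/build/default'])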

77
tools/wafadmin/Tools/xlc.py

@ -0,0 +1,77 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2006-2008 (ita)
# Ralf Habacker, 2006 (rh)
# Yinon Ehrlich, 2009
# Michael Kuhn, 2009
import os, sys
import Configure, Options, Utils
import ccroot, ar
from Configure import conftest
@conftest
def find_xlc(conf):
cc = conf.find_program(['xlc_r', 'xlc'], var='CC', mandatory=True)
cc = conf.cmd_to_list(cc)
conf.env.CC_NAME = 'xlc'
conf.env.CC = cc
@conftest
def find_cpp(conf):
v = conf.env
cpp = None
if v['CPP']: cpp = v['CPP']
elif 'CPP' in conf.environ: cpp = conf.environ['CPP']
if not cpp: cpp = v['CC']
v['CPP'] = cpp
@conftest
def xlc_common_flags(conf):
v = conf.env
# CPPFLAGS CCDEFINES _CCINCFLAGS _CCDEFFLAGS
v['CCFLAGS_DEBUG'] = ['-g']
v['CCFLAGS_RELEASE'] = ['-O2']
v['CC_SRC_F'] = ''
v['CC_TGT_F'] = ['-c', '-o', ''] # shell hack for -MD
v['CPPPATH_ST'] = '-I%s' # template for adding include paths
# linker
if not v['LINK_CC']: v['LINK_CC'] = v['CC']
v['CCLNK_SRC_F'] = ''
v['CCLNK_TGT_F'] = ['-o', ''] # shell hack for -MD
v['LIB_ST'] = '-l%s' # template for adding libs
v['LIBPATH_ST'] = '-L%s' # template for adding libpaths
v['STATICLIB_ST'] = '-l%s'
v['STATICLIBPATH_ST'] = '-L%s'
v['RPATH_ST'] = '-Wl,-rpath,%s'
v['CCDEFINES_ST'] = '-D%s'
v['SONAME_ST'] = ''
v['SHLIB_MARKER'] = ''
v['STATICLIB_MARKER'] = ''
v['FULLSTATIC_MARKER'] = '-static'
# program
v['program_LINKFLAGS'] = ['-Wl,-brtl']
v['program_PATTERN'] = '%s'
# shared library
v['shlib_CCFLAGS'] = ['-fPIC', '-DPIC'] # avoid using -DPIC, -fPIC already defines the __PIC__ macro
v['shlib_LINKFLAGS'] = ['-G', '-Wl,-brtl,-bexpfull']
v['shlib_PATTERN'] = 'lib%s.so'
# static lib
v['staticlib_LINKFLAGS'] = ''
v['staticlib_PATTERN'] = 'lib%s.a'
def detect(conf):
conf.find_xlc()
conf.find_cpp()
conf.find_ar()
conf.xlc_common_flags()
conf.cc_load_tools()
conf.cc_add_flags()
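The *_ST entries above are printf-style templates; other parts of waf push each include path, define or library name through them to build the final command line. A tiny illustration with made-up values:

CPPPATH_ST = '-I%s'
CCDEFINES_ST = '-D%s'
LIB_ST = '-l%s'

includes = ['/opt/include', 'src']      # hypothetical values
defines = ['NDEBUG']
libs = ['m', 'pthread']

flags = ([CPPPATH_ST % x for x in includes] +
         [CCDEFINES_ST % x for x in defines] +
         [LIB_ST % x for x in libs])
# -> ['-I/opt/include', '-Isrc', '-DNDEBUG', '-lm', '-lpthread']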

77
tools/wafadmin/Tools/xlcxx.py

@ -0,0 +1,77 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2006 (ita)
# Ralf Habacker, 2006 (rh)
# Yinon Ehrlich, 2009
# Michael Kuhn, 2009
import os, sys
import Configure, Options, Utils
import ccroot, ar
from Configure import conftest
@conftest
def find_xlcxx(conf):
cxx = conf.find_program(['xlc++_r', 'xlc++'], var='CXX', mandatory=True)
cxx = conf.cmd_to_list(cxx)
conf.env.CXX_NAME = 'xlc++'
conf.env.CXX = cxx
@conftest
def find_cpp(conf):
v = conf.env
cpp = None
if v['CPP']: cpp = v['CPP']
elif 'CPP' in conf.environ: cpp = conf.environ['CPP']
if not cpp: cpp = v['CXX']
v['CPP'] = cpp
@conftest
def xlcxx_common_flags(conf):
v = conf.env
# CPPFLAGS CXXDEFINES _CXXINCFLAGS _CXXDEFFLAGS
v['CXXFLAGS_DEBUG'] = ['-g']
v['CXXFLAGS_RELEASE'] = ['-O2']
v['CXX_SRC_F'] = ''
v['CXX_TGT_F'] = ['-c', '-o', ''] # shell hack for -MD
v['CPPPATH_ST'] = '-I%s' # template for adding include paths
# linker
if not v['LINK_CXX']: v['LINK_CXX'] = v['CXX']
v['CXXLNK_SRC_F'] = ''
v['CXXLNK_TGT_F'] = ['-o', ''] # shell hack for -MD
v['LIB_ST'] = '-l%s' # template for adding libs
v['LIBPATH_ST'] = '-L%s' # template for adding libpaths
v['STATICLIB_ST'] = '-l%s'
v['STATICLIBPATH_ST'] = '-L%s'
v['RPATH_ST'] = '-Wl,-rpath,%s'
v['CXXDEFINES_ST'] = '-D%s'
v['SONAME_ST'] = ''
v['SHLIB_MARKER'] = ''
v['STATICLIB_MARKER'] = ''
v['FULLSTATIC_MARKER'] = '-static'
# program
v['program_LINKFLAGS'] = ['-Wl,-brtl']
v['program_PATTERN'] = '%s'
# shared library
v['shlib_CXXFLAGS'] = ['-fPIC', '-DPIC'] # avoid using -DPIC, -fPIC already defines the __PIC__ macro
v['shlib_LINKFLAGS'] = ['-G', '-Wl,-brtl,-bexpfull']
v['shlib_PATTERN'] = 'lib%s.so'
# static lib
v['staticlib_LINKFLAGS'] = ''
v['staticlib_PATTERN'] = 'lib%s.a'
def detect(conf):
conf.find_xlcxx()
conf.find_cpp()
conf.find_ar()
conf.xlcxx_common_flags()
conf.cxx_load_tools()
conf.cxx_add_flags()

67
tools/wafadmin/Utils.py

@ -189,6 +189,7 @@ if is_win32:
Logs.info(stdout)
if stderr:
Logs.error(stderr)
return proc.returncode
else:
proc = pproc.Popen(s,**kw)
return proc.wait()
@ -226,13 +227,15 @@ def waf_version(mini = 0x010000, maxi = 0x100000):
sys.exit(0)
def python_24_guard():
if sys.hexversion<0x20400f0:
raise ImportError("Waf requires Python >= 2.3 but the raw source requires Python 2.4")
if sys.hexversion < 0x20400f0 or sys.hexversion >= 0x3000000:
raise ImportError("Waf requires Python >= 2.3 but the raw source requires Python 2.4, 2.5 or 2.6")
def ex_stack():
exc_type, exc_value, tb = sys.exc_info()
if Logs.verbose > 1:
exc_lines = traceback.format_exception(exc_type, exc_value, tb)
return ''.join(exc_lines)
return str(exc_value)
def to_list(sth):
if isinstance(sth, str):
@ -262,16 +265,12 @@ def load_module(file_path, name=WSCRIPT_FILE):
module.waf_hash_val = code
module_dir = os.path.dirname(file_path)
sys.path.insert(0, module_dir)
try:
exec(code, module.__dict__)
except Exception, e:
sys.path.insert(0, os.path.dirname(file_path))
try:
exec(compile(code, file_path, 'exec'), module.__dict__)
except Exception:
raise WscriptError(traceback.format_exc(), file_path)
except:
raise e
sys.path.remove(module_dir)
sys.path.pop(0)
g_loaded_modules[file_path] = module
@ -522,6 +521,15 @@ def detect_platform():
return s
def load_tool(tool, tooldir=None):
'''
load_tool: import a Python module, optionally using several directories.
@param tool [string]: name of tool to import.
@param tooldir [list]: directories to look for the tool.
@return: the loaded module.
Warning: this function is not thread-safe: it modifies sys.path,
so calls must run sequentially.
'''
if tooldir:
assert isinstance(tooldir, list)
sys.path = tooldir + sys.path
@ -529,11 +537,11 @@ def load_tool(tool, tooldir=None):
try:
return __import__(tool)
except ImportError, e:
raise WscriptError('Could not load the tool %r in %r' % (tool, sys.path))
Logs.error('Could not load the tool %r in %r:\n%s' % (tool, sys.path, e))
raise
finally:
if tooldir:
for d in tooldir:
sys.path.remove(d)
sys.path = sys.path[len(tooldir):]
def readf(fname, m='r'):
"get the contents of a file, it is not used anywhere for the moment"
@ -589,9 +597,10 @@ class Context(object):
nexdir = os.path.join(self.curdir, x)
base = os.path.join(nexdir, WSCRIPT_FILE)
file_path = base + '_' + name
try:
txt = readf(base + '_' + name, m='rU')
txt = readf(file_path, m='rU')
except (OSError, IOError):
try:
module = load_module(base)
@ -616,21 +625,18 @@ class Context(object):
else:
dc = {'ctx': self}
if getattr(self.__class__, 'pre_recurse', None):
dc = self.pre_recurse(txt, base + '_' + name, nexdir)
dc = self.pre_recurse(txt, file_path, nexdir)
old = self.curdir
self.curdir = nexdir
try:
try:
exec(txt, dc)
except Exception, e:
try:
exec(compile(txt, file_path, 'exec'), dc)
except Exception:
raise WscriptError(traceback.format_exc(), base)
except:
raise e
finally:
self.curdir = old
if getattr(self.__class__, 'post_recurse', None):
self.post_recurse(txt, base + '_' + name, nexdir)
self.post_recurse(txt, file_path, nexdir)
if is_win32:
old = shutil.copy2
@ -639,6 +645,25 @@ if is_win32:
shutil.copystat(src, src)
setattr(shutil, 'copy2', copy2)
def zip_folder(dir, zip_file_name, prefix):
"""
prefix represents the app to add in the archive
"""
import zipfile
zip = zipfile.ZipFile(zip_file_name, 'w', compression=zipfile.ZIP_DEFLATED)
base = os.path.abspath(dir)
if prefix:
if prefix[-1] != os.sep:
prefix += os.sep
n = len(base)
for root, dirs, files in os.walk(base):
for f in files:
archive_name = prefix + root[n:] + os.sep + f
zip.write(root + os.sep + f, archive_name, zipfile.ZIP_DEFLATED)
zip.close()
def get_elapsed_time(start):
"Format a time delta (datetime.timedelta) using the format DdHhMmS.MSs"
delta = datetime.datetime.now() - start
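A short usage sketch for zip_folder() above (paths are hypothetical): every file under build/dist is written into release.zip underneath the top-level folder 'myapp/':

import Utils
Utils.zip_folder('build/dist', 'release.zip', 'myapp')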

31
tools/wafadmin/ansiterm.py

@ -1,5 +1,8 @@
import sys, os
try:
if (not sys.stderr.isatty()) or (not sys.stdout.isatty()):
raise ValueError('not a tty')
from ctypes import *
class COORD(Structure):
@ -8,19 +11,25 @@ try:
class SMALL_RECT(Structure):
_fields_ = [("Left", c_short), ("Top", c_short), ("Right", c_short), ("Bottom", c_short)]
class CONSOLE_SCREEN_BUFFER_INFO(Structure):
_fields_ = [("Size", COORD), ("CursorPosition", COORD), ("Attributes", c_short), ("Window", SMALL_RECT), ("MaximumWindowSize", COORD)]
class CONSOLE_CURSOR_INFO(Structure):
_fields_ = [('dwSize',c_ulong), ('bVisible', c_int)]
sbinfo = CONSOLE_SCREEN_BUFFER_INFO()
csinfo = CONSOLE_CURSOR_INFO()
hconsole = windll.kernel32.GetStdHandle(-11)
windll.kernel32.GetConsoleScreenBufferInfo(hconsole, byref(sbinfo))
if sbinfo.Size.X < 10 or sbinfo.Size.Y < 10: raise Exception('small console')
windll.kernel32.GetConsoleCursorInfo(hconsole, byref(csinfo))
except Exception:
pass
else:
import re
import re, threading
to_int = lambda number, default: number and int(number) or default
wlock = threading.Lock()
STD_OUTPUT_HANDLE = -11
STD_ERROR_HANDLE = -12
@ -63,8 +72,7 @@ else:
windll.kernel32.SetConsoleCursorPosition(self.hconsole, clear_start)
else: # Clear from cursor position to end of screen
clear_start = sbinfo.CursorPosition
clear_length = ((sbinfo.Size.X - sbinfo.CursorPosition.X) +
sbinfo.Size.X * (sbinfo.Size.Y - sbinfo.CursorPosition.Y))
clear_length = ((sbinfo.Size.X - sbinfo.CursorPosition.X) + sbinfo.Size.X * (sbinfo.Size.Y - sbinfo.CursorPosition.Y))
chars_written = c_int()
windll.kernel32.FillConsoleOutputCharacterA(self.hconsole, c_char(' '), clear_length, clear_start, byref(chars_written))
windll.kernel32.FillConsoleOutputAttribute(self.hconsole, sbinfo.Attributes, clear_length, clear_start, byref(chars_written))
@ -159,6 +167,14 @@ else:
attrib = self.escape_to_color.get((intensity, color), 0x7)
windll.kernel32.SetConsoleTextAttribute(self.hconsole, attrib)
def show_cursor(self,param):
csinfo.bVisible = 1
windll.kernel32.SetConsoleCursorInfo(self.hconsole, byref(csinfo))
def hide_cursor(self,param):
csinfo.bVisible = 0
windll.kernel32.SetConsoleCursorInfo(self.hconsole, byref(csinfo))
ansi_command_table = {
'A': move_up,
'B': move_down,
@ -171,13 +187,16 @@ else:
'f': set_cursor,
'J': clear_screen,
'K': clear_line,
'h': show_cursor,
'l': hide_cursor,
'm': set_color,
's': push_cursor,
'u': pop_cursor,
}
# Match either the escape sequence or text not containing escape sequence
ansi_tokans = re.compile('(?:\x1b\[([0-9;]*)([a-zA-Z])|([^\x1b]+))')
ansi_tokans = re.compile('(?:\x1b\[([0-9?;]*)([a-zA-Z])|([^\x1b]+))')
def write(self, text):
wlock.acquire()
for param, cmd, txt in self.ansi_tokans.findall(text):
if cmd:
cmd_func = self.ansi_command_table.get(cmd)
@ -189,7 +208,7 @@ else:
windll.kernel32.WriteConsoleW(self.hconsole, txt, len(txt), byref(chars_written), None)
else:
windll.kernel32.WriteConsoleA(self.hconsole, txt, len(txt), byref(chars_written), None)
wlock.release()
def flush(self):
pass
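The tokenizer regex above splits the output stream into (parameters, command letter, plain text) triples; a quick standalone check with a made-up sample string:

import re
tokens = re.compile('(?:\x1b\[([0-9?;]*)([a-zA-Z])|([^\x1b]+))')

sample = '\x1b[32mok\x1b[0m plain text'
print(tokens.findall(sample))
# -> [('32', 'm', ''), ('', '', 'ok'), ('0', 'm', ''), ('', '', ' plain text')]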

14
tools/wafadmin/py3kfixes.py

@ -61,14 +61,20 @@ def r3(code):
code = code.replace('p.communicate()[0]', 'p.communicate()[0].decode("utf-8")')
return code
@subst('ansiterm.py')
def r33(code):
code = code.replace('unicode', 'str')
return code
@subst('Task.py')
def r4(code):
code = code.replace("up(self.__class__.__name__)", "up(self.__class__.__name__.encode())")
code = code.replace("up(self.env.variant())", "up(self.env.variant().encode())")
code = code.replace("up(x.parent.abspath())", "up(x.parent.abspath().encode())")
code = code.replace("up(x.name)", "up(x.name.encode())")
code = code.replace('class TaskBase(object):\n\t__metaclass__=store_task_type', 'class TaskBase(object, metaclass=store_task_type):')
code = code.replace('class TaskBase(object):\n\t__metaclass__=store_task_type', 'import binascii\n\nclass TaskBase(object, metaclass=store_task_type):')
code = code.replace('keys=self.cstr_groups.keys()', 'keys=list(self.cstr_groups.keys())')
code = code.replace("sig.encode('hex')", 'binascii.hexlify(sig)')
return code
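On the sig.encode('hex') substitution above: the 'hex' string codec is gone in Python 3, while binascii.hexlify works on both lines. A one-liner to see the equivalence:

import binascii
sig = b'\x00\xff\x10'
print(binascii.hexlify(sig))   # '00ff10' on Python 2, b'00ff10' on Python 3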
@subst('Build.py')
@ -92,7 +98,7 @@ def r7(code):
code = code.replace('class task_gen(object):\n\t__metaclass__=register_obj', 'class task_gen(object, metaclass=register_obj):')
return code
@subst('Tools/config_c.py')
@subst('Tools/python.py')
def r8(code):
code = code.replace('p.communicate()[0]', 'p.communicate()[0].decode("utf-8")')
return code
@ -102,6 +108,10 @@ def r9(code):
code = code.replace('f.write(c)', 'f.write(c.encode("utf-8"))')
return code
@subst('Tools/config_c.py')
def r10(code):
code = code.replace("key=kw['success']", "key=kw['success']\n\t\t\t\ttry:\n\t\t\t\t\tkey=key.decode('utf-8')\n\t\t\t\texcept:\n\t\t\t\t\tpass")
return code
def fixdir(dir):
global all_modifs
