Browse Source

Use waf-light instead of waf

This way there won't be strange tools/.waf-12343 directories hanging about.
All that waf needs to run is inside the tools/wafadmin directory.
v0.7.4-release
Ryan Dahl 15 years ago
parent
commit
84d2966377
  1. 2
      configure
  2. BIN
      tools/waf
  3. 150
      tools/waf-light
  4. 963
      tools/wafadmin/Build.py
  5. 336
      tools/wafadmin/Configure.py
  6. 76
      tools/wafadmin/Constants.py
  7. 205
      tools/wafadmin/Environment.py
  8. 128
      tools/wafadmin/Logs.py
  9. 660
      tools/wafadmin/Node.py
  10. 273
      tools/wafadmin/Options.py
  11. 219
      tools/wafadmin/Runner.py
  12. 561
      tools/wafadmin/Scripting.py
  13. 1078
      tools/wafadmin/Task.py
  14. 576
      tools/wafadmin/TaskGen.py
  15. 266
      tools/wafadmin/Tools/UnitTest.py
  16. 4
      tools/wafadmin/Tools/__init__.py
  17. 36
      tools/wafadmin/Tools/ar.py
  18. 42
      tools/wafadmin/Tools/bison.py
  19. 335
      tools/wafadmin/Tools/boost.py
  20. 102
      tools/wafadmin/Tools/cc.py
  21. 613
      tools/wafadmin/Tools/ccroot.py
  22. 67
      tools/wafadmin/Tools/compiler_cc.py
  23. 61
      tools/wafadmin/Tools/compiler_cxx.py
  24. 33
      tools/wafadmin/Tools/compiler_d.py
  25. 680
      tools/wafadmin/Tools/config_c.py
  26. 69
      tools/wafadmin/Tools/cs.py
  27. 106
      tools/wafadmin/Tools/cxx.py
  28. 532
      tools/wafadmin/Tools/d.py
  29. 37
      tools/wafadmin/Tools/dbus.py
  30. 64
      tools/wafadmin/Tools/dmd.py
  31. 26
      tools/wafadmin/Tools/flex.py
  32. 40
      tools/wafadmin/Tools/gas.py
  33. 121
      tools/wafadmin/Tools/gcc.py
  34. 52
      tools/wafadmin/Tools/gdc.py
  35. 165
      tools/wafadmin/Tools/glib2.py
  36. 214
      tools/wafadmin/Tools/gnome.py
  37. 111
      tools/wafadmin/Tools/gnu_dirs.py
  38. 18
      tools/wafadmin/Tools/gob2.py
  39. 125
      tools/wafadmin/Tools/gxx.py
  40. 36
      tools/wafadmin/Tools/icc.py
  41. 34
      tools/wafadmin/Tools/icpc.py
  42. 143
      tools/wafadmin/Tools/intltool.py
  43. 255
      tools/wafadmin/Tools/javaw.py
  44. 76
      tools/wafadmin/Tools/kde4.py
  45. 333
      tools/wafadmin/Tools/libtool.py
  46. 25
      tools/wafadmin/Tools/lua.py
  47. 433
      tools/wafadmin/Tools/misc.py
  48. 775
      tools/wafadmin/Tools/msvc.py
  49. 52
      tools/wafadmin/Tools/nasm.py
  50. 313
      tools/wafadmin/Tools/ocaml.py
  51. 185
      tools/wafadmin/Tools/osx.py
  52. 120
      tools/wafadmin/Tools/perl.py
  53. 809
      tools/wafadmin/Tools/preproc.py
  54. 401
      tools/wafadmin/Tools/python.py
  55. 531
      tools/wafadmin/Tools/qt4.py
  56. 75
      tools/wafadmin/Tools/suncc.py
  57. 69
      tools/wafadmin/Tools/suncxx.py
  58. 235
      tools/wafadmin/Tools/tex.py
  59. 273
      tools/wafadmin/Tools/vala.py
  60. 49
      tools/wafadmin/Tools/winres.py
  61. 652
      tools/wafadmin/Utils.py
  62. 3
      tools/wafadmin/__init__.py
  63. 620
      tools/wafadmin/pproc.py
  64. 95
      tools/wafadmin/py3kfixes.py

2
configure

@@ -24,7 +24,7 @@ cd $WORKINGDIR
WORKINGDIR=`pwd`
cd $CUR_DIR
WAF="${WORKINGDIR}/tools/waf"
WAF="${WORKINGDIR}/tools/waf-light"
# Checks for WAF. Honours $WAF if set. Stores path to 'waf' in $WAF.
# Requires that $PYTHON is set.

BIN
tools/waf

Binary file not shown.

150
tools/waf-light

@@ -0,0 +1,150 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2005-2009
"""
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
3. The name of the author may not be used to endorse or promote products
derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR
IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT,
INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING
IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
"""
import os, sys
if sys.hexversion<0x203000f: raise ImportError("Waf requires Python >= 2.3")
if 'PSYCOWAF' in os.environ:
try:import psyco;psyco.full()
except:pass
VERSION="1.5.9"
REVISION="x"
INSTALL="x"
C1='x'
C2='x'
cwd = os.getcwd()
join = os.path.join
WAF='waf'
def b(x):
return x
if sys.hexversion>0x300000f:
WAF='waf3'
def b(x):
return x.encode()
def err(m):
print(('\033[91mError: %s\033[0m' % m))
sys.exit(1)
def unpack_wafdir(dir):
f = open(sys.argv[0],'rb')
c = "corrupted waf (%d)"
while 1:
line = f.readline()
if not line: err("run waf-light from a folder containing wafadmin")
if line == b('#==>\n'):
txt = f.readline()
if not txt: err(c % 1)
if f.readline()!=b('#<==\n'): err(c % 2)
break
if not txt: err(c % 3)
txt = txt[1:-1].replace(b(C1), b('\n')).replace(b(C2), b('\r'))
import shutil, tarfile
try: shutil.rmtree(dir)
except OSError: pass
try: os.makedirs(join(dir, 'wafadmin', 'Tools'))
except OSError: err("Cannot unpack waf lib into %s\nMove waf into a writeable directory" % dir)
os.chdir(dir)
tmp = 't.tbz2'
t = open(tmp,'wb')
t.write(txt)
t.close()
try:
t = tarfile.open(tmp)
for x in t: t.extract(x)
t.close()
except:
os.chdir(cwd)
try: shutil.rmtree(dir)
except OSError: pass
err("Waf cannot be unpacked, check that bzip2 support is present")
os.chmod(join('wafadmin','Tools'), 493)
os.unlink(tmp)
if sys.hexversion>0x300000f:
sys.path = [join(dir, 'wafadmin')] + sys.path
import py3kfixes
py3kfixes.fixdir(dir)
os.chdir(cwd)
def test(dir):
try: os.stat(join(dir, 'wafadmin')); return os.path.abspath(dir)
except OSError: pass
def find_lib():
name = sys.argv[0]
base = os.path.dirname(os.path.abspath(name))
#devs use $WAFDIR
w=test(os.environ.get('WAFDIR', ''))
if w: return w
#waf-light
if name.endswith('waf-light'):
w = test(base)
if w: return w
err("waf-light requires wafadmin -> export WAFDIR=/folder")
dir = "/lib/%s-%s-%s/" % (WAF, VERSION, REVISION)
for i in [INSTALL,'/usr','/usr/local','/opt']:
w = test(i+dir)
if w: return w
#waf-local
s = '.%s-%s-%s'
if sys.platform == 'win32': s = s[1:]
dir = join(base, s % (WAF, VERSION, REVISION))
w = test(dir)
if w: return w
#unpack
unpack_wafdir(dir)
return dir
wafdir = find_lib()
w = join(wafdir, 'wafadmin')
t = join(w, 'Tools')
sys.path = [w, t] + sys.path
import Scripting
Scripting.prepare(t, cwd, VERSION, wafdir)
sys.exit(0)

963
tools/wafadmin/Build.py

@@ -0,0 +1,963 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2005 (ita)
"""
Dependency tree holder
The class Build holds all the info related to a build:
* file system representation (tree of Node instances)
* various cached objects (task signatures, file scan results, ..)
There is only one Build object at a time (bld singleton)
"""
import os, sys, errno, re, glob, gc, datetime, shutil
try: import cPickle
except: import pickle as cPickle
import Runner, TaskGen, Node, Scripting, Utils, Environment, Task, Logs, Options
from Logs import debug, error, info
from Constants import *
SAVED_ATTRS = 'root srcnode bldnode node_sigs node_deps raw_deps task_sigs id_nodes'.split()
"Build class members to save"
bld = None
"singleton - safe to use when Waf is not used as a library"
class BuildError(Utils.WafError):
def __init__(self, b=None, t=[]):
self.bld = b
self.tasks = t
self.ret = 1
Utils.WafError.__init__(self, self.format_error())
def format_error(self):
lst = ['Build failed']
for tsk in self.tasks:
txt = tsk.format_error()
if txt: lst.append(txt)
return '\n'.join(lst)
def group_method(fun):
"""
sets a build context method to execute after the current group has finished executing
this is useful for installing build files:
* calling install_files/install_as will fail if called too early
* people do not want to define install method in their task classes
TODO: try it
"""
def f(*k, **kw):
if not k[0].is_install:
return False
postpone = True
if 'postpone' in kw:
postpone = kw['postpone']
del kw['postpone']
if postpone:
m = k[0].task_manager
m.groups[m.current_group].post_funs.append((fun, k, kw))
kw['cwd'] = k[0].path
else:
fun(*k, **kw)
return f
class BuildContext(Utils.Context):
"holds the dependency tree"
def __init__(self):
# not a singleton, but provided for compatibility
global bld
bld = self
self.task_manager = Task.TaskManager()
# instead of hashing the nodes, we assign them a unique id when they are created
self.id_nodes = 0
self.idx = {}
# map names to environments, the 'default' must be defined
self.all_envs = {}
# ======================================= #
# code for reading the scripts
# project build directory - do not reset() from load_dirs()
self.bdir = ''
# the current directory from which the code is run
# the folder changes everytime a wscript is read
self.path = None
# Manual dependencies.
self.deps_man = Utils.DefaultDict(list)
# ======================================= #
# cache variables
# local cache for absolute paths - cache_node_abspath[variant][node]
self.cache_node_abspath = {}
# list of folders that are already scanned
# so that we do not need to stat them one more time
self.cache_scanned_folders = {}
# list of targets to uninstall for removing the empty folders after uninstalling
self.uninstall = []
# ======================================= #
# tasks and objects
# build dir variants (release, debug, ..)
for v in 'cache_node_abspath task_sigs node_deps raw_deps node_sigs'.split():
var = {}
setattr(self, v, var)
self.cache_dir_contents = {}
self.all_task_gen = []
self.task_gen_cache_names = {}
self.cache_sig_vars = {}
self.log = None
self.root = None
self.srcnode = None
self.bldnode = None
# bind the build context to the nodes in use
# this means better encapsulation and no build context singleton
class node_class(Node.Node):
pass
self.node_class = node_class
self.node_class.__module__ = "Node"
self.node_class.__name__ = "Nodu"
self.node_class.bld = self
self.is_install = None
def __copy__(self):
"nodes are not supposed to be copied"
raise Utils.WafError('build contexts are not supposed to be cloned')
def load(self):
"load the cache from the disk"
try:
env = Environment.Environment(os.path.join(self.cachedir, 'build.config.py'))
except (IOError, OSError):
pass
else:
if env['version'] < HEXVERSION:
raise Utils.WafError('Version mismatch! reconfigure the project')
for t in env['tools']:
self.setup(**t)
try:
gc.disable()
f = data = None
Node.Nodu = self.node_class
try:
f = open(os.path.join(self.bdir, DBFILE), 'rb')
except (IOError, EOFError):
# handle missing file/empty file
pass
try:
if f: data = cPickle.load(f)
except AttributeError:
# handle file of an old Waf version
# that has an attribute which no longer exist
# (e.g. AttributeError: 'module' object has no attribute 'BuildDTO')
if Logs.verbose > 1: raise
if data:
for x in SAVED_ATTRS: setattr(self, x, data[x])
else:
debug('build: Build cache loading failed')
finally:
if f: f.close()
gc.enable()
def save(self):
"store the cache on disk, see self.load"
gc.disable()
self.root.__class__.bld = None
# some people are very nervous with ctrl+c so we have to make a temporary file
Node.Nodu = self.node_class
db = os.path.join(self.bdir, DBFILE)
file = open(db + '.tmp', 'wb')
data = {}
for x in SAVED_ATTRS: data[x] = getattr(self, x)
cPickle.dump(data, file, -1)
file.close()
# do not use shutil.move
try: os.unlink(db)
except OSError: pass
os.rename(db + '.tmp', db)
self.root.__class__.bld = self
gc.enable()
# ======================================= #
def clean(self):
debug('build: clean called')
# does not clean files created during the configuration
precious = set([])
for env in self.all_envs.values():
for x in env[CFG_FILES]:
node = self.srcnode.find_resource(x)
if node:
precious.add(node.id)
def clean_rec(node):
for x in list(node.childs.keys()):
nd = node.childs[x]
tp = nd.id & 3
if tp == Node.DIR:
clean_rec(nd)
elif tp == Node.BUILD:
if nd.id in precious: continue
for env in self.all_envs.values():
try: os.remove(nd.abspath(env))
except OSError: pass
node.childs.__delitem__(x)
clean_rec(self.srcnode)
for v in 'node_sigs node_deps task_sigs raw_deps cache_node_abspath'.split():
setattr(self, v, {})
def compile(self):
"""The cache file is not written if nothing was build at all (build is up to date)"""
debug('build: compile called')
"""
import cProfile, pstats
cProfile.run("import Build\nBuild.bld.flush()", 'profi.txt')
p = pstats.Stats('profi.txt')
p.sort_stats('cumulative').print_stats(80)
"""
self.flush()
#"""
self.generator = Runner.Parallel(self, Options.options.jobs)
def dw(on=True):
if Options.options.progress_bar:
if on: sys.stderr.write(Logs.colors.cursor_on)
else: sys.stderr.write(Logs.colors.cursor_off)
debug('build: executor starting')
back = os.getcwd()
os.chdir(self.bldnode.abspath())
try:
try:
dw(on=False)
self.generator.start()
except KeyboardInterrupt:
dw()
if self.generator.consumers:
self.save()
raise
except Exception:
dw()
# do not store anything, for something bad happened
raise
else:
dw()
if self.generator.consumers:
self.save()
if self.generator.error:
raise BuildError(self, self.task_manager.tasks_done)
finally:
os.chdir(back)
def install(self):
"this function is called for both install and uninstall"
debug('build: install called')
self.flush()
# remove empty folders after uninstalling
if self.is_install < 0:
lst = []
for x in self.uninstall:
dir = os.path.dirname(x)
if not dir in lst: lst.append(dir)
lst.sort()
lst.reverse()
nlst = []
for y in lst:
x = y
while len(x) > 4:
if not x in nlst: nlst.append(x)
x = os.path.dirname(x)
nlst.sort()
nlst.reverse()
for x in nlst:
try: os.rmdir(x)
except OSError: pass
def new_task_gen(self, *k, **kw):
kw['bld'] = self
if len(k) == 0:
ret = TaskGen.task_gen(*k, **kw)
else:
cls_name = k[0]
try: cls = TaskGen.task_gen.classes[cls_name]
except KeyError: raise Utils.WscriptError('%s is not a valid task generator -> %s' %
(cls_name, [x for x in TaskGen.task_gen.classes]))
ret = cls(*k, **kw)
return ret
def load_envs(self):
try:
lst = Utils.listdir(self.cachedir)
except OSError, e:
if e.errno == errno.ENOENT:
raise Utils.WafError('The project was not configured: run "waf configure" first!')
else:
raise
if not lst:
raise Utils.WafError('The cache directory is empty: reconfigure the project')
for file in lst:
if file.endswith(CACHE_SUFFIX):
env = Environment.Environment(os.path.join(self.cachedir, file))
name = file[:-len(CACHE_SUFFIX)]
self.all_envs[name] = env
self.init_variants()
for env in self.all_envs.values():
for f in env[CFG_FILES]:
newnode = self.path.find_or_declare(f)
try:
hash = Utils.h_file(newnode.abspath(env))
except (IOError, AttributeError):
error("cannot find "+f)
hash = SIG_NIL
self.node_sigs[env.variant()][newnode.id] = hash
# TODO: hmmm, these nodes are removed from the tree when calling rescan()
self.bldnode = self.root.find_dir(self.bldnode.abspath())
self.path = self.srcnode = self.root.find_dir(self.srcnode.abspath())
self.cwd = self.bldnode.abspath()
def setup(self, tool, tooldir=None, funs=None):
"setup tools for build process"
if isinstance(tool, list):
for i in tool: self.setup(i, tooldir)
return
if not tooldir: tooldir = Options.tooldir
module = Utils.load_tool(tool, tooldir)
if hasattr(module, "setup"): module.setup(self)
def init_variants(self):
debug('build: init variants')
lstvariants = []
for env in self.all_envs.values():
if not env.variant() in lstvariants:
lstvariants.append(env.variant())
self.lst_variants = lstvariants
debug('build: list of variants is %r' % lstvariants)
for name in lstvariants+[0]:
for v in 'node_sigs cache_node_abspath'.split():
var = getattr(self, v)
if not name in var:
var[name] = {}
# ======================================= #
# node and folder handling
# this should be the main entry point
def load_dirs(self, srcdir, blddir, load_cache=1):
"this functions should be the start of everything"
assert(os.path.isabs(srcdir))
assert(os.path.isabs(blddir))
self.cachedir = os.path.join(blddir, CACHE_DIR)
if srcdir == blddir:
raise Utils.WafError("build dir must be different from srcdir: %s <-> %s " % (srcdir, blddir))
self.bdir = blddir
# try to load the cache file, if it does not exist, nothing happens
self.load()
if not self.root:
Node.Nodu = self.node_class
self.root = Node.Nodu('', None, Node.DIR)
if not self.srcnode:
self.srcnode = self.root.ensure_dir_node_from_path(srcdir)
debug('build: srcnode is %s and srcdir %s' % (self.srcnode.name, srcdir))
self.path = self.srcnode
# create this build dir if necessary
try: os.makedirs(blddir)
except OSError: pass
if not self.bldnode:
self.bldnode = self.root.ensure_dir_node_from_path(blddir)
self.init_variants()
def rescan(self, src_dir_node):
"""
look the contents of a (folder)node and update its list of childs
The intent is to perform the following steps
* remove the nodes for the files that have disappeared
* remove the signatures for the build files that have disappeared
* cache the results of os.listdir
* create the build folder equivalent (mkdir) for each variant
src/bar -> build/default/src/bar, build/release/src/bar
when a folder in the source directory is removed, we do not check recursively
to remove the unused nodes. To do that, call 'waf clean' and build again.
"""
# do not rescan over and over again
# TODO use a single variable in waf 1.6
if self.cache_scanned_folders.get(src_dir_node.id, None): return
self.cache_scanned_folders[src_dir_node.id] = True
# TODO remove in waf 1.6
if hasattr(self, 'repository'): self.repository(src_dir_node)
if not src_dir_node.name and sys.platform == 'win32':
# the root has no name, contains drive letters, and cannot be listed
return
# first, take the case of the source directory
parent_path = src_dir_node.abspath()
try:
lst = set(Utils.listdir(parent_path))
except OSError:
lst = set([])
# TODO move this at the bottom
self.cache_dir_contents[src_dir_node.id] = lst
# hash the existing source files, remove the others
cache = self.node_sigs[0]
for x in src_dir_node.childs.values():
if x.id & 3 != Node.FILE: continue
if x.name in lst:
try:
cache[x.id] = Utils.h_file(x.abspath())
except IOError:
raise Utils.WafError('The file %s is not readable or has become a dir' % x.abspath())
else:
try: del cache[x.id]
except KeyError: pass
del src_dir_node.childs[x.name]
# first obtain the differences between srcnode and src_dir_node
h1 = self.srcnode.height()
h2 = src_dir_node.height()
lst = []
child = src_dir_node
while h2 > h1:
lst.append(child.name)
child = child.parent
h2 -= 1
lst.reverse()
# list the files in the build dirs
# remove the existing timestamps if the build files are removed
for variant in self.lst_variants:
sub_path = os.path.join(self.bldnode.abspath(), variant , *lst)
try:
self.listdir_bld(src_dir_node, sub_path, variant)
except OSError:
#debug('build: osError on ' + sub_path)
# listdir failed, remove all sigs of nodes
# TODO more things to remove?
dict = self.node_sigs[variant]
for node in src_dir_node.childs.values():
if node.id in dict:
dict.__delitem__(node.id)
# avoid deleting the build dir node
if node.id != self.bldnode.id:
src_dir_node.childs.__delitem__(node.name)
os.makedirs(sub_path)
# ======================================= #
def listdir_src(self, parent_node):
"""do not use, kept for compatibility"""
pass
def remove_node(self, node):
"""do not use, kept for compatibility"""
pass
def listdir_bld(self, parent_node, path, variant):
"""in this method we do not add timestamps but we remove them
when the files no longer exist (file removed in the build dir)"""
i_existing_nodes = [x for x in parent_node.childs.values() if x.id & 3 == Node.BUILD]
lst = set(Utils.listdir(path))
node_names = set([x.name for x in i_existing_nodes])
remove_names = node_names - lst
# remove the stamps of the build nodes that no longer exist on the filesystem
ids_to_remove = [x.id for x in i_existing_nodes if x.name in remove_names]
cache = self.node_sigs[variant]
for nid in ids_to_remove:
if nid in cache:
cache.__delitem__(nid)
def get_env(self):
return self.env_of_name('default')
def set_env(self, name, val):
self.all_envs[name] = val
env = property(get_env, set_env)
def add_manual_dependency(self, path, value):
if isinstance(path, Node.Node):
node = path
elif os.path.isabs(path):
node = self.root.find_resource(path)
else:
node = self.path.find_resource(path)
self.deps_man[node.id].append(value)
def launch_node(self):
"""return the launch directory as a node"""
# p_ln is kind of private, but public in case if
try:
return self.p_ln
except AttributeError:
self.p_ln = self.root.find_dir(Options.launch_dir)
return self.p_ln
def glob(self, pattern, relative=True):
"files matching the pattern, seen from the current folder"
path = self.path.abspath()
files = [self.root.find_resource(x) for x in glob.glob(path+os.sep+pattern)]
if relative:
files = [x.path_to_parent(self.path) for x in files if x]
else:
files = [x.abspath() for x in files if x]
return files
## the following methods are candidates for the stable apis ##
def add_group(self, *k):
self.task_manager.add_group(*k)
def set_group(self, *k, **kw):
self.task_manager.set_group(*k, **kw)
def hash_env_vars(self, env, vars_lst):
"""hash environment variables
['CXX', ..] -> [env['CXX'], ..] -> md5()"""
# ccroot objects use the same environment for building the .o at once
# the same environment and the same variables are used
idx = str(id(env)) + str(vars_lst)
try: return self.cache_sig_vars[idx]
except KeyError: pass
lst = [str(env[a]) for a in vars_lst]
ret = Utils.h_list(lst)
debug("envhash: %r %r" % (ret, lst))
# next time
self.cache_sig_vars[idx] = ret
return ret
def name_to_obj(self, name, env):
"""retrieve a task generator from its name or its target name
remember that names must be unique"""
cache = self.task_gen_cache_names
if not cache:
# create the index lazily
for x in self.all_task_gen:
vt = x.env.variant() + '_'
if x.name:
cache[vt + x.name] = x
else:
if isinstance(x.target, str):
target = x.target
else:
target = ' '.join(x.target)
v = vt + target
if not cache.get(v, None):
cache[v] = x
return cache.get(env.variant() + '_' + name, None)
def flush(self, all=1):
"""tell the task generators to create the tasks"""
self.ini = datetime.datetime.now()
# force the initialization of the mapping name->object in flush
# name_to_obj can be used in userland scripts, in that case beware of incomplete mapping
self.task_gen_cache_names = {}
self.name_to_obj('', self.env)
debug('build: delayed operation TaskGen.flush() called')
if Options.options.compile_targets:
debug('task_gen: posting objects listed in compile_targets')
# ensure the target names exist, fail before any post()
target_objects = Utils.DefaultDict(list)
for target_name in Options.options.compile_targets.split(','):
# trim target_name (handle cases when the user added spaces to targets)
target_name = target_name.strip()
for env in self.all_envs.values():
obj = self.name_to_obj(target_name, env)
if obj:
target_objects[target_name].append(obj)
if not target_name in target_objects and all:
raise Utils.WafError("target '%s' does not exist" % target_name)
to_compile = []
for x in target_objects.values():
for y in x:
to_compile.append(id(y))
# tasks must be posted in order of declaration
# we merely apply a filter to discard the ones we are not interested in
for i in xrange(len(self.task_manager.groups)):
g = self.task_manager.groups[i]
self.task_manager.current_group = i
for tg in g.tasks_gen:
if id(tg) in to_compile:
tg.post()
else:
debug('task_gen: posting objects (normal)')
ln = self.launch_node()
# if the build is started from the build directory, do as if it was started from the top-level
# for the pretty-printing (Node.py), the two lines below cannot be moved to Build::launch_node
if ln.is_child_of(self.bldnode) or not ln.is_child_of(self.srcnode):
ln = self.srcnode
# if the project file is located under the source directory, build all targets by default
# else 'waf configure build' does nothing
proj_node = self.root.find_dir(os.path.split(Utils.g_module.root_path)[0])
if proj_node.id != self.srcnode.id:
ln = self.srcnode
for i in xrange(len(self.task_manager.groups)):
g = self.task_manager.groups[i]
self.task_manager.current_group = i
for tg in g.tasks_gen:
if not tg.path.is_child_of(ln):
continue
tg.post()
def env_of_name(self, name):
try:
return self.all_envs[name]
except KeyError:
error('no such environment: '+name)
return None
def progress_line(self, state, total, col1, col2):
n = len(str(total))
Utils.rot_idx += 1
ind = Utils.rot_chr[Utils.rot_idx % 4]
ini = self.ini
pc = (100.*state)/total
eta = Utils.get_elapsed_time(ini)
fs = "[%%%dd/%%%dd][%%s%%2d%%%%%%s][%s][" % (n, n, ind)
left = fs % (state, total, col1, pc, col2)
right = '][%s%s%s]' % (col1, eta, col2)
cols = Utils.get_term_cols() - len(left) - len(right) + 2*len(col1) + 2*len(col2)
if cols < 7: cols = 7
ratio = int((cols*state)/total) - 1
bar = ('='*ratio+'>').ljust(cols)
msg = Utils.indicator % (left, bar, right)
return msg
# do_install is not used anywhere
def do_install(self, src, tgt, chmod=O644):
"""returns true if the file was effectively installed or uninstalled, false otherwise"""
if self.is_install > 0:
if not Options.options.force:
# check if the file is already there to avoid a copy
try:
st1 = os.stat(tgt)
st2 = os.stat(src)
except OSError:
pass
else:
# same size and identical timestamps -> make no copy
if st1.st_mtime >= st2.st_mtime and st1.st_size == st2.st_size:
return False
srclbl = src.replace(self.srcnode.abspath(None)+os.sep, '')
info("* installing %s as %s" % (srclbl, tgt))
# following is for shared libs and stale inodes (-_-)
try: os.remove(tgt)
except OSError: pass
try:
shutil.copy2(src, tgt)
os.chmod(tgt, chmod)
except IOError:
try:
os.stat(src)
except (OSError, IOError):
error('File %r does not exist' % src)
raise Utils.WafError('Could not install the file %r' % tgt)
return True
elif self.is_install < 0:
info("* uninstalling %s" % tgt)
self.uninstall.append(tgt)
try:
os.remove(tgt)
except OSError, e:
if e.errno != errno.ENOENT:
if not getattr(self, 'uninstall_error', None):
self.uninstall_error = True
Logs.warn('build: some files could not be uninstalled (retry with -vv to list them)')
if Logs.verbose > 1:
Logs.warn('could not remove %s (error code %r)' % (e.filename, e.errno))
return True
def get_install_path(self, path, env=None):
"installation path prefixed by the destdir, the variables like in '${PREFIX}/bin' are substituted"
if not env: env = self.env
destdir = env.get_destdir()
path = path.replace('/', os.sep)
destpath = Utils.subst_vars(path, env)
if destdir:
destpath = os.path.join(destdir, destpath.lstrip(os.sep))
return destpath
def install_files(self, path, files, env=None, chmod=O644, relative_trick=False, cwd=None):
"""To install files only after they have been built, put the calls in a method named
post_build on the top-level wscript
The files must be a list and contain paths as strings or as Nodes
The relative_trick flag can be set to install folders, use bld.path.ant_glob() with it
"""
if env:
assert isinstance(env, Environment.Environment), "invalid parameter"
else:
env = self.env
if not path: return []
if not cwd:
cwd = self.path
if isinstance(files, str) and '*' in files:
gl = cwd.abspath() + os.sep + files
lst = glob.glob(gl)
else:
lst = Utils.to_list(files)
destpath = self.get_install_path(path, env)
Utils.check_dir(destpath)
installed_files = []
for filename in lst:
if isinstance(filename, str) and os.path.isabs(filename):
alst = Utils.split_path(filename)
destfile = os.path.join(destpath, alst[-1])
else:
if isinstance(filename, Node.Node):
nd = filename
else:
nd = cwd.find_resource(filename)
if not nd:
raise Utils.WafError("Unable to install the file %r (not found in %s)" % (filename, cwd))
if relative_trick:
destfile = os.path.join(destpath, filename)
Utils.check_dir(os.path.dirname(destfile))
else:
destfile = os.path.join(destpath, nd.name)
filename = nd.abspath(env)
if self.do_install(filename, destfile, chmod):
installed_files.append(destfile)
return installed_files
def install_as(self, path, srcfile, env=None, chmod=O644, cwd=None):
"""
srcfile may be a string or a Node representing the file to install
returns True if the file was effectively installed, False otherwise
"""
if env:
assert isinstance(env, Environment.Environment), "invalid parameter"
else:
env = self.env
if not path:
raise Utils.WafError("where do you want to install %r? (%r?)" % (srcfile, path))
if not cwd:
cwd = self.path
destpath = self.get_install_path(path, env)
dir, name = os.path.split(destpath)
Utils.check_dir(dir)
# the source path
if isinstance(srcfile, Node.Node):
src = srcfile.abspath(env)
else:
src = srcfile
if not os.path.isabs(srcfile):
node = cwd.find_resource(srcfile)
if not node:
raise Utils.WafError("Unable to install the file %r (not found in %s)" % (srcfile, cwd))
src = node.abspath(env)
return self.do_install(src, destpath, chmod)
def symlink_as(self, path, src, env=None, cwd=None):
"""example: bld.symlink_as('${PREFIX}/lib/libfoo.so', 'libfoo.so.1.2.3') """
if sys.platform == 'win32':
# well, this *cannot* work
return
if not path:
raise Utils.WafError("where do you want to install %r? (%r?)" % (src, path))
tgt = self.get_install_path(path, env)
dir, name = os.path.split(tgt)
Utils.check_dir(dir)
if self.is_install > 0:
link = False
if not os.path.islink(tgt):
link = True
elif os.readlink(tgt) != src:
link = True
try: os.remove(tgt)
except OSError: pass
if link:
info('* symlink %s (-> %s)' % (tgt, src))
os.symlink(src, tgt)
return 0
else: # UNINSTALL
try:
info('* removing %s' % (tgt))
os.remove(tgt)
return 0
except OSError:
return 1
def exec_command(self, cmd, **kw):
# 'runner' zone is printed out for waf -v, see wafadmin/Options.py
debug('runner: system command -> %s' % cmd)
if self.log:
self.log.write('%s\n' % cmd)
kw['log'] = self.log
try:
if not kw.get('cwd', None):
kw['cwd'] = self.cwd
except AttributeError:
self.cwd = kw['cwd'] = self.bldnode.abspath()
return Utils.exec_command(cmd, **kw)
def printout(self, s):
f = self.log or sys.stderr
f.write(s)
f.flush()
def add_subdirs(self, dirs):
self.recurse(dirs, 'build')
def pre_recurse(self, name_or_mod, path, nexdir):
if not hasattr(self, 'oldpath'):
self.oldpath = []
self.oldpath.append(self.path)
self.path = self.root.find_dir(nexdir)
return {'bld': self, 'ctx': self}
def post_recurse(self, name_or_mod, path, nexdir):
self.path = self.oldpath.pop()
###### user-defined behaviour
def pre_build(self):
if hasattr(self, 'pre_funs'):
for m in self.pre_funs:
m(self)
def post_build(self):
if hasattr(self, 'post_funs'):
for m in self.post_funs:
m(self)
def add_pre_fun(self, meth):
try: self.pre_funs.append(meth)
except AttributeError: self.pre_funs = [meth]
def add_post_fun(self, meth):
try: self.post_funs.append(meth)
except AttributeError: self.post_funs = [meth]
def use_the_magic(self):
Task.algotype = Task.MAXPARALLEL
Task.file_deps = Task.extract_deps
install_as = group_method(install_as)
install_files = group_method(install_files)
symlink_as = group_method(symlink_as)

336
tools/wafadmin/Configure.py

@@ -0,0 +1,336 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2005-2008 (ita)
"""
Configuration system
A configuration instance is created when "waf configure" is called, it is used to:
* create data dictionaries (Environment instances)
* store the list of modules to import
The old model (copied from Scons) was to store logic (mapping file extensions to functions)
along with the data. In Waf a way was found to separate that logic by adding an indirection
layer (storing the names in the Environment instances)
In the new model, the logic is more object-oriented, and the user scripts provide the
logic. The data files (Environments) must contain configuration data only (flags, ..).
Note: the c/c++ related code is in the module config_c
"""
import os, shlex, sys, time
try: import cPickle
except ImportError: import pickle as cPickle
import Environment, Utils, Options
from Logs import warn
from Constants import *
# header written at the top of config.log (filled from locals in post_init)
conf_template = '''# project %(app)s configured on %(now)s by
# waf %(wafver)s (abi %(abi)s, python %(pyver)x on %(systype)s)
# using %(args)s
#
'''
class ConfigurationError(Utils.WscriptError):
	"""Error raised when configuration fails (see ConfigurationContext.fatal)."""
	pass
# when set, post_recurse records the wscript files and their hashes so the
# project can be reconfigured automatically
autoconfig = False
"reconfigure the project automatically"
def find_file(filename, path_list):
	"""Return the first directory of *path_list* containing *filename*.

	*path_list* may be a string or a list (normalized by Utils.to_list).
	Returns '' when the file cannot be found.
	"""
	for candidate in Utils.to_list(path_list):
		if os.path.exists(os.path.join(candidate, filename)):
			return candidate
	return ''
def find_program_impl(env, filename, path_list=None, var=None, environ=None):
	"""find a program in folders path_list, and sets env[var]
	@param env: environment
	@param filename: name of the program to search for
	@param path_list: string or list of directories to search (defaults to PATH)
	@param var: environment value to be checked for in env or os.environ
	@param environ: mapping used instead of os.environ when given
	@return: either the value that is referenced with [var] in env or os.environ
	or the first occurrence filename or '' if filename could not be found
	"""
	# fixed: a mutable default argument ([]) is shared between calls;
	# use the None sentinel instead (behavior is unchanged)
	if path_list is None:
		path_list = []
	if not environ:
		environ = os.environ
	try: path_list = path_list.split()
	except AttributeError: pass
	if var:
		if env[var]: return env[var]
		# seed env[var] from the process environment; the search below may
		# still overwrite it with the full path of the program found
		if var in environ: env[var] = environ[var]
	if not path_list: path_list = environ.get('PATH', '').split(os.pathsep)
	# on win32, also try the usual executable suffixes
	ext = (Options.platform == 'win32') and '.exe,.com,.bat,.cmd' or ''
	for y in [filename+x for x in ext.split(',')]:
		for directory in path_list:
			x = os.path.join(directory, y)
			if os.path.isfile(x):
				if var: env[var] = x
				return x
	return ''
class ConfigurationContext(Utils.Context):
	"""Context used by "waf configure": holds the Environment instances in
	all_envs, the list of waf tools loaded, and writes config.log."""
	# functions registered by the @conftest decorator, by name
	tests = {}
	error_handlers = []
	def __init__(self, env=None, blddir='', srcdir=''):
		self.env = None
		self.envname = ''
		self.environ = dict(os.environ)
		self.line_just = 40
		self.blddir = blddir
		self.srcdir = srcdir
		self.all_envs = {}
		# curdir: necessary for recursion
		self.cwd = self.curdir = os.getcwd()
		self.tools = [] # tools loaded in the configuration, and that will be loaded when building
		self.setenv(DEFAULT)
		self.lastprog = ''
		self.hash = 0
		self.files = []
		self.tool_cache = []
		if self.blddir:
			self.post_init()
	def post_init(self):
		"""Prepare the cache directory and open config.log for writing."""
		self.cachedir = os.path.join(self.blddir, CACHE_DIR)
		path = os.path.join(self.blddir, WAF_CONFIG_LOG)
		try: os.unlink(path)
		except (OSError, IOError): pass
		try:
			self.log = open(path, 'w')
		except (OSError, IOError):
			self.fatal('could not open %r for writing' % path)
		app = getattr(Utils.g_module, 'APPNAME', '')
		if app:
			ver = getattr(Utils.g_module, 'VERSION', '')
			if ver:
				app = "%s (%s)" % (app, ver)
		now = time.ctime()
		pyver = sys.hexversion
		systype = sys.platform
		args = " ".join(sys.argv)
		wafver = WAFVERSION
		abi = ABI
		# conf_template is filled from the local variables set just above
		self.log.write(conf_template % vars())
	def __del__(self):
		"""cleanup function: close config.log"""
		# may be ran by the gc, not always after initialization
		if hasattr(self, 'log') and self.log:
			self.log.close()
	def fatal(self, msg):
		"""Abort the configuration by raising ConfigurationError."""
		raise ConfigurationError(msg)
	def check_tool(self, input, tooldir=None, funs=None):
		"load a waf tool"
		tools = Utils.to_list(input)
		if tooldir: tooldir = Utils.to_list(tooldir)
		for tool in tools:
			tool = tool.replace('++', 'xx')
			if tool == 'java': tool = 'javaw'
			# avoid loading the same tool more than once with the same functions
			# used by composite projects
			mag = (tool, id(self.env), funs)
			if mag in self.tool_cache:
				continue
			self.tool_cache.append(mag)
			module = Utils.load_tool(tool, tooldir)
			func = getattr(module, 'detect', None)
			if func:
				# 'detect' may be a plain function or a list of test names
				if type(func) is type(find_file): func(self)
				else: self.eval_rules(funs or func)
			self.tools.append({'tool':tool, 'tooldir':tooldir, 'funs':funs})
	def sub_config(self, k):
		"executes the configure function of a wscript module"
		self.recurse(k, name='configure')
	def pre_recurse(self, name_or_mod, path, nexdir):
		# names exposed to the recursed wscript
		return {'conf': self, 'ctx': self}
	def post_recurse(self, name_or_mod, path, nexdir):
		# when autoconfig is enabled, record the wscripts and their hashes
		if not autoconfig:
			return
		self.hash = hash((self.hash, getattr(name_or_mod, 'waf_hash_val', name_or_mod)))
		self.files.append(path)
	def store(self, file=''):
		"save the config results into the cache file"
		if not os.path.isdir(self.cachedir):
			os.makedirs(self.cachedir)
		if not file:
			file = open(os.path.join(self.cachedir, 'build.config.py'), 'w')
		file.write('version = 0x%x\n' % HEXVERSION)
		file.write('tools = %r\n' % self.tools)
		file.close()
		if not self.all_envs:
			self.fatal('nothing to store in the configuration context!')
		for key in self.all_envs:
			tmpenv = self.all_envs[key]
			tmpenv.store(os.path.join(self.cachedir, key + CACHE_SUFFIX))
	def set_env_name(self, name, env):
		"add a new environment called name"
		self.all_envs[name] = env
		return env
	def retrieve(self, name, fromenv=None):
		"retrieve an environment called name"
		try:
			env = self.all_envs[name]
		except KeyError:
			env = Environment.Environment()
			env['PREFIX'] = os.path.abspath(os.path.expanduser(Options.options.prefix))
			self.all_envs[name] = env
		else:
			if fromenv: warn("The environment %s may have been configured already" % name)
		return env
	def setenv(self, name):
		"enable the environment called name"
		self.env = self.retrieve(name)
		self.envname = name
	def add_os_flags(self, var, dest=None):
		"""Append the value of the OS environment variable *var* to env[dest or var]."""
		# do not use 'get' to make certain the variable is not defined
		try: self.env.append_value(dest or var, Utils.to_list(self.environ[var]))
		except KeyError: pass
	def check_message_1(self, sr):
		"""Print the left-hand ("Checking for ...") part of a check message."""
		self.line_just = max(self.line_just, len(sr))
		self.log.write(sr + '\n\n')
		Utils.pprint('NORMAL', "%s :" % sr.ljust(self.line_just), sep='')
	def check_message_2(self, sr, color='GREEN'):
		"""Print the right-hand (result) part of a check message."""
		Utils.pprint(color, sr)
	def check_message(self, th, msg, state, option=''):
		sr = 'Checking for %s %s' % (th, msg)
		self.check_message_1(sr)
		p = self.check_message_2
		if state: p('ok ' + option)
		else: p('not found', 'YELLOW')
	# FIXME remove in waf 1.6
	# the parameter 'option' is not used (kept for compatibility)
	def check_message_custom(self, th, msg, custom, option='', color='PINK'):
		sr = 'Checking for %s %s' % (th, msg)
		self.check_message_1(sr)
		self.check_message_2(custom, color)
	def find_program(self, filename, path_list=[], var=None, mandatory=False):
		"wrapper that adds a configuration message"
		ret = None
		if var:
			if self.env[var]:
				ret = self.env[var]
			elif var in os.environ:
				ret = os.environ[var]
		if not isinstance(filename, list): filename = [filename]
		if not ret:
			for x in filename:
				ret = find_program_impl(self.env, x, path_list, var, environ=self.environ)
				if ret: break
		self.check_message('program', ','.join(filename), ret, ret)
		self.log.write('find program=%r paths=%r var=%r -> %r\n\n' % (filename, path_list, var, ret))
		if not ret and mandatory:
			self.fatal('The program %r could not be found' % filename)
		if var:
			self.env[var] = ret
		return ret
	def cmd_to_list(self, cmd):
		"commands may be written in pseudo shell like 'ccache g++'"
		# NOTE(review): str.find returns -1 (truthy) when ' ' is absent, so
		# this branch is taken for any command not *starting* with a space;
		# the os.stat check then decides whether to split
		if isinstance(cmd, str) and cmd.find(' '):
			try:
				os.stat(cmd)
			except OSError:
				return shlex.split(cmd)
			else:
				return [cmd]
		return cmd
	def __getattr__(self, name):
		"""Resolve require_* names to the matching check_*/find_* method,
		turning a falsy result into a fatal error."""
		r = self.__class__.__dict__.get(name, None)
		if r: return r
		if name and name.startswith('require_'):
			for k in ['check_', 'find_']:
				n = name.replace('require_', k)
				ret = self.__class__.__dict__.get(n, None)
				if ret:
					def run(*k, **kw):
						r = ret(self, *k, **kw)
						if not r:
							self.fatal('requirement failure')
						return r
					return run
		self.fatal('No such method %r' % name)
	def eval_rules(self, rules):
		"""Run the named test methods; err_handler decides whether an
		exception breaks, continues or aborts the configuration."""
		self.rules = Utils.to_list(rules)
		for x in self.rules:
			f = getattr(self, x)
			if not f: self.fatal("No such method '%s'." % x)
			try:
				f()
			# python 2 only syntax (kept: this codebase targets python 2)
			except Exception, e:
				ret = self.err_handler(x, e)
				if ret == BREAK:
					break
				elif ret == CONTINUE:
					continue
				else:
					self.fatal(e)
	def err_handler(self, fun, error):
		# default policy: returning None makes eval_rules abort via fatal()
		pass
def conf(f):
	"""decorator: attach new configuration functions"""
	name = f.__name__
	setattr(ConfigurationContext, name, f)
	return f
def conftest(f):
	"""decorator: attach new configuration tests (registered as strings)"""
	registry = ConfigurationContext.tests
	registry[f.__name__] = f
	return conf(f)

76
tools/wafadmin/Constants.py

@ -0,0 +1,76 @@
#!/usr/bin/env python
# encoding: utf-8
# Yinon dot me gmail 2008
"""
these constants are somewhat public, try not to mess them
maintainer: the version number is updated from the top-level wscript file
"""
# do not touch these three lines, they are updated automatically
HEXVERSION = 0x10509
WAFVERSION="1.5.9"
WAFREVISION = "6626:6639M"
ABI = 7
# permissions (decimal: 420 == 0o644, 493 == 0o755)
O644 = 420
O755 = 493
MAXJOBS = 99999999
# cache/configuration file and directory names
CACHE_DIR = 'c4che'
CACHE_SUFFIX = '.cache.py'
DBFILE = '.wafpickle-%d' % ABI
WSCRIPT_FILE = 'wscript'
WSCRIPT_BUILD_FILE = 'wscript_build'
WAF_CONFIG_LOG = 'config.log'
WAF_CONFIG_H = 'config.h'
SIG_NIL = 'iluvcuteoverload'
VARIANT = '_VARIANT_'
DEFAULT = 'default'
SRCDIR = 'srcdir'
BLDDIR = 'blddir'
APPNAME = 'APPNAME'
VERSION = 'VERSION'
DEFINES = 'defines'
UNDEFINED = ()
# flow-control values returned by error handlers (see Configure.eval_rules)
BREAK = "break"
CONTINUE = "continue"
# task scheduler options
JOBCONTROL = "JOBCONTROL"
MAXPARALLEL = "MAXPARALLEL"
NORMAL = "NORMAL"
# task state
NOT_RUN = 0
MISSING = 1
CRASHED = 2
EXCEPTION = 3
SKIPPED = 8
SUCCESS = 9
ASK_LATER = -1
SKIP_ME = -2
RUN_ME = -3
# logging format strings (see Logs.formatter)
LOG_FORMAT = "%(asctime)s %(c1)s%(zone)s%(c2)s %(message)s"
HOUR_FORMAT = "%H:%M:%S"
TEST_OK = True
CFG_FILES = 'cfg_files'
# positive '->' install
# negative '<-' uninstall
INSTALL = 1337
UNINSTALL = -1337

205
tools/wafadmin/Environment.py

@ -0,0 +1,205 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2005 (ita)
"""Environment representation
There is one gotcha: getitem returns [] if the contents evals to False
This means env['foo'] = {}; print env['foo'] will print [] not {}
"""
import os, copy, re
import Logs, Options, Utils
from Constants import *
# matches "KEY = value" lines (possibly '#'-commented) in a stored env file
re_imp = re.compile('^(#)*?([^#=]*?)\ =\ (.*?)$', re.M)
class Environment(object):
	"""A safe-to-use dictionary, but do not attach functions to it please (break cPickle)
	An environment instance can be stored into a file and loaded easily
	"""
	__slots__ = ("table", "parent")
	def __init__(self, filename=None):
		self.table = {}
		#self.parent = None
		if filename:
			self.load(filename)
	def __contains__(self, key):
		if key in self.table: return True
		try: return self.parent.__contains__(key)
		except AttributeError: return False # parent may not exist
	def __str__(self):
		"""one 'key value' pair per line, merged over the inheritance chain"""
		keys = set()
		cur = self
		while cur:
			keys.update(cur.table.keys())
			cur = getattr(cur, 'parent', None)
		keys = list(keys)
		keys.sort()
		return "\n".join(["%r %r" % (x, self.__getitem__(x)) for x in keys])
	def __getitem__(self, key):
		"""look up the key here, then in the parents; a missing key gives []"""
		try:
			while 1:
				x = self.table.get(key, None)
				if not x is None:
					return x
				self = self.parent
		except AttributeError:
			return []
	def __setitem__(self, key, value):
		self.table[key] = value
	def __delitem__(self, key):
		# fixed: the signature used to be (self, key, value), which made
		# "del env[key]" raise TypeError (the del statement passes only key)
		del self.table[key]
	def set_variant(self, name):
		self.table[VARIANT] = name
	def variant(self):
		try:
			while 1:
				x = self.table.get(VARIANT, None)
				if not x is None:
					return x
				self = self.parent
		except AttributeError:
			return DEFAULT
	def copy(self):
		"""return a child environment inheriting the values of this one"""
		# TODO waf 1.6 rename this method derive, #368
		newenv = Environment()
		newenv.parent = self
		return newenv
	def detach(self):
		"""TODO try it
		modifying the original env will not change the copy"""
		tbl = self.get_merged_dict()
		try:
			delattr(self, 'parent')
		except AttributeError:
			pass
		else:
			keys = tbl.keys()
			for x in keys:
				tbl[x] = copy.copy(tbl[x])
			self.table = tbl
	def get_flat(self, key):
		"""return the value as a space-joined string"""
		s = self[key]
		if isinstance(s, str): return s
		return ' '.join(s)
	def _get_list_value_for_modification(self, key):
		"""Gets a value that must be a list for further modification. The
		list may be modified inplace and there is no need to
		"self.table[var] = value" afterwards.
		"""
		try:
			value = self.table[key]
		except KeyError:
			try: value = self.parent[key]
			except AttributeError: value = []
			if isinstance(value, list):
				# copy so that the parent's list is never mutated
				value = value[:]
			else:
				value = [value]
		else:
			if not isinstance(value, list):
				value = [value]
		self.table[key] = value
		return value
	def append_value(self, var, value):
		current_value = self._get_list_value_for_modification(var)
		if isinstance(value, list):
			current_value.extend(value)
		else:
			current_value.append(value)
	def prepend_value(self, var, value):
		current_value = self._get_list_value_for_modification(var)
		if isinstance(value, list):
			current_value = value + current_value
			# a new list: update the dictionary entry
			self.table[var] = current_value
		else:
			current_value.insert(0, value)
	# prepend unique would be ambiguous
	def append_unique(self, var, value):
		current_value = self._get_list_value_for_modification(var)
		if isinstance(value, list):
			for value_item in value:
				if value_item not in current_value:
					current_value.append(value_item)
		else:
			if value not in current_value:
				current_value.append(value)
	def get_merged_dict(self):
		"""compute a merged table"""
		table_list = []
		env = self
		while 1:
			table_list.insert(0, env.table)
			try: env = env.parent
			except AttributeError: break
		merged_table = {}
		for table in table_list:
			merged_table.update(table)
		return merged_table
	def store(self, filename):
		"Write the variables into a file"
		file = open(filename, 'w')
		merged_table = self.get_merged_dict()
		keys = list(merged_table.keys())
		keys.sort()
		for k in keys: file.write('%s = %r\n' % (k, merged_table[k]))
		file.close()
	def load(self, filename):
		"Retrieve the variables from a file"
		tbl = self.table
		code = Utils.readf(filename)
		for m in re_imp.finditer(code):
			g = m.group
			tbl[g(2)] = eval(g(3))
		Logs.debug('env: %s' % str(self.table))
	def get_destdir(self):
		"return the destdir, useful for installing"
		if self.__getitem__('NOINSTALL'): return ''
		return Options.options.destdir
	def update(self, d):
		# items() instead of iteritems(): identical on python 2, and the
		# method keeps working on python 3
		for k, v in d.items():
			self[k] = v
	def __getattr__(self, name):
		if name in self.__slots__:
			# fixed: object has no __getattr__; __getattribute__ raises a
			# proper AttributeError for an unset slot
			return object.__getattribute__(self, name)
		else:
			return self[name]
	def __setattr__(self, name, value):
		if name in self.__slots__:
			object.__setattr__(self, name, value)
		else:
			self[name] = value
	def __delattr__(self, name):
		# fixed: this method was misspelled __detattr__ (and called
		# object.__detattr__), so "del env.attr" could never work
		if name in self.__slots__:
			object.__delattr__(self, name)
		else:
			del self[name]

128
tools/wafadmin/Logs.py

@ -0,0 +1,128 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2005 (ita)
import os, re, logging, traceback, sys
from Constants import *
# debug zones enabled via the command line, and global verbosity level
zones = ''
verbose = 0
# ANSI escape sequences by color name; the 'USE' entry toggles colorizing
colors_lst = {
	'USE' : True,
	'BOLD' :'\x1b[01;1m',
	'RED' :'\x1b[01;91m',
	'GREEN' :'\x1b[32m',
	'YELLOW':'\x1b[33m',
	'PINK' :'\x1b[35m',
	'BLUE' :'\x1b[01;34m',
	'CYAN' :'\x1b[36m',
	'NORMAL':'\x1b[0m',
	'cursor_on' :'\x1b[?25h',
	'cursor_off' :'\x1b[?25l',
}
got_tty = not os.environ.get('TERM', 'dumb') in ['dumb', 'emacs']
if got_tty:
	try:
		got_tty = sys.stderr.isatty()
	except AttributeError:
		got_tty = False
import Utils
# disable colors when stderr is not a tty, on win32, or when NOCOLOR is set
if not got_tty or sys.platform == 'win32' or 'NOCOLOR' in os.environ:
	colors_lst['USE'] = False
def get_color(cl):
	"""Return the ANSI escape for color name *cl*, or '' when colors are disabled."""
	if colors_lst['USE']:
		return colors_lst.get(cl, '')
	return ''
class foo(object):
	"""Color accessor proxy: colors.RED and colors('RED') both call get_color."""
	def __getattr__(self, name):
		return get_color(name)
	def __call__(self, name):
		return get_color(name)
# singleton used as colors.RED / colors('RED')
colors = foo()
# matches an optional "zone: message" prefix in debug messages
re_log = re.compile(r'(\w+): (.*)', re.M)
class log_filter(logging.Filter):
	"""Colorize records by severity and filter debug records by zone."""
	def __init__(self, name=None):
		# the base-class name filtering is deliberately disabled
		pass
	def filter(self, rec):
		rec.c1 = colors.PINK
		rec.c2 = colors.NORMAL
		rec.zone = rec.module
		if rec.levelno >= logging.INFO:
			# info and above always pass; only the color depends on severity
			if rec.levelno >= logging.ERROR:
				rec.c1 = colors.RED
			elif rec.levelno >= logging.WARNING:
				rec.c1 = colors.YELLOW
			else:
				rec.c1 = colors.GREEN
			return True
		# debug records: an optional "zone: message" prefix selects the zone
		match = re_log.match(rec.msg)
		if match:
			rec.zone = match.group(1)
			rec.msg = match.group(2)
		if zones:
			return getattr(rec, 'zone', '') in zones or '*' in zones
		# without explicit zones, debug records pass only at verbosity > 2
		return verbose > 2
class formatter(logging.Formatter):
	"""Formatter which wraps important messages in the colors chosen by log_filter."""
	def __init__(self):
		logging.Formatter.__init__(self, LOG_FORMAT, HOUR_FORMAT)
	def format(self, rec):
		if rec.levelno >= logging.WARNING or rec.levelno == logging.INFO:
			try:
				return '%s%s%s' % (rec.c1, rec.msg.decode('utf-8'), rec.c2)
			# fixed: a bare "except:" also swallowed SystemExit and
			# KeyboardInterrupt; only decoding failures should be caught
			# (rec.msg may already be unicode, or hold non-utf-8 bytes)
			except Exception:
				return rec.c1+rec.msg+rec.c2
		return logging.Formatter.format(self, rec)
def debug(msg):
	"""Log *msg* at debug level when verbosity is enabled; newlines are flattened."""
	if not verbose:
		return
	# FIXME why does it eat the newlines????
	logging.debug(msg.replace('\n', ' '))
def error(msg):
	"""Log *msg* as an error; at verbosity > 1 also log the stack that led here
	(taken from the WafError itself when available)."""
	logging.error(msg)
	if verbose > 1:
		if isinstance(msg, Utils.WafError):
			st = msg.stack
		else:
			st = traceback.extract_stack()
		if st:
			# drop the innermost frame (this error() call / the raise site)
			st = st[:-1]
		buf = []
		for filename, lineno, name, line in st:
			buf.append(' File "%s", line %d, in %s' % (filename, lineno, name))
			if line:
				buf.append(' %s' % line.strip())
		if buf: logging.error("\n".join(buf))
# direct aliases onto the stdlib logging functions
warn = logging.warn
info = logging.info
def init_log():
	"""Install waf's colorizing handler and zone filter on the root logger.

	Existing handlers/filters are discarded first, so calling this more
	than once is safe.
	"""
	root = logging.getLogger()
	root.handlers = []
	root.filters = []
	handler = logging.StreamHandler()
	handler.setFormatter(formatter())
	root.addHandler(handler)
	root.addFilter(log_filter())
	root.setLevel(logging.DEBUG)
# may be initialized more than once (the module can be imported repeatedly)
init_log()

660
tools/wafadmin/Node.py

@ -0,0 +1,660 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2005 (ita)
"""
Node: filesystem structure, contains lists of nodes
IMPORTANT:
1. Each file/folder is represented by exactly one node
2. Most would-be class properties are stored in Build: nodes to depend on, signature, flags, ..
unused class members increase the .wafpickle file size sensibly with lots of objects
3. The build is launched from the top of the build dir (for example, in _build_/)
4. Node should not be instantiated directly.
Each instance of Build.BuildContext has a Node sublass.
(aka: 'Nodu', see BuildContext initializer)
The BuildContext is referenced here as self.__class__.bld
Its Node class is referenced here as self.__class__
The public and advertised apis are the following:
${TGT} -> dir/to/file.ext
${TGT[0].base()} -> dir/to/file
${TGT[0].dir(env)} -> dir/to
${TGT[0].file()} -> file.ext
${TGT[0].file_base()} -> file
${TGT[0].suffix()} -> .ext
${TGT[0].abspath(env)} -> /path/to/dir/to/file.ext
"""
import os, sys, fnmatch, re
import Utils
# node type, stored in the low two bits of Node.id
UNDEFINED = 0
DIR = 1
FILE = 2
BUILD = 3
type_to_string = {UNDEFINED: "unk", DIR: "dir", FILE: "src", BUILD: "bld"}
# These fnmatch expressions are used by default to prune the directory tree
# while doing the recursive traversal in the find_iter method of the Node class.
prune_pats = '.git .bzr .hg .svn _MTN _darcs CVS SCCS'.split()
# These fnmatch expressions are used by default to exclude files and dirs
# while doing the recursive traversal in the find_iter method of the Node class.
exclude_pats = prune_pats + '*~ #*# .#* %*% ._* .gitignore .cvsignore vssver.scc .DS_Store'.split()
# These Utils.jar_regexp expressions are used by default to exclude files and dirs and also prune the directory tree
# while doing the recursive traversal in the ant_glob method of the Node class.
exclude_regs = '''
**/*~
**/#*#
**/.#*
**/%*%
**/._*
**/CVS
**/CVS/**
**/.cvsignore
**/SCCS
**/SCCS/**
**/vssver.scc
**/.svn
**/.svn/**
**/.git
**/.git/**
**/.gitignore
**/.bzr
**/.bzr/**
**/.hg
**/.hg/**
**/_MTN
**/_MTN/**
**/_darcs
**/_darcs/**
**/.DS_Store'''
class Node(object):
	"""One filesystem file or folder; the node type lives in the low two
	bits of self.id and per-build data is kept on self.__class__.bld
	(see the module docstring above)."""
	__slots__ = ("name", "parent", "id", "childs")
	def __init__(self, name, parent, node_type = UNDEFINED):
		self.name = name
		self.parent = parent
		# assumption: one build object at a time
		self.__class__.bld.id_nodes += 4
		self.id = self.__class__.bld.id_nodes + node_type
		if node_type == DIR: self.childs = {}
		# We do not want to add another type attribute (memory)
		# use the id to find out: type = id & 3
		# for setting: new type = type + x - type & 3
		if parent and name in parent.childs:
			raise Utils.WafError('node %s exists in the parent files %r already' % (name, parent))
		if parent: parent.childs[name] = self
	def __setstate__(self, data):
		# pickling support: directories carry a 4th element (childs)
		if len(data) == 4:
			(self.parent, self.name, self.id, self.childs) = data
		else:
			(self.parent, self.name, self.id) = data
	def __getstate__(self):
		if getattr(self, 'childs', None) is None:
			return (self.parent, self.name, self.id)
		else:
			return (self.parent, self.name, self.id, self.childs)
	def __str__(self):
		if not self.parent: return ''
		return "%s://%s" % (type_to_string[self.id & 3], self.abspath())
	def __repr__(self):
		return self.__str__()
	def __hash__(self):
		"expensive, make certain it is not used"
		raise Utils.WafError('nodes, you are doing it wrong')
	def __copy__(self):
		"nodes are not supposed to be copied"
		raise Utils.WafError('nodes are not supposed to be cloned')
	def get_type(self):
		return self.id & 3
	def set_type(self, t):
		# NOTE(review): '&' binds looser than '+'/'-', so this evaluates as
		# (self.id + t - self.id) & 3 == t & 3, discarding the node counter
		# part of the id; the comment in __init__ suggests
		# self.id + t - (self.id & 3) was intended — confirm before relying
		# on this method
		self.id = self.id + t - self.id & 3
	def dirs(self):
		return [x for x in self.childs.values() if x.id & 3 == DIR]
	def files(self):
		return [x for x in self.childs.values() if x.id & 3 == FILE]
	def get_dir(self, name, default=None):
		node = self.childs.get(name, None)
		if not node or node.id & 3 != DIR: return default
		return node
	def get_file(self, name, default=None):
		node = self.childs.get(name, None)
		if not node or node.id & 3 != FILE: return default
		return node
	def get_build(self, name, default=None):
		node = self.childs.get(name, None)
		if not node or node.id & 3 != BUILD: return default
		return node
	def find_resource(self, lst):
		"Find an existing input file: either a build node declared previously or a source node"
		if isinstance(lst, str):
			lst = Utils.split_path(lst)
		if len(lst) == 1:
			parent = self
		else:
			parent = self.find_dir(lst[:-1])
			if not parent: return None
		self.__class__.bld.rescan(parent)
		name = lst[-1]
		node = parent.childs.get(name, None)
		if node:
			tp = node.id & 3
			if tp == FILE or tp == BUILD:
				return node
			else:
				return None
		tree = self.__class__.bld
		if not name in tree.cache_dir_contents[parent.id]:
			return None
		path = parent.abspath() + os.sep + name
		try:
			# hash the file contents; also proves the file exists
			st = Utils.h_file(path)
		except IOError:
			return None
		child = self.__class__(name, parent, FILE)
		tree.node_sigs[0][child.id] = st
		return child
	def find_or_declare(self, lst):
		"Used for declaring a build node representing a file being built"
		if isinstance(lst, str):
			lst = Utils.split_path(lst)
		if len(lst) == 1:
			parent = self
		else:
			parent = self.find_dir(lst[:-1])
			if not parent: return None
		self.__class__.bld.rescan(parent)
		name = lst[-1]
		node = parent.childs.get(name, None)
		if node:
			tp = node.id & 3
			if tp != BUILD:
				raise Utils.WafError("find_or_declare returns a build node, not a source nor a directory %r" % lst)
			return node
		node = self.__class__(name, parent, BUILD)
		return node
	def find_dir(self, lst):
		"search a folder in the filesystem"
		if isinstance(lst, str):
			lst = Utils.split_path(lst)
		current = self
		for name in lst:
			self.__class__.bld.rescan(current)
			prev = current
			if not current.parent and name == current.name:
				continue
			elif not name:
				continue
			elif name == '.':
				continue
			elif name == '..':
				current = current.parent or current
			else:
				current = prev.childs.get(name, None)
				if current is None:
					dir_cont = self.__class__.bld.cache_dir_contents
					if prev.id in dir_cont and name in dir_cont[prev.id]:
						if not prev.name:
							if os.sep == '/':
								# cygwin //machine/share
								dirname = os.sep + name
							else:
								# windows c:
								dirname = name
						else:
							# regular path
							dirname = prev.abspath() + os.sep + name
						if not os.path.isdir(dirname):
							return None
						current = self.__class__(name, prev, DIR)
					elif (not prev.name and len(name) == 2 and name[1] == ':') or name.startswith('\\\\'):
						# drive letter or \\ path for windows
						current = self.__class__(name, prev, DIR)
					else:
						return None
				else:
					if current.id & 3 != DIR:
						return None
		return current
	# FIXME: remove in waf 1.6 ?
	def ensure_dir_node_from_path(self, lst):
		"used very rarely, force the construction of a branch of node instance for representing folders"
		if isinstance(lst, str):
			lst = Utils.split_path(lst)
		current = self
		for name in lst:
			if not name:
				continue
			elif name == '.':
				continue
			elif name == '..':
				current = current.parent or current
			else:
				prev = current
				current = prev.childs.get(name, None)
				if current is None:
					current = self.__class__(name, prev, DIR)
		return current
	# FIXME: remove in waf 1.6
	def exclusive_build_node(self, path):
		"""
		create a hierarchy in the build dir (no source folders) for ill-behaving compilers
		the node is not hashed, so you must do it manually
		after declaring such a node, find_dir and find_resource should work as expected
		"""
		lst = Utils.split_path(path)
		name = lst[-1]
		if len(lst) > 1:
			parent = None
			try:
				parent = self.find_dir(lst[:-1])
			except OSError:
				pass
			if not parent:
				parent = self.ensure_dir_node_from_path(lst[:-1])
				self.__class__.bld.rescan(parent)
			else:
				try:
					self.__class__.bld.rescan(parent)
				except OSError:
					pass
		else:
			parent = self
		node = parent.childs.get(name, None)
		if not node:
			node = self.__class__(name, parent, BUILD)
		return node
	def path_to_parent(self, parent):
		"path relative to a direct ancestor, as string"
		lst = []
		p = self
		h1 = parent.height()
		h2 = p.height()
		while h2 > h1:
			h2 -= 1
			lst.append(p.name)
			p = p.parent
		if lst:
			lst.reverse()
			ret = os.path.join(*lst)
		else:
			ret = ''
		return ret
	def find_ancestor(self, node):
		"find a common ancestor for two nodes - for the shortest path in hierarchy"
		dist = self.height() - node.height()
		if dist < 0: return node.find_ancestor(self)
		# now the real code
		cand = self
		while dist > 0:
			cand = cand.parent
			dist -= 1
		if cand == node: return cand
		cursor = node
		while cand.parent:
			cand = cand.parent
			cursor = cursor.parent
			if cand == cursor: return cand
	def relpath_gen(self, from_node):
		"string representing a relative path between self to another node"
		if self == from_node: return '.'
		if from_node.parent == self: return '..'
		# up_path is '../../../' and down_path is 'dir/subdir/subdir/file'
		ancestor = self.find_ancestor(from_node)
		lst = []
		cand = self
		while not cand.id == ancestor.id:
			lst.append(cand.name)
			cand = cand.parent
		cand = from_node
		while not cand.id == ancestor.id:
			lst.append('..')
			cand = cand.parent
		lst.reverse()
		return os.sep.join(lst)
	def nice_path(self, env=None):
		"printed in the console, open files easily from the launch directory"
		tree = self.__class__.bld
		ln = tree.launch_node()
		if self.id & 3 == FILE: return self.relpath_gen(ln)
		else: return os.path.join(tree.bldnode.relpath_gen(ln), env.variant(), self.relpath_gen(tree.srcnode))
	def is_child_of(self, node):
		"does this node belong to the subtree node"
		p = self
		diff = self.height() - node.height()
		while diff > 0:
			diff -= 1
			p = p.parent
		return p.id == node.id
	def variant(self, env):
		"variant, or output directory for this node, a source has for variant 0"
		if not env: return 0
		elif self.id & 3 == FILE: return 0
		else: return env.variant()
	def height(self):
		"amount of parents"
		# README a cache can be added here if necessary
		d = self
		val = -1
		while d:
			d = d.parent
			val += 1
		return val
	# helpers for building things
	def abspath(self, env=None):
		"""
		absolute path
		@param env [Environment]:
			* obligatory for build nodes: build/variant/src/dir/bar.o
			* optional for dirs: get either src/dir or build/variant/src/dir
			* excluded for source nodes: src/dir/bar.c
		Instead of computing the absolute path each time again,
		store the already-computed absolute paths in one of (variants+1) dictionaries:
		bld.cache_node_abspath[0] holds absolute paths for source nodes.
		bld.cache_node_abspath[variant] holds the absolute path for the build nodes
		which reside in the variant given by env.
		"""
		## absolute path - hot zone, so do not touch
		# less expensive
		variant = (env and (self.id & 3 != FILE) and env.variant()) or 0
		ret = self.__class__.bld.cache_node_abspath[variant].get(self.id, None)
		if ret: return ret
		if not variant:
			# source directory
			if not self.parent:
				val = os.sep == '/' and os.sep or ''
			elif not self.parent.name: # root
				val = (os.sep == '/' and os.sep or '') + self.name
			else:
				val = self.parent.abspath() + os.sep + self.name
		else:
			# build directory
			val = os.sep.join((self.__class__.bld.bldnode.abspath(), variant, self.path_to_parent(self.__class__.bld.srcnode)))
		self.__class__.bld.cache_node_abspath[variant][self.id] = val
		return val
	def change_ext(self, ext):
		"node of the same path, but with a different extension - hot zone so do not touch"
		name = self.name
		k = name.rfind('.')
		if k >= 0:
			name = name[:k] + ext
		else:
			name = name + ext
		return self.parent.find_or_declare([name])
	def src_dir(self, env):
		"src path without the file name"
		return self.parent.srcpath(env)
	def bld_dir(self, env):
		"build path without the file name"
		return self.parent.bldpath(env)
	def bld_base(self, env):
		"build path without the extension: src/dir/foo(.cpp)"
		s = os.path.splitext(self.name)[0]
		return os.path.join(self.bld_dir(env), s)
	def bldpath(self, env=None):
		"path seen from the build dir default/src/foo.cpp"
		if self.id & 3 == FILE:
			return self.relpath_gen(self.__class__.bld.bldnode)
		# NOTE(review): 'is not' compares identity, not equality; this only
		# works because CPython interns the empty string — it should be != ''
		if self.path_to_parent(self.__class__.bld.srcnode) is not '':
			return os.path.join(env.variant(), self.path_to_parent(self.__class__.bld.srcnode))
		return env.variant()
	def srcpath(self, env=None):
		"path in the srcdir from the build dir ../src/foo.cpp"
		if self.id & 3 == BUILD:
			return self.bldpath(env)
		return self.relpath_gen(self.__class__.bld.bldnode)
	def read(self, env):
		"get the contents of a file, it is not used anywhere for the moment"
		return Utils.readf(self.abspath(env))
	def dir(self, env):
		"scons-like"
		return self.parent.abspath(env)
	def file(self):
		"scons-like"
		return self.name
	def file_base(self):
		"scons-like"
		return os.path.splitext(self.name)[0]
	def suffix(self):
		"scons-like - hot zone so do not touch"
		k = max(0, self.name.rfind('.'))
		return self.name[k:]
	def find_iter_impl(self, src=True, bld=True, dir=True, accept_name=None, is_prune=None, maxdepth=25):
		"find nodes in the filesystem hierarchy, try to instanciate the nodes passively"
		bld_ctx = self.__class__.bld
		bld_ctx.rescan(self)
		for name in bld_ctx.cache_dir_contents[self.id]:
			if accept_name(self, name):
				node = self.find_resource(name)
				if node:
					if src and node.id & 3 == FILE:
						yield node
				else:
					node = self.find_dir(name)
					if node and node.id != bld_ctx.bldnode.id:
						if dir:
							yield node
						if not is_prune(self, name):
							if maxdepth:
								for k in node.find_iter_impl(src, bld, dir, accept_name, is_prune, maxdepth=maxdepth - 1):
									yield k
			else:
				if not is_prune(self, name):
					node = self.find_resource(name)
					if not node:
						# not a file, it is a dir
						node = self.find_dir(name)
						if node and node.id != bld_ctx.bldnode.id:
							if maxdepth:
								for k in node.find_iter_impl(src, bld, dir, accept_name, is_prune, maxdepth=maxdepth - 1):
									yield k
		if bld:
			for node in self.childs.values():
				if node.id == bld_ctx.bldnode.id:
					continue
				if node.id & 3 == BUILD:
					if accept_name(self, node.name):
						yield node
		# python 2 generator idiom for "stop here"
		raise StopIteration
	def find_iter(self, in_pat=['*'], ex_pat=exclude_pats, prune_pat=prune_pats, src=True, bld=True, dir=False, maxdepth=25, flat=False):
		"find nodes recursively, this returns everything but folders by default"
		if not (src or bld or dir):
			raise StopIteration
		if self.id & 3 != DIR:
			raise StopIteration
		in_pat = Utils.to_list(in_pat)
		ex_pat = Utils.to_list(ex_pat)
		prune_pat = Utils.to_list(prune_pat)
		def accept_name(node, name):
			# exclusion patterns take precedence over inclusion patterns
			for pat in ex_pat:
				if fnmatch.fnmatchcase(name, pat):
					return False
			for pat in in_pat:
				if fnmatch.fnmatchcase(name, pat):
					return True
			return False
		def is_prune(node, name):
			for pat in prune_pat:
				if fnmatch.fnmatchcase(name, pat):
					return True
			return False
		ret = self.find_iter_impl(src, bld, dir, accept_name, is_prune, maxdepth=maxdepth)
		if flat:
			return " ".join([x.relpath_gen(self) for x in ret])
		return ret
	def ant_glob(self, *k, **kw):
		"""ant-style globbing: match '**'-style patterns below this directory"""
		src=kw.get('src', 1)
		bld=kw.get('bld', 1)
		dir=kw.get('dir', 0)
		excl = kw.get('excl', exclude_regs)
		incl = k and k[0] or kw.get('incl', '**')
		def to_pat(s):
			# compile 'a/**/b.*'-style patterns into lists of regexps;
			# '**' is kept as a literal marker
			lst = Utils.to_list(s)
			ret = []
			for x in lst:
				x = x.replace('//', '/')
				if x.endswith('/'):
					x += '**'
				lst2 = x.split('/')
				accu = []
				for k in lst2:
					if k == '**':
						accu.append(k)
					else:
						k = k.replace('.', '[.]').replace('*', '.*').replace('?', '.')
						k = '^%s$' % k
						#print "pattern", k
						accu.append(re.compile(k))
				ret.append(accu)
			return ret
		def filtre(name, nn):
			# advance every pattern by one path component
			ret = []
			for lst in nn:
				if not lst:
					pass
				elif lst[0] == '**':
					ret.append(lst)
					if len(lst) > 1:
						if lst[1].match(name):
							ret.append(lst[2:])
					else:
						ret.append([])
				elif lst[0].match(name):
					ret.append(lst[1:])
			return ret
		def accept(name, pats):
			nacc = filtre(name, pats[0])
			nrej = filtre(name, pats[1])
			# a fully-matched exclusion pattern rejects the name
			if [] in nrej:
				nacc = []
			return [nacc, nrej]
		def ant_iter(nodi, maxdepth=25, pats=[]):
			nodi.__class__.bld.rescan(nodi)
			for name in nodi.__class__.bld.cache_dir_contents[nodi.id]:
				npats = accept(name, pats)
				if npats and npats[0]:
					# an empty list in the accept set means "fully matched"
					accepted = [] in npats[0]
					#print accepted, nodi, name
					node = nodi.find_resource(name)
					if node and accepted:
						if src and node.id & 3 == FILE:
							yield node
					else:
						node = nodi.find_dir(name)
						if node and node.id != nodi.__class__.bld.bldnode.id:
							if accepted and dir:
								yield node
							if maxdepth:
								for k in ant_iter(node, maxdepth=maxdepth - 1, pats=npats):
									yield k
			if bld:
				for node in nodi.childs.values():
					if node.id == nodi.__class__.bld.bldnode.id:
						continue
					if node.id & 3 == BUILD:
						npats = accept(node.name, pats)
						if npats and npats[0] and [] in npats[0]:
							yield node
			raise StopIteration
		ret = [x for x in ant_iter(self, pats=[to_pat(incl), to_pat(excl)])]
		if kw.get('flat', True):
			return " ".join([x.relpath_gen(self) for x in ret])
		return ret
# compatibility alias for Node, kept so older code/pickled data keeps working
class Nodu(Node):
	pass

273
tools/wafadmin/Options.py

@@ -0,0 +1,273 @@
#!/usr/bin/env python
# encoding: utf-8
# Scott Newton, 2005 (scottn)
# Thomas Nagy, 2006 (ita)
"Custom command-line options"
import os, sys, imp, types, tempfile, optparse
import Logs, Utils
from Constants import *
# Verbs recognized on the waf command line
cmds = 'distclean configure build install clean uninstall check dist distcheck'.split()

# TODO remove in waf 1.6 the following two
commands = {}
is_install = False

options = {}
arg_line = []
launch_dir = ''
tooldir = ''
lockfile = os.environ.get('WAFLOCK', '.lock-wscript')
try: cache_global = os.path.abspath(os.environ['WAFCACHE'])
except KeyError: cache_global = ''
platform = Utils.unversioned_sys_platform()
conf_file = 'conf-runs-%s-%d.pickle' % (platform, ABI)

# Such a command-line should work: JOBS=4 PREFIX=/opt/ DESTDIR=/tmp/ahoj/ waf configure
default_prefix = os.environ.get('PREFIX')
if not default_prefix:
	if platform == 'win32': default_prefix = tempfile.gettempdir()
	else: default_prefix = '/usr/local/'

# JOBS comes from the environment as a *string*: convert it to an int,
# otherwise the '< 1' test below never triggers in Python 2 (str > int)
# and a string ends up as the default of the -j option
try:
	default_jobs = int(os.environ.get('JOBS', -1))
except ValueError:
	default_jobs = -1
if default_jobs < 1:
	try:
		if 'SC_NPROCESSORS_ONLN' in os.sysconf_names:
			default_jobs = os.sysconf('SC_NPROCESSORS_ONLN')
		else:
			default_jobs = int(Utils.cmd_output(['sysctl', '-n', 'hw.ncpu']))
	except:
		if os.name == 'java': # platform.system() == 'Java'
			from java.lang import Runtime
			default_jobs = Runtime.getRuntime().availableProcessors()
		else:
			# environment var defined on win32
			default_jobs = int(os.environ.get('NUMBER_OF_PROCESSORS', 1))

default_destdir = os.environ.get('DESTDIR', '')
def get_usage(self):
	"""Build the usage text displayed by 'waf --help'.

	Lists the commands defined in the user wscript module together with
	their docstrings; falls back to the default command names when no
	module is loaded.  Bound onto optparse.OptionParser below.
	"""
	cmds_str = []
	module = Utils.g_module
	if module:
		# create the help messages for commands
		tbl = module.__dict__
		keys = list(tbl.keys())
		keys.sort()

		if 'build' in tbl:
			if not module.build.__doc__:
				module.build.__doc__ = 'builds the project'
		if 'configure' in tbl:
			if not module.configure.__doc__:
				module.configure.__doc__ = 'configures the project'

		# commands are the documented public functions of the module
		ban = ['set_options', 'init', 'shutdown']
		optlst = [x for x in keys if not x in ban
			and type(tbl[x]) is type(parse_args_impl)
			and tbl[x].__doc__
			and not x.startswith('_')]

		# 'or [0]' avoids ValueError from max() when nothing is documented
		just = max([len(x) for x in optlst] or [0])

		for x in optlst:
			cmds_str.append(' %s: %s' % (x.ljust(just), tbl[x].__doc__))
		ret = '\n'.join(cmds_str)
	else:
		ret = ' '.join(cmds)
	return '''waf [command] [options]
Main commands (example: ./waf build -j4)
%s
''' % ret
setattr(optparse.OptionParser, 'get_usage', get_usage)
def create_parser(module=None):
	"""Create and return the optparse parser holding the standard waf options.

	module: the main wscript module (accepted for the callers, not used here).
	"""
	Logs.debug('options: create_parser is called')
	parser = optparse.OptionParser(conflict_handler="resolve", version = 'waf %s (%s)' % (WAFVERSION, WAFREVISION))

	parser.formatter.width = Utils.get_term_cols()
	p = parser.add_option

	# options valid for all commands
	p('-j', '--jobs',
		type = 'int',
		default = default_jobs,
		help = 'amount of parallel jobs (%r)' % default_jobs,
		dest = 'jobs')

	p('-k', '--keep',
		action = 'store_true',
		default = False,
		help = 'keep running happily on independent task groups',
		dest = 'keep')

	p('-v', '--verbose',
		action = 'count',
		default = 0,
		help = 'verbosity level -v -vv or -vvv [default: 0]',
		dest = 'verbose')

	p('--nocache',
		action = 'store_true',
		default = False,
		help = 'ignore the WAFCACHE (if set)',
		dest = 'nocache')

	p('--zones',
		action = 'store',
		default = '',
		help = 'debugging zones (task_gen, deps, tasks, etc)',
		dest = 'zones')

	p('-p', '--progress',
		action = 'count',
		default = 0,
		help = '-p: progress bar; -pp: ide output',
		dest = 'progress_bar')

	p('--targets',
		action = 'store',
		default = '',
		help = 'build given task generators, e.g. "target1,target2"',
		dest = 'compile_targets')

	# options used during 'waf configure'
	gr = optparse.OptionGroup(parser, 'configuration options')
	parser.add_option_group(gr)
	gr.add_option('-b', '--blddir',
		action = 'store',
		default = '',
		help = 'build dir for the project (configuration)',
		dest = 'blddir')
	gr.add_option('-s', '--srcdir',
		action = 'store',
		default = '',
		help = 'src dir for the project (configuration)',
		dest = 'srcdir')
	gr.add_option('--prefix',
		help = 'installation prefix (configuration) [default: %r]' % default_prefix,
		default = default_prefix,
		dest = 'prefix')

	# options used during 'waf install'/'waf uninstall'
	gr = optparse.OptionGroup(parser, 'installation options')
	parser.add_option_group(gr)
	gr.add_option('--destdir',
		help = 'installation root [default: %r]' % default_destdir,
		default = default_destdir,
		dest = 'destdir')
	gr.add_option('-f', '--force',
		action = 'store_true',
		default = False,
		help = 'force file installation',
		dest = 'force')

	return parser
def parse_args_impl(parser, _args=None):
	"""Parse the command line and fill the module globals.

	Sets 'options' (parsed option values), 'commands' (dict name->bool)
	and 'arg_line'.  Also normalizes the command list: a bare 'waf' means
	'waf build', 'check' implies a preceding 'build', and 'init' is always
	inserted first.  Finally configures the Logs module from the options.
	"""
	global options, commands, arg_line
	(options, args) = parser.parse_args(args=_args)

	arg_line = args
	#arg_line = args[:] # copy

	# By default, 'waf' is equivalent to 'waf build'
	commands = {}
	for var in cmds: commands[var] = 0
	if not args:
		commands['build'] = 1
		args.append('build')

	# Parse the command arguments
	for arg in args:
		commands[arg] = True

	# the check thing depends on the build
	if 'check' in args:
		idx = args.index('check')
		try:
			bidx = args.index('build')
			if bidx > idx:
				raise ValueError('build before check')
		except ValueError, e:
			# no 'build' before 'check': insert one
			args.insert(idx, 'build')

	if args[0] != 'init':
		args.insert(0, 'init')

	# TODO -k => -j0
	if options.keep: options.jobs = 1
	if options.jobs < 1: options.jobs = 1

	if 'install' in sys.argv or 'uninstall' in sys.argv:
		# absolute path only if set
		options.destdir = options.destdir and os.path.abspath(os.path.expanduser(options.destdir))

	Logs.verbose = options.verbose
	Logs.init_log()

	# --zones implies some verbosity; -v alone enables the 'runner' zone
	if options.zones:
		Logs.zones = options.zones.split(',')
		if not Logs.verbose: Logs.verbose = 1
	elif Logs.verbose > 0:
		Logs.zones = ['runner']
	if Logs.verbose > 2:
		Logs.zones = ['*']
# TODO waf 1.6
# 1. rename the class to OptionsContext
# 2. instead of a class attribute, use a module (static 'parser')
# 3. parse_args_impl was made in times when we did not know about binding new methods to classes
class Handler(Utils.Context):
	"""loads wscript modules in folders for adding options
	This class should be named 'OptionsContext'
	A method named 'recurse' is bound when used by the module Scripting"""

	parser = None
	# make it possible to access the reference, like Build.bld

	def __init__(self, module=None):
		self.parser = create_parser(module)
		self.cwd = os.getcwd()
		# NOTE(review): this stores the Handler instance itself, not the
		# optparse parser, on the class attribute - confirm callers expect that
		Handler.parser = self

	def add_option(self, *k, **kw):
		"""forward to optparse: define a single option"""
		self.parser.add_option(*k, **kw)

	def add_option_group(self, *k, **kw):
		"""forward to optparse: create and return an option group"""
		return self.parser.add_option_group(*k, **kw)

	def get_option_group(self, opt_str):
		"""forward to optparse: return the group containing opt_str"""
		return self.parser.get_option_group(opt_str)

	def sub_options(self, *k, **kw):
		"""execute the set_options() of the wscript in the given folder"""
		if not k: raise Utils.WscriptError('folder expected')
		self.recurse(k[0], name='set_options')

	def tool_options(self, *k, **kw):
		"""load the options of the given waf tools; k[0] is a list or a
		space-separated string of tool names, kw['tooldir']/'tdir' may
		override the tool search path"""
		Utils.python_24_guard()

		if not k[0]:
			raise Utils.WscriptError('invalid tool_options call %r %r' % (k, kw))
		tools = Utils.to_list(k[0])

		# TODO waf 1.6 remove the global variable tooldir
		path = Utils.to_list(kw.get('tdir', kw.get('tooldir', tooldir)))

		for tool in tools:
			tool = tool.replace('++', 'xx')
			module = Utils.load_tool(tool, path)
			try:
				fun = module.set_options
			except AttributeError:
				# the tool has no options to declare
				pass
			else:
				fun(kw.get('option_group', self))

	def parse_args(self, args=None):
		# delegate to the module-level function (see TODO above the class)
		parse_args_impl(self.parser, args)

219
tools/wafadmin/Runner.py

@@ -0,0 +1,219 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2005-2008 (ita)
"Execute the tasks"
import sys, random, time, threading, traceback
try: from Queue import Queue
except ImportError: from queue import Queue
import Build, Utils, Logs, Options
from Logs import debug, error
from Constants import *
# producer/consumer hysteresis: how many extra tasks may sit in the queues
GAP = 15

# Replace threading.Thread.run so that exceptions raised inside consumer
# threads go through sys.excepthook instead of the default traceback
# printing of the threading module (keeps the build output readable).
run_old = threading.Thread.run
def run(*args, **kwargs):
	"""Thread.run wrapper: report uncaught errors via sys.excepthook."""
	try:
		return run_old(*args, **kwargs)
	except (KeyboardInterrupt, SystemExit):
		# let user interruption and interpreter shutdown behave normally
		raise
	except:
		etype, evalue, etb = sys.exc_info()
		sys.excepthook(etype, evalue, etb)
threading.Thread.run = run
class TaskConsumer(threading.Thread):
	"""Daemon thread pulling tasks from the master's 'ready' queue,
	executing them and posting the results back on the 'out' queue."""
	def __init__(self, m):
		threading.Thread.__init__(self)
		self.setDaemon(1)
		# the Parallel instance feeding this consumer
		self.master = m
		self.start()
	def run(self):
		try:
			self.loop()
		except:
			# the process is going down; errors here are of no interest
			pass
	def loop(self):
		"""Consume tasks forever; the thread dies with the process (daemon)."""
		m = self.master
		while 1:
			tsk = m.ready.get()
			if m.stop:
				# build aborted: drain the queue without running anything
				m.out.put(tsk)
				continue
			try:
				tsk.generator.bld.printout(tsk.display())
				if tsk.__class__.stat: ret = tsk.__class__.stat(tsk)
				# actual call to task's run() function
				else: ret = tsk.call_run()
			except Exception, e:
				tsk.err_msg = Utils.ex_stack()
				tsk.hasrun = EXCEPTION

				# TODO cleanup
				m.error_handler(tsk)
				m.out.put(tsk)
				continue

			if ret:
				# non-zero exit code from the command
				tsk.err_code = ret
				tsk.hasrun = CRASHED
			else:
				try:
					tsk.post_run()
				except Utils.WafError:
					pass
				except Exception:
					tsk.err_msg = Utils.ex_stack()
					tsk.hasrun = EXCEPTION
				else:
					tsk.hasrun = SUCCESS
			if tsk.hasrun != SUCCESS:
				m.error_handler(tsk)

			m.out.put(tsk)
class Parallel(object):
	"""
	keep the consumer threads busy, and avoid consuming cpu cycles
	when no more tasks can be added (end of the build, etc)
	"""
	def __init__(self, bld, j=2):
		# number of consumers
		self.numjobs = j

		self.manager = bld.task_manager
		self.manager.current_group = 0

		# total amount of tasks, used by the progress indicator
		self.total = self.manager.total()

		# tasks waiting to be processed - IMPORTANT
		self.outstanding = []
		self.maxjobs = MAXJOBS

		# tasks that are awaiting for another task to complete
		self.frozen = []

		# tasks waiting to be run by the consumers
		self.ready = Queue(0)
		self.out = Queue(0)

		self.count = 0 # tasks not in the producer area

		self.processed = 1 # progress indicator

		self.consumers = None # the consumer threads, created lazily

		self.stop = False # error condition to stop the build
		self.error = False # error flag

	def get_next(self):
		"override this method to schedule the tasks in a particular order"
		if not self.outstanding:
			return None
		return self.outstanding.pop(0)

	def postpone(self, tsk):
		"override this method to schedule the tasks in a particular order"
		# TODO consider using a deque instead
		# randomized insertion spreads tasks waiting on the same dependency
		if random.randint(0, 1):
			self.frozen.insert(0, tsk)
		else:
			self.frozen.append(tsk)

	def refill_task_list(self):
		"called to set the next group of tasks"

		# throttle the producer while the consumers are saturated
		while self.count > self.numjobs + GAP or self.count >= self.maxjobs:
			self.get_out()

		while not self.outstanding:
			if self.count:
				self.get_out()

			if self.frozen:
				# retry postponed tasks: a dependency may have completed
				self.outstanding += self.frozen
				self.frozen = []
			elif not self.count:
				# current group exhausted: fetch the next group of tasks
				(jobs, tmp) = self.manager.get_next_set()
				if jobs != None: self.maxjobs = jobs
				if tmp: self.outstanding += tmp
				break

	def get_out(self):
		"the tasks that are put to execute are all collected using get_out"
		ret = self.out.get()
		self.manager.add_finished(ret)
		if not self.stop and getattr(ret, 'more_tasks', None):
			# a task may create additional tasks while it runs
			self.outstanding += ret.more_tasks
			self.total += len(ret.more_tasks)
		self.count -= 1

	def error_handler(self, tsk):
		"by default, errors make the build stop (not thread safe so be careful)"
		if not Options.options.keep:
			self.stop = True
		self.error = True

	def start(self):
		"execute the tasks"

		while not self.stop:

			self.refill_task_list()

			# consider the next task
			tsk = self.get_next()
			if not tsk:
				if self.count:
					# tasks may add new ones after they are run
					continue
				else:
					# no tasks to run, no tasks running, time to exit
					break

			if tsk.hasrun:
				# if the task is marked as "run", just skip it
				self.processed += 1
				self.manager.add_finished(tsk)
				continue

			try:
				st = tsk.runnable_status()
			except Exception, e:
				tsk.err_msg = Utils.ex_stack()
				tsk.hasrun = EXCEPTION
				self.processed += 1
				self.error_handler(tsk)
				self.manager.add_finished(tsk)
				continue

			if st == ASK_LATER:
				self.postpone(tsk)
			elif st == SKIP_ME:
				self.processed += 1
				tsk.hasrun = SKIPPED
				self.manager.add_finished(tsk)
			else:
				# run me: put the task in ready queue
				tsk.position = (self.processed, self.total)
				self.count += 1
				self.ready.put(tsk)
				self.processed += 1

				# create the consumer threads only if there is something to consume
				if not self.consumers:
					self.consumers = [TaskConsumer(self) for i in xrange(self.numjobs)]

		# self.count represents the tasks that have been made available to the consumer threads
		# collect all the tasks after an error else the message may be incomplete
		while self.error and self.count:
			self.get_out()

		#print loop
		assert (self.count == 0 or self.stop)

561
tools/wafadmin/Scripting.py

@@ -0,0 +1,561 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2005 (ita)
"Module called for configuring, compiling and installing targets"
import os, sys, shutil, traceback, datetime, inspect, errno
import Utils, Configure, Build, Logs, Options, Environment, Task
from Logs import error, warn, info
from Constants import *
# compression used for the archives made by 'waf dist' ('bz2' here)
g_gz = 'bz2'
# commands left to execute, consumed by main() below
commands = []
def prepare_impl(t, cwd, ver, wafdir):
	"""Locate the project wscript, load it and run the requested commands.

	t: tool directory, cwd: launch directory, ver: waf version string,
	wafdir: directory containing wafadmin.  Climbs up from cwd until a
	wscript (or the lock file of a previous configuration) is found,
	binds the default commands onto the wscript module, parses the
	options and finally calls main().
	"""
	Options.tooldir = [t]
	Options.launch_dir = cwd

	# some command-line options can be processed immediately
	if '--version' in sys.argv:
		opt_obj = Options.Handler()
		opt_obj.curdir = cwd
		opt_obj.parse_args()
		sys.exit(0)

	# now find the wscript file
	msg1 = 'Waf: Please run waf from a directory containing a file named "%s" or run distclean' % WSCRIPT_FILE

	# in theory projects can be configured in a gcc manner:
	# mkdir build && cd build && ../waf configure && ../waf
	build_dir_override = None
	candidate = None

	lst = os.listdir(cwd)

	search_for_candidate = True
	if WSCRIPT_FILE in lst:
		candidate = cwd
	elif 'configure' in sys.argv and not WSCRIPT_BUILD_FILE in lst:
		# gcc-like configuration
		calldir = os.path.abspath(os.path.dirname(sys.argv[0]))
		if WSCRIPT_FILE in os.listdir(calldir):
			candidate = calldir
			search_for_candidate = False
		else:
			error('arg[0] directory does not contain a wscript file')
			sys.exit(1)
		build_dir_override = cwd

	# climb up to find a script if it is not found
	while search_for_candidate:
		if len(cwd) <= 3:
			break # stop at / or c:
		dirlst = os.listdir(cwd)
		if WSCRIPT_FILE in dirlst:
			candidate = cwd
		if 'configure' in sys.argv and candidate:
			break
		if Options.lockfile in dirlst:
			# a previous configuration recorded the project directory
			env = Environment.Environment()
			env.load(os.path.join(cwd, Options.lockfile))
			try:
				os.stat(env['cwd'])
			except:
				# the recorded directory vanished: use the current one
				candidate = cwd
			else:
				candidate = env['cwd']
			break
		cwd = os.path.dirname(cwd) # climb up

	if not candidate:
		# check if the user only wanted to display the help
		if '-h' in sys.argv or '--help' in sys.argv:
			warn('No wscript file found: the help message may be incomplete')
			opt_obj = Options.Handler()
			opt_obj.curdir = cwd
			opt_obj.parse_args()
		else:
			error(msg1)
		sys.exit(0)

	# We have found wscript, but there is no guarantee that it is valid
	try:
		os.chdir(candidate)
	except OSError:
		raise Utils.WafError("the folder %r is unreadable" % candidate)

	# define the main module containing the functions init, shutdown, ..
	Utils.set_main_module(os.path.join(candidate, WSCRIPT_FILE))

	if build_dir_override:
		d = getattr(Utils.g_module, BLDDIR, None)
		if d:
			# test if user has set the blddir in wscript.
			msg = ' Overriding build directory %s with %s' % (d, build_dir_override)
			warn(msg)
		Utils.g_module.blddir = build_dir_override

	# bind a few methods and classes by default
	def set_def(obj, name=''):
		# only bind when the wscript did not define the name itself
		n = name or obj.__name__
		if not n in Utils.g_module.__dict__:
			setattr(Utils.g_module, n, obj)

	for k in [dist, distclean, distcheck, clean, install, uninstall]:
		set_def(k)

	set_def(Configure.ConfigurationContext, 'configure_context')

	for k in ['build', 'clean', 'install', 'uninstall']:
		set_def(Build.BuildContext, k + '_context')

	# now parse the options from the user wscript file
	opt_obj = Options.Handler(Utils.g_module)
	opt_obj.curdir = candidate
	try:
		f = Utils.g_module.set_options
	except AttributeError:
		pass
	else:
		opt_obj.sub_options([''])
	opt_obj.parse_args()

	if not 'init' in Utils.g_module.__dict__:
		Utils.g_module.init = Utils.nada
	if not 'shutdown' in Utils.g_module.__dict__:
		Utils.g_module.shutdown = Utils.nada

	main()
def prepare(t, cwd, ver, wafdir):
	"""Entry point called from waf-light: check that the waf script and the
	wafadmin library versions match, then run prepare_impl with top-level
	error handling (WafError -> exit 1, Ctrl-C -> exit 68).  The
	commented-out triple-quote markers below let a developer switch to a
	cProfile run by toggling a single comment character."""
	if WAFVERSION != ver:
		msg = 'Version mismatch: waf %s <> wafadmin %s (wafdir %s)' % (ver, WAFVERSION, wafdir)
		print('\033[91mError: %s\033[0m' % msg)
		sys.exit(1)

	#"""
	try:
		prepare_impl(t, cwd, ver, wafdir)
	except Utils.WafError, e:
		error(str(e))
		sys.exit(1)
	except KeyboardInterrupt:
		Utils.pprint('RED', 'Interrupted')
		sys.exit(68)
	"""
	import cProfile, pstats
	cProfile.runctx("import Scripting; Scripting.prepare_impl(t, cwd, ver, wafdir)", {},
		{'t': t, 'cwd':cwd, 'ver':ver, 'wafdir':wafdir},
		'profi.txt')
	p = pstats.Stats('profi.txt')
	p.sort_stats('time').print_stats(45)
	#"""
def main():
	"""Execute the commands accumulated on the command line, in order.

	'configure' and 'build' map to the functions defined in this module;
	anything else must be a function of the user wscript module.  Each
	command receives a fresh context ('<cmd>_context' class if defined).
	A final 'shutdown' command is appended automatically."""
	global commands
	commands = Options.arg_line[:]

	while commands:
		x = commands.pop(0)

		ini = datetime.datetime.now()
		if x == 'configure':
			fun = configure
		elif x == 'build':
			fun = build
		else:
			fun = getattr(Utils.g_module, x, None)

		if not fun:
			raise Utils.WscriptError('No such command %r' % x)

		ctx = getattr(Utils.g_module, x + '_context', Utils.Context)()

		if x in ['init', 'shutdown', 'dist', 'distclean', 'distcheck']:
			# compatibility TODO remove in waf 1.6
			try:
				fun(ctx)
			except TypeError:
				# older wscripts define these functions without a parameter
				fun()
		else:
			fun(ctx)

		ela = ''
		if not Options.options.progress_bar:
			ela = ' (%s)' % Utils.get_elapsed_time(ini)

		if x != 'init' and x != 'shutdown':
			info('%r finished successfully%s' % (x, ela))

		if not commands and x != 'shutdown':
			commands.append('shutdown')
def configure(conf):
	"""Run the configuration: resolve srcdir/blddir, execute the wscript
	configure functions, store the results and write the lock file."""

	# srcdir: command line beats wscript, default '.'
	src = getattr(Options.options, SRCDIR, None)
	if not src: src = getattr(Utils.g_module, SRCDIR, None)
	if not src:
		src = '.'
		incomplete_src = 1
	src = os.path.abspath(src)

	# blddir: command line beats wscript, default 'build'
	bld = getattr(Options.options, BLDDIR, None)
	if not bld:
		bld = getattr(Utils.g_module, BLDDIR, None)
		if bld == '.':
			raise Utils.WafError('Setting blddir="." may cause distclean problems')
	if not bld:
		bld = 'build'
		incomplete_bld = 1
	bld = os.path.abspath(bld)

	try: os.makedirs(bld)
	except OSError: pass

	# It is not possible to compile specific targets in the configuration
	# this may cause configuration errors if autoconfig is set
	targets = Options.options.compile_targets
	Options.options.compile_targets = None
	Options.is_install = False

	conf.srcdir = src
	conf.blddir = bld
	conf.post_init()

	if 'incomplete_src' in vars():
		conf.check_message_1('Setting srcdir to')
		conf.check_message_2(src)
	if 'incomplete_bld' in vars():
		conf.check_message_1('Setting blddir to')
		conf.check_message_2(bld)

	# calling to main wscript's configure()
	conf.sub_config([''])

	conf.store()

	# this will write a configure lock so that subsequent builds will
	# consider the current path as the root directory (see prepare_impl).
	# to remove: use 'waf distclean'
	env = Environment.Environment()
	env[BLDDIR] = bld
	env[SRCDIR] = src
	env['argv'] = sys.argv
	env['commands'] = Options.commands
	env['options'] = Options.options.__dict__

	# conf.hash & conf.files hold wscript files paths and hash
	# (used only by Configure.autoconfig)
	env['hash'] = conf.hash
	env['files'] = conf.files
	env['environ'] = dict(conf.environ)
	env['cwd'] = os.path.split(Utils.g_module.root_path)[0]

	if Utils.g_module.root_path != src:
		# in case the source dir is somewhere else
		env.store(os.path.join(src, Options.lockfile))

	env.store(Options.lockfile)

	# restore the value saved above
	Options.options.compile_targets = targets
def clean(bld):
	'''removes the build files'''
	# (the short docstring above is user-visible help text)
	try:
		env_proj = Environment.Environment(Options.lockfile)
	except IOError:
		raise Utils.WafError('Nothing to clean (project not configured)')

	bld.load_dirs(env_proj[SRCDIR], env_proj[BLDDIR])
	bld.load_envs()
	bld.is_install = 0 # False

	# read the scripts - and set the path to the wscript path (useful for srcdir='/foo/bar')
	wscript_dir = os.path.split(Utils.g_module.root_path)[0]
	bld.add_subdirs([wscript_dir])

	try:
		bld.clean()
	finally:
		# always persist the (now reduced) cache of signatures
		bld.save()
def check_configured(bld):
	"""Return a build context, reconfiguring the project first when
	Configure.autoconfig is set and the configuration is missing or stale
	(a wscript or configure file changed since the last 'waf configure')."""
	if not Configure.autoconfig:
		return bld

	conf_cls = getattr(Utils.g_module, 'configure_context', Utils.Context)
	bld_cls = getattr(Utils.g_module, 'build_context', Utils.Context)

	def reconf(proj):
		# re-run configure with the options recorded in the lock file,
		# then restore the current option and log state
		back = (Options.commands, Options.options.__dict__, Logs.zones, Logs.verbose)
		Options.commands = proj['commands']
		Options.options.__dict__ = proj['options']
		conf = conf_cls()
		conf.environ = proj['environ']
		configure(conf)
		(Options.commands, Options.options.__dict__, Logs.zones, Logs.verbose) = back

	try:
		proj = Environment.Environment(Options.lockfile)
	except IOError:
		# project not configured at all: configure from scratch
		conf = conf_cls()
		configure(conf)
	else:
		try:
			bld = bld_cls()
			bld.load_dirs(proj[SRCDIR], proj[BLDDIR])
			bld.load_envs()
		except Utils.WafError:
			# the cache files could not be loaded: reconfigure
			reconf(proj)
			return bld_cls()

	try:
		proj = Environment.Environment(Options.lockfile)
	except IOError:
		raise Utils.WafError('Auto-config: project does not configure (bug)')

	# recompute the hash over the recorded wscript/configure files and
	# compare with the value stored at configuration time
	h = 0
	try:
		for file in proj['files']:
			if file.endswith('configure'):
				h = hash((h, Utils.readf(file)))
			else:
				mod = Utils.load_module(file)
				h = hash((h, mod.waf_hash_val))
	except (OSError, IOError):
		warn('Reconfiguring the project: a file is unavailable')
		reconf(proj)
	else:
		if (h != proj['hash']):
			warn('Reconfiguring the project: the configuration has changed')
			reconf(proj)

	return bld_cls()
def install(bld):
	'''installs the build files'''
	bld = check_configured(bld)

	# record the mode for code that inspects Options at run time
	Options.commands['install'] = True
	Options.commands['uninstall'] = False
	Options.is_install = True

	bld.is_install = INSTALL

	# compile (if needed) then copy the files to the destination
	build_impl(bld)
	bld.install()
def uninstall(bld):
	'''removes the installed files'''
	Options.commands['install'] = False
	Options.commands['uninstall'] = True
	Options.is_install = True

	bld.is_install = UNINSTALL

	try:
		# monkey-patch runnable_status so that no task actually runs: the
		# build pass is only needed to enumerate the files to remove
		def runnable_status(self):
			return SKIP_ME
		setattr(Task.Task, 'runnable_status_back', Task.Task.runnable_status)
		setattr(Task.Task, 'runnable_status', runnable_status)

		build_impl(bld)
		bld.install()
	finally:
		# always restore the original method
		setattr(Task.Task, 'runnable_status', Task.Task.runnable_status_back)
def build(bld):
	# default command: compile the project without installing anything
	bld = check_configured(bld)

	Options.commands['install'] = False
	Options.commands['uninstall'] = False
	Options.is_install = False

	bld.is_install = 0 # False

	return build_impl(bld)
def build_impl(bld):
	# compile the project and/or install the files
	try:
		proj = Environment.Environment(Options.lockfile)
	except IOError:
		raise Utils.WafError("Project not configured (run 'waf configure' first)")

	bld.load_dirs(proj[SRCDIR], proj[BLDDIR])
	bld.load_envs()

	info("Waf: Entering directory `%s'" % bld.bldnode.abspath())
	# read the wscripts, starting from the project root
	bld.add_subdirs([os.path.split(Utils.g_module.root_path)[0]])

	# execute something immediately before the build starts
	bld.pre_build()

	try:
		bld.compile()
	finally:
		# terminate the progress bar line before printing anything else
		if Options.options.progress_bar: print('')
		info("Waf: Leaving directory `%s'" % bld.bldnode.abspath())

	# execute something immediately after a successful build
	bld.post_build()

	bld.install()
# names (VCS metadata, build-system files) never shipped in a source archive
excludes = '.bzr .bzrignore .git .gitignore .svn CVS .cvsignore .arch-ids {arch} SCCS BitKeeper .hg _MTN _darcs Makefile Makefile.in config.log'.split()
# file suffixes (backups, caches, previous archives) never shipped either
dist_exts = '~ .rej .orig .pyc .pyo .bak .tar.bz2 tar.gz .zip .swp'.split()

def dont_dist(name, src, build_dir):
	"""Return True when the entry 'name' (found in folder 'src') must be
	left out of the source archive: VCS metadata, editor leftovers,
	previous archives, the top-level lock file and the build directory."""
	global excludes, dist_exts

	if name.startswith((',,', '++', '.waf-1.')):
		return True
	if src == '.' and name == Options.lockfile:
		return True
	if name in excludes or name == build_dir:
		return True

	# suffix-based exclusions
	for ext in dist_exts:
		if name.endswith(ext):
			return True

	return False
# Recursive copy in the spirit of shutil.copytree, except that entries are
# filtered through dont_dist() and exceptions propagate immediately.
def copytree(src, dst, build_dir):
	"""Copy the tree rooted at 'src' into the new folder 'dst', skipping
	every entry excluded from distribution archives."""
	entries = os.listdir(src)
	os.makedirs(dst)
	for entry in entries:
		if dont_dist(entry, src, build_dir):
			continue
		src_path = os.path.join(src, entry)
		dst_path = os.path.join(dst, entry)
		if os.path.isdir(src_path):
			copytree(src_path, dst_path, build_dir)
		else:
			shutil.copy2(src_path, dst_path)
# TODO in waf 1.6, change this method if "srcdir == blddir" is allowed
def distclean(ctx=None):
	'''removes the build directory'''
	lst = os.listdir('.')
	for f in lst:
		if f == Options.lockfile:
			# the lock file records where the build directory is
			try:
				proj = Environment.Environment(f)
			except:
				Logs.warn('could not read %r' % f)
				continue

			try:
				shutil.rmtree(proj[BLDDIR])
			except IOError:
				pass
			except OSError, e:
				# a missing directory is fine; anything else is reported
				if e.errno != errno.ENOENT:
					Logs.warn('project %r cannot be removed' % proj[BLDDIR])

			try:
				os.remove(f)
			except OSError, e:
				if e.errno != errno.ENOENT:
					Logs.warn('file %r cannot be removed' % f)

		# remove the local waf cache
		if f.startswith('.waf-'):
			shutil.rmtree(f, ignore_errors=True)
# FIXME waf 1.6 a unique ctx parameter, and remove the optional appname and version
def dist(appname='', version=''):
	'''makes a tarball for redistributing the sources'''
	# returns the archive file name (tarball of the filtered source tree)
	import tarfile

	if not appname: appname = getattr(Utils.g_module, APPNAME, 'noname')
	if not version: version = getattr(Utils.g_module, VERSION, '1.0')

	tmp_folder = appname + '-' + version
	arch_name = tmp_folder+'.tar.'+g_gz

	# remove the previous dir
	try:
		shutil.rmtree(tmp_folder)
	except (OSError, IOError):
		pass

	# remove the previous archive
	try:
		os.remove(arch_name)
	except (OSError, IOError):
		pass

	# copy the files into the temporary folder
	copytree('.', tmp_folder, getattr(Utils.g_module, BLDDIR, None))

	# undocumented hook for additional cleanup
	dist_hook = getattr(Utils.g_module, 'dist_hook', None)
	if dist_hook:
		back = os.getcwd()
		os.chdir(tmp_folder)
		try:
			dist_hook()
		finally:
			# go back to the root directory
			os.chdir(back)

	tar = tarfile.open(arch_name, 'w:' + g_gz)
	tar.add(tmp_folder)
	tar.close()

	# display a checksum so downloads can be verified
	try: from hashlib import sha1 as sha
	except ImportError: from sha import sha
	try:
		digest = " (sha=%r)" % sha(Utils.readf(arch_name)).hexdigest()
	except:
		digest = ''

	info('New archive created: %s%s' % (arch_name, digest))

	if os.path.exists(tmp_folder): shutil.rmtree(tmp_folder)
	return arch_name
# FIXME waf 1.6 a unique ctx parameter, and remove the optional appname and version
def distcheck(appname='', version=''):
	'''checks if the sources compile (tarball from 'dist')'''
	import tempfile, tarfile

	if not appname: appname = getattr(Utils.g_module, APPNAME, 'noname')
	if not version: version = getattr(Utils.g_module, VERSION, '1.0')

	waf = os.path.abspath(sys.argv[0])
	tarball = dist(appname, version)

	# unpack the archive produced by dist() in the current directory
	t = tarfile.open(tarball)
	for x in t: t.extract(x)
	t.close()

	path = appname + '-' + version

	# configure, install and uninstall the unpacked tree into a scratch destdir
	instdir = tempfile.mkdtemp('.inst', '%s-%s' % (appname, version))
	ret = Utils.pproc.Popen([waf, 'configure', 'install', 'uninstall', '--destdir=' + instdir], cwd=path).wait()
	if ret:
		raise Utils.WafError('distcheck failed with code %i' % ret)

	# a correct uninstall must leave nothing behind in the destdir
	if os.path.exists(instdir):
		raise Utils.WafError('distcheck succeeded, but files were left in %s' % instdir)

	shutil.rmtree(path)
# FIXME remove in Waf 1.6 (kept for compatibility)
def add_subdir(dir, bld):
	# old API: simply forward to the build context
	bld.recurse(dir, 'build')

1078
tools/wafadmin/Task.py

File diff suppressed because it is too large

576
tools/wafadmin/TaskGen.py

@@ -0,0 +1,576 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2005-2008 (ita)
"""
The class task_gen encapsulates the creation of task objects (low-level code)
The instances can have various parameters, but the creation of task nodes (Task.py)
is delayed. To achieve this, various methods are called from the method "apply"
The class task_gen contains lots of methods, and a configuration table:
* the methods to call (self.meths) can be specified dynamically (removing, adding, ..)
* the order of the methods (self.prec or by default task_gen.prec) is configurable
* new methods can be inserted dynamically without pasting old code
Additionally, task_gen provides the method apply_core
* file extensions are mapped to methods: def meth(self, name_or_node)
* if a mapping is not found in self.mappings, it is searched in task_gen.mappings
* when called, the functions may modify self.allnodes to re-add source to process
* the mappings can map an extension or a filename (see the code below)
WARNING: subclasses must reimplement the clone method
"""
import os, traceback, copy
import Build, Task, Utils, Logs, Options
from Logs import debug, error, warn
from Constants import *
# Common attribute misspellings mapped to the canonical attribute name;
# task_gen.__setattr__ uses this table to warn and silently correct scripts
typos = {
'sources':'source',
'targets':'target',
'include':'includes',
'define':'defines',
'importpath':'importpaths',
'install_var':'install_path',
'install_subdir':'install_path',
'inst_var':'install_path',
'inst_dir':'install_path',
'feature':'features',
}
class register_obj(type):
	"""no decorators for classes, so we use a metaclass
	we store into task_gen.classes the classes that inherit task_gen
	and whose names end in '_taskgen'
	"""
	def __init__(cls, name, bases, dict):
		super(register_obj, cls).__init__(name, bases, dict)
		suffix = '_taskgen'
		cls_name = cls.__name__
		if cls_name.endswith(suffix):
			# register under the name without the suffix, e.g. 'cc_taskgen' -> 'cc'
			task_gen.classes[cls_name.replace(suffix, '')] = cls
class task_gen(object):
	"""
	Most methods are of the form 'def meth(self):' without any parameters
	there are many of them, and they do many different things:
	* task creation
	* task results installation
	* environment modification
	* attribute addition/removal

	The inheritance approach is complicated
	* mixing several languages at once
	* subclassing is needed even for small changes
	* inserting new methods is complicated

	This new class uses a configuration table:
	* adding new methods easily
	* obtaining the order in which to call the methods
	* postponing the method calls (post() -> apply)

	Additionally, a 'traits' static attribute is provided:
	* this list contains methods
	* the methods can remove or add methods from self.meths
	Example1: the attribute 'staticlib' is set on an instance
	a method set in the list of traits is executed when the
	instance is posted, it finds that flag and adds another method for execution
	Example2: a method set in the list of traits finds the msvc
	compiler (from self.env['MSVC']==1); more methods are added to self.meths
	"""

	__metaclass__ = register_obj # registers the *_taskgen subclasses (see above)
	mappings = {} # class-level table: file extension -> handler method
	mapped = {}
	prec = Utils.DefaultDict(list) # class-level method precedence constraints
	traits = Utils.DefaultDict(set) # feature name -> set of method names to run
	classes = {} # name -> class, filled in by the register_obj metaclass
	def __init__(self, *kw, **kwargs):
		"""*kw lists the features of this generator; **kwargs are set as
		instance attributes ('bld' selects the build context, defaulting
		to the global Build.bld)."""
		self.prec = Utils.DefaultDict(list)
		"map precedence of function names to call"
		# so we will have to play with directed acyclic graphs
		# detect cycles, etc

		self.source = ''
		self.target = ''

		# list of methods to execute - does not touch it by hand unless you know
		self.meths = []

		# list of mappings extension -> function
		self.mappings = {}

		# list of features (see the documentation on traits)
		self.features = list(kw)

		# not always a good idea
		self.tasks = []

		self.default_chmod = O644
		self.default_install_path = None

		# kind of private, beware of what you put in it, also, the contents are consumed
		self.allnodes = []

		self.bld = kwargs.get('bld', Build.bld)
		self.env = self.bld.env.copy()

		self.path = self.bld.path # emulate chdir when reading scripts
		self.name = '' # give a name to the target (static+shlib with the same targetname ambiguity)

		# provide a unique id
		self.idx = self.bld.idx[self.path.id] = self.bld.idx.get(self.path.id, 0) + 1

		for key, val in kwargs.iteritems():
			setattr(self, key, val)

		self.bld.task_manager.add_task_gen(self)
		self.bld.all_task_gen.append(self)
def __str__(self):
return ("<task_gen '%s' of type %s defined in %s>"
% (self.name or self.target, self.__class__.__name__, str(self.path)))
def __setattr__(self, name, attr):
real = typos.get(name, name)
if real != name:
warn('typo %s -> %s' % (name, real))
if Logs.verbose > 0:
traceback.print_stack()
object.__setattr__(self, real, attr)
def to_list(self, value):
"helper: returns a list"
if isinstance(value, str): return value.split()
else: return value
	def apply(self):
		"order the methods to execute using self.prec or task_gen.prec"
		keys = set(self.meths)

		# add the methods listed in the features
		self.features = Utils.to_list(self.features)
		for x in self.features + ['*']:
			st = task_gen.traits[x]
			if not st:
				warn('feature %r does not exist - bind at least one method to it' % x)
			keys.update(st)

		# copy the precedence table (only the constraints involving our methods)
		prec = {}
		prec_tbl = self.prec or task_gen.prec
		for x in prec_tbl:
			if x in keys:
				prec[x] = prec_tbl[x]

		# elements disconnected: methods with no incoming precedence edge
		tmp = []
		for a in keys:
			for x in prec.values():
				if a in x: break
			else:
				tmp.append(a)

		# topological sort: repeatedly remove nodes without incoming edges
		out = []
		while tmp:
			e = tmp.pop()
			if e in keys: out.append(e)
			try:
				nlst = prec[e]
			except KeyError:
				pass
			else:
				del prec[e]
				for x in nlst:
					# x becomes free once no remaining constraint points to it
					for y in prec:
						if x in prec[y]:
							break
					else:
						tmp.append(x)

		# leftover constraints mean a circular dependency between methods
		if prec: raise Utils.WafError("graph has a cycle %s" % str(prec))
		out.reverse()
		self.meths = out

		# then we run the methods in order
		debug('task_gen: posting %s %d' % (self, id(self)))
		for x in out:
			try:
				v = getattr(self, x)
			except AttributeError:
				raise Utils.WafError("tried to retrieve %s which is not a valid method" % x)
			debug('task_gen: -> %s (%d)' % (x, id(self)))
			v()
def post(self):
	"""Create the tasks for this generator - do not subclass.

	Derives self.name from self.target when unset, then runs apply()
	exactly once (guarded by the 'posted' flag).
	"""
	if not self.name:
		target = self.target
		if isinstance(target, list):
			self.name = ' '.join(target)
		else:
			self.name = target
	if getattr(self, 'posted', None):
		# already processed - posting twice would duplicate the tasks
		return
	self.apply()
	debug('task_gen: posted %s' % self.name)
	self.posted = True
def get_hook(self, ext):
	"""Return the handler registered for *ext*, checking the instance
	mappings before the class-level ones; None when no handler exists."""
	for table in (self.mappings, task_gen.mappings):
		try:
			return table[ext]
		except KeyError:
			continue
	return None
# TODO waf 1.6: always set the environment
# TODO waf 1.6: create_task(self, name, inputs, outputs)
def create_task(self, name, env=None):
	"""Instantiate the task class called *name*, bind it to this generator,
	record it in self.tasks and return it; the task receives a copy of
	*env* (default: this generator's environment)."""
	env = env or self.env
	task = Task.TaskBase.classes[name](env.copy(), generator=self)
	self.tasks.append(task)
	return task
def name_to_obj(self, name):
	# convenience wrapper: look up another task generator by name in the build context
	return self.bld.name_to_obj(name, self.env)
def find_sources_in_dirs(self, dirnames, excludes=[], exts=[]):
	"""
	The attributes "excludes" and "exts" must be lists to avoid the confusion
	find_sources_in_dirs('a', 'b', 'c') <-> find_sources_in_dirs('a b c')
	do not use absolute paths
	do not use paths outside of the source tree
	the files or folder beginning by . are not returned
	# TODO: remove in Waf 1.6
	"""
	err_msg = "'%s' attribute must be a list"
	if not isinstance(excludes, list):
		raise Utils.WscriptError(err_msg % 'excludes')
	if not isinstance(exts, list):
		raise Utils.WscriptError(err_msg % 'exts')
	lst = []
	#make sure dirnames is a list helps with dirnames with spaces
	dirnames = self.to_list(dirnames)
	# default to every extension with a registered handler (py2: keys() are lists)
	ext_lst = exts or self.mappings.keys() + task_gen.mappings.keys()
	for name in dirnames:
		anode = self.path.find_dir(name)
		# only relative paths inside the source tree are accepted
		if not anode or not anode.is_child_of(self.bld.srcnode):
			raise Utils.WscriptError("Unable to use '%s' - either because it's not a relative path" \
				", or it's not child of '%s'." % (name, self.bld.srcnode))
		self.bld.rescan(anode)
		for name in self.bld.cache_dir_contents[anode.id]:
			# ignore hidden files
			if name.startswith('.'):
				continue
			(base, ext) = os.path.splitext(name)
			if ext in ext_lst and not name in lst and not name in excludes:
				lst.append((anode.relpath_gen(self.path) or '.') + os.path.sep + name)
	lst.sort()
	# append to any sources the user already set explicitly
	self.source = self.to_list(self.source)
	if not self.source: self.source = lst
	else: self.source += lst
def clone(self, env):
	"""Duplicate this task generator for use with another environment.

	*env* may be an Environment instance or the name of an environment
	stored in bld.all_envs. 'path' and 'features' are shared by reference,
	'env'/'bld' are handled specially, everything else is shallow-copied.
	"""
	newobj = task_gen(bld=self.bld)
	for x in self.__dict__:
		if x in ['env', 'bld']:
			continue
		elif x in ["path", "features"]:
			setattr(newobj, x, getattr(self, x))
		else:
			setattr(newobj, x, copy.copy(getattr(self, x)))
	# preserve the concrete subclass of the original
	newobj.__class__ = self.__class__
	if isinstance(env, str):
		newobj.env = self.bld.all_envs[env].copy()
	else:
		newobj.env = env.copy()
	return newobj
def get_inst_path(self):
	# installation destination; falls back to the class-level default_install_path
	return getattr(self, '_install_path', getattr(self, 'default_install_path', ''))
def set_inst_path(self, val):
	self._install_path = val
# install_path behaves like a plain attribute but honours per-class defaults
install_path = property(get_inst_path, set_inst_path)
def get_chmod(self):
	# file mode used at installation time; O644 is the module-level default
	return getattr(self, '_chmod', getattr(self, 'default_chmod', O644))
def set_chmod(self, val):
	self._chmod = val
# chmod behaves like a plain attribute but honours per-class defaults
chmod = property(get_chmod, set_chmod)
def declare_extension(var, func):
	"""Register *func* as the handler for the extension(s) in *var*
	(a string or list of strings) in task_gen.mappings, and record it by
	name in task_gen.mapped."""
	try:
		for x in Utils.to_list(var):
			task_gen.mappings[x] = func
	except Exception:
		# was a bare 'except:', which also swallowed KeyboardInterrupt/SystemExit;
		# anything that is not a string or list of strings is a wscript error
		raise Utils.WscriptError('declare_extension takes either a list or a string %r' % var)
	task_gen.mapped[func.__name__] = func
def declare_order(*k):
	"""Declare that the given method names must run in the given order:
	each name is scheduled before the one that follows it."""
	assert(len(k) > 1)
	for earlier, later in zip(k[:-1], k[1:]):
		if earlier not in task_gen.prec[later]:
			task_gen.prec[later].append(earlier)
def declare_chain(name='', action='', ext_in='', ext_out='', reentrant=1, color='BLUE',
	install=0, before=[], after=[], decider=None, rule=None, scan=None):
	"""
	see Tools/flex.py for an example
	while i do not like such wrappers, some people really do

	Creates a task class from *action* (string rule or function) and registers
	a per-file handler for the input extensions. 'reentrant' feeds the produced
	files back into the source list so they are processed in turn; 'decider'
	may compute the output extension(s) per node at runtime.
	"""
	action = action or rule
	if isinstance(action, str):
		act = Task.simple_task_type(name, action, color=color)
	else:
		act = Task.task_type_from_func(name, action, color=color)
	act.ext_in = tuple(Utils.to_list(ext_in))
	act.ext_out = tuple(Utils.to_list(ext_out))
	act.before = Utils.to_list(before)
	act.after = Utils.to_list(after)
	act.scan = scan
	def x_file(self, node):
		# per-node handler registered below for each input extension
		if decider:
			ext = decider(self, node)
		elif isinstance(ext_out, str):
			ext = ext_out
		# NOTE(review): if 'decider' is None and ext_out is not a string, 'ext'
		# is unbound here and the isinstance checks raise NameError instead of
		# the WafError below - confirm intended ext_out types with callers
		if isinstance(ext, str):
			out_source = node.change_ext(ext)
			if reentrant:
				self.allnodes.append(out_source)
		elif isinstance(ext, list):
			out_source = [node.change_ext(x) for x in ext]
			if reentrant:
				# only the first 'reentrant' outputs are fed back as sources
				for i in xrange(reentrant):
					self.allnodes.append(out_source[i])
		else:
			# XXX: useless: it will fail on Utils.to_list above...
			raise Utils.WafError("do not know how to process %s" % str(ext))
		tsk = self.create_task(name)
		tsk.set_inputs(node)
		tsk.set_outputs(out_source)
		if node.__class__.bld.is_install == INSTALL:
			tsk.install = install
	declare_extension(act.ext_in, x_file)
def bind_feature(name, methods):
	"""Associate the given method name(s) with the feature *name*."""
	task_gen.traits[name].update(Utils.to_list(methods))
"""
All the following decorators are registration decorators, i.e add an attribute to current class
(task_gen and its derivatives), with same name as func, which points to func itself.
For example:
@taskgen
def sayHi(self):
print("hi")
Now taskgen.sayHi() may be called
If python were really smart, it could infer itself the order of methods by looking at the
attributes. A prerequisite for execution is to have the attribute set before.
Intelligent compilers binding aspect-oriented programming and parallelization, what a nice topic for studies.
"""
def taskgen(func):
	"""Decorator: attach *func* as a method of task_gen (registration only,
	no scheduling information is added)."""
	setattr(task_gen, func.__name__, func)
def feature(*k):
	"""Decorator: attach the function to task_gen and bind it to each of
	the named features so apply() will schedule it."""
	def deco(func):
		setattr(task_gen, func.__name__, func)
		for name in k:
			task_gen.traits[name].update([func.__name__])
		return func
	return deco
def before(*k):
	"""Decorator: attach the function to task_gen and constrain it to run
	before each of the named methods (edges go into task_gen.prec)."""
	def deco(func):
		setattr(task_gen, func.__name__, func)
		for fun_name in k:
			if not func.__name__ in task_gen.prec[fun_name]:
				task_gen.prec[fun_name].append(func.__name__)
		return func
	return deco
def after(*k):
	"""Decorator: attach the function to task_gen and constrain it to run
	after each of the named methods (mirror image of 'before')."""
	def deco(func):
		setattr(task_gen, func.__name__, func)
		for fun_name in k:
			if not fun_name in task_gen.prec[func.__name__]:
				task_gen.prec[func.__name__].append(fun_name)
		return func
	return deco
def extension(var):
	"""Decorator: attach the function to task_gen and register it as the
	handler for the file extension(s) in *var* (string or list of strings)."""
	def deco(func):
		setattr(task_gen, func.__name__, func)
		try:
			for x in Utils.to_list(var):
				task_gen.mappings[x] = func
		except Exception:
			# was a bare 'except:', which also swallowed KeyboardInterrupt/SystemExit
			raise Utils.WafError('extension takes either a list or a string %r' % var)
		task_gen.mapped[func.__name__] = func
		return func
	return deco
# TODO make certain the decorators may be used here
def apply_core(self):
	"""Process the attribute source
	transform the names into file nodes
	try to process the files by name first, later by extension"""
	# get the list of folders to use by the scanners
	# all our objects share the same include paths anyway
	find_resource = self.path.find_resource
	for filename in self.to_list(self.source):
		# if self.mappings or task_gen.mappings contains a file of the same name
		x = self.get_hook(filename)
		if x:
			x(self, filename)
		else:
			node = find_resource(filename)
			if not node: raise Utils.WafError("source not found: '%s' in '%s'" % (filename, str(self.path)))
			self.allnodes.append(node)
	for node in self.allnodes:
		# self.mappings or task_gen.mappings map the file extension to a function
		x = self.get_hook(node.suffix())
		if not x:
			raise Utils.WafError("Cannot guess how to process %s (got mappings %r in %r) -> try conf.check_tool(..)?" % \
				(str(node), self.__class__.mappings.keys(), self.__class__))
		x(self, node)
# run for every task generator, regardless of its features
feature('*')(apply_core)
def exec_rule(self):
	"""Process the attribute rule, when provided the method apply_core will be disabled
	"""
	if not getattr(self, 'rule', None):
		return
	# someone may have removed it already
	try:
		self.meths.remove('apply_core')
	except ValueError:
		pass
	# get the function and the variables
	func = self.rule
	vars2 = []
	if isinstance(func, str):
		# use the shell by default for user-defined commands
		(func, vars2) = Task.compile_fun('', self.rule, shell=getattr(self, 'shell', True))
		func.code = self.rule
	# variables that participate in the task signature
	vars = getattr(self, 'vars', vars2)
	if not vars:
		if isinstance(self.rule, str):
			vars = self.rule
		else:
			vars = Utils.h_fun(self.rule)
	# create the task class
	name = getattr(self, 'name', None) or self.target or self.rule
	cls = Task.task_type_from_func(name, func, vars)
	# now create one instance
	tsk = self.create_task(name)
	# we assume that the user knows that without inputs or outputs
	#if not getattr(self, 'target', None) and not getattr(self, 'source', None):
	#	cls.quiet = True
	if getattr(self, 'target', None):
		cls.quiet = True
		tsk.outputs=[self.path.find_or_declare(x) for x in self.to_list(self.target)]
	if getattr(self, 'source', None):
		cls.quiet = True
		tsk.inputs = []
		for x in self.to_list(self.source):
			y = self.path.find_resource(x)
			if not y:
				raise Utils.WafError('input file %r could not be found (%r)' % (x, self.path.abspath()))
			tsk.inputs.append(y)
	if getattr(self, 'always', None):
		Task.always_run(cls)
	if getattr(self, 'scan', None):
		cls.scan = self.scan
	if getattr(self, 'install_path', None):
		tsk.install_path = self.install_path
	if getattr(self, 'cwd', None):
		tsk.cwd = self.cwd
	if getattr(self, 'on_results', None):
		Task.update_outputs(cls)
	# forward ordering constraints from the generator to the task class
	for x in ['after', 'before']:
		setattr(cls, x, getattr(self, x, []))
# run for every generator, and before apply_core so rules take precedence
feature('*')(exec_rule)
before('apply_core')(exec_rule)
def sequence_order(self):
	"""
	add a strict sequential constraint between the tasks generated by task generators
	it uses the fact that task generators are posted in order
	it will not post objects which belong to other folders
	there is also an awesome trick for executing the method in last position
	to use:
	bld.new_task_gen(features='javac seq')
	bld.new_task_gen(features='jar seq')
	to start a new sequence, set the attribute seq_start, for example:
	obj.seq_start = True
	"""
	if self.meths and self.meths[-1] != 'sequence_order':
		# re-append ourselves so we run after every other method ("awesome trick")
		self.meths.append('sequence_order')
		return
	if getattr(self, 'seq_start', None):
		return
	# all the tasks previously declared must be run before these
	if getattr(self.bld, 'prev', None):
		self.bld.prev.post()
		for x in self.bld.prev.tasks:
			for y in self.tasks:
				y.set_run_after(x)
	# remember this generator for the next one in the sequence
	self.bld.prev = self
feature('seq')(sequence_order)

266
tools/wafadmin/Tools/UnitTest.py

@ -0,0 +1,266 @@
#!/usr/bin/env python
# encoding: utf-8
# Carlos Rafael Giani, 2006
"""
Unit tests run in the shutdown() method, and for c/c++ programs
One should NOT have to give parameters to programs to execute
In the shutdown method, add the following code:
>>> def shutdown():
... ut = UnitTest.unit_test()
... ut.run()
... ut.print_results()
Each object to use as a unit test must be a program and must have X{obj.unit_test=1}
"""
import os, sys
import Build, TaskGen, Utils, Options, Logs, Task
from Constants import *
class unit_test(object):
	"Unit test representation"
	def __init__(self):
		self.returncode_ok = 0	# Unit test returncode considered OK. All returncodes differing from this one
					# will cause the unit test to be marked as "FAILED".
		# The following variables are filled with data by run().
		# print_results() uses these for printing the unit test summary,
		# but if there is need for direct access to the results,
		# they can be retrieved here, after calling run().
		self.num_tests_ok = 0		# Number of successful unit tests
		self.num_tests_failed = 0	# Number of failed unit tests
		self.num_tests_err = 0		# Tests that have not even run
		self.total_num_tests = 0	# Total amount of unit tests
		self.max_label_length = 0	# Maximum label length (pretty-print the output)
		self.unit_tests = Utils.ordered_dict()	# Unit test dictionary. Key: the label (unit test filename relative
					# to the build dir), value: unit test filename with absolute path
		self.unit_test_results = {}	# Dictionary containing the unit test results.
					# Key: the label, value: result (true = success false = failure)
		self.unit_test_erroneous = {}	# Dictionary indicating erroneous unit tests.
					# Key: the label, value: true = unit test has an error false = unit test is ok
		self.change_to_testfile_dir = False	# True if the test file needs to be executed from the same dir
		self.want_to_see_test_output = False	# True to see the stdout from the testfile (for example check suites)
		self.want_to_see_test_error = False	# True to see the stderr from the testfile (for example check suites)
		self.run_if_waf_does = 'check'	# build was the old default
	def run(self):
		"Run the unit tests and gather results (note: no output here)"
		# reset all counters so run() can safely be called more than once
		self.num_tests_ok = 0
		self.num_tests_failed = 0
		self.num_tests_err = 0
		self.total_num_tests = 0
		self.max_label_length = 0
		self.unit_tests = Utils.ordered_dict()
		self.unit_test_results = {}
		self.unit_test_erroneous = {}
		ld_library_path = []
		# If waf is not building, don't run anything
		if not Options.commands[self.run_if_waf_does]: return
		# Get the paths for the shared libraries, and obtain the unit tests to execute
		for obj in Build.bld.all_task_gen:
			try:
				link_task = obj.link_task
			except AttributeError:
				pass
			else:
				# remember where the freshly-built libraries live
				lib_path = link_task.outputs[0].parent.abspath(obj.env)
				if lib_path not in ld_library_path:
					ld_library_path.append(lib_path)
			unit_test = getattr(obj, 'unit_test', '')
			if unit_test and 'cprogram' in obj.features:
				try:
					output = obj.path
					filename = os.path.join(output.abspath(obj.env), obj.target)
					srcdir = output.abspath()
					label = os.path.join(output.bldpath(obj.env), obj.target)
					self.max_label_length = max(self.max_label_length, len(label))
					self.unit_tests[label] = (filename, srcdir)
				except KeyError:
					pass
		self.total_num_tests = len(self.unit_tests)
		# Now run the unit tests
		Utils.pprint('GREEN', 'Running the unit tests')
		count = 0
		result = 1
		for label in self.unit_tests.allkeys:
			file_and_src = self.unit_tests[label]
			filename = file_and_src[0]
			srcdir = file_and_src[1]
			count += 1
			line = Build.bld.progress_line(count, self.total_num_tests, Logs.colors.GREEN, Logs.colors.NORMAL)
			if Options.options.progress_bar and line:
				sys.stderr.write(line)
				sys.stderr.flush()
			try:
				kwargs = {}
				kwargs['env'] = os.environ.copy()
				if self.change_to_testfile_dir:
					kwargs['cwd'] = srcdir
				if not self.want_to_see_test_output:
					kwargs['stdout'] = Utils.pproc.PIPE # PIPE for ignoring output
				if not self.want_to_see_test_error:
					kwargs['stderr'] = Utils.pproc.PIPE # PIPE for ignoring output
				if ld_library_path:
					v = kwargs['env']
					def add_path(dct, path, var):
						# prepend the build library dirs to the platform's loader path
						dct[var] = os.pathsep.join(Utils.to_list(path) + [os.environ.get(var, '')])
					if sys.platform == 'win32':
						add_path(v, ld_library_path, 'PATH')
					elif sys.platform == 'darwin':
						add_path(v, ld_library_path, 'DYLD_LIBRARY_PATH')
						add_path(v, ld_library_path, 'LD_LIBRARY_PATH')
					else:
						add_path(v, ld_library_path, 'LD_LIBRARY_PATH')
				pp = Utils.pproc.Popen(filename, **kwargs)
				pp.wait()
				result = int(pp.returncode == self.returncode_ok)
				if result:
					self.num_tests_ok += 1
				else:
					self.num_tests_failed += 1
				self.unit_test_results[label] = result
				self.unit_test_erroneous[label] = 0
			except OSError:
				# the binary could not be executed at all
				self.unit_test_erroneous[label] = 1
				self.num_tests_err += 1
			except KeyboardInterrupt:
				pass
		if Options.options.progress_bar: sys.stdout.write(Logs.colors.cursor_on)
	def print_results(self):
		"Pretty-prints a summary of all unit tests, along with some statistics"
		# If waf is not building, don't output anything
		if not Options.commands[self.run_if_waf_does]: return
		p = Utils.pprint
		# Early quit if no tests were performed
		if self.total_num_tests == 0:
			p('YELLOW', 'No unit tests present')
			return
		for label in self.unit_tests.allkeys:
			filename = self.unit_tests[label]
			err = 0
			result = 0
			try: err = self.unit_test_erroneous[label]
			except KeyError: pass
			try: result = self.unit_test_results[label]
			except KeyError: pass
			# pad with dots so the status column lines up
			n = self.max_label_length - len(label)
			if err: n += 4
			elif result: n += 7
			else: n += 3
			line = '%s %s' % (label, '.' * n)
			if err: p('RED', '%sERROR' % line)
			elif result: p('GREEN', '%sOK' % line)
			else: p('YELLOW', '%sFAILED' % line)
		percentage_ok = float(self.num_tests_ok) / float(self.total_num_tests) * 100.0
		percentage_failed = float(self.num_tests_failed) / float(self.total_num_tests) * 100.0
		percentage_erroneous = float(self.num_tests_err) / float(self.total_num_tests) * 100.0
		p('NORMAL', '''
Successful tests: %i (%.1f%%)
Failed tests: %i (%.1f%%)
Erroneous tests: %i (%.1f%%)
Total number of tests: %i
''' % (self.num_tests_ok, percentage_ok, self.num_tests_failed, percentage_failed,
	self.num_tests_err, percentage_erroneous, self.total_num_tests))
		p('GREEN', 'Unit tests finished')
############################################################################################
"""
New unit test system
The targets with feature 'test' are executed after they are built
bld.new_task_gen(features='cprogram cc test', ...)
To display the results:
import UnitTest
bld.add_post_fun(UnitTest.summary)
"""
import threading
testlock = threading.Lock()
@TaskGen.feature('test')
@TaskGen.after('apply_link')
def make_test(self):
	"""Create a 'utest' task taking the linked binary as input; only
	meaningful for cprogram targets."""
	if not 'cprogram' in self.features:
		Logs.error('test cannot be executed %s' % self)
		return
	tsk = self.create_task('utest')
	tsk.set_inputs(self.link_task.outputs)
def exec_test(self):
	"""Execute one test binary and append (filename, failed, output) to
	bld.utest_results; runs are serialized through the module testlock."""
	fail = False
	testlock.acquire()
	try:
		# the lock is now released on every exit path; previously an
		# exception skipped testlock.release() and stalled all later tests
		filename = self.inputs[0].abspath(self.env)
		try:
			# run the test from its own build directory; the old hard-coded
			# '/cygdrive/c/home/waf-1.5.8/...' developer path broke every
			# run outside that machine
			ret = Utils.cmd_output(filename, cwd=self.inputs[0].parent.abspath(self.env))
		except Exception:
			fail = True
			ret = ""
		stats = getattr(self.generator.bld, 'utest_results', [])
		stats.append((filename, fail, ret))
		self.generator.bld.utest_results = stats
	finally:
		testlock.release()
# build the 'utest' task class around exec_test
cls = Task.task_type_from_func('utest', func=exec_test, color='RED', ext_in='.bin')
old = cls.runnable_status
def test_status(self):
	# --alltests forces execution even when the inputs are up to date
	if getattr(Options.options, 'all_tests', False):
		return RUN_ME
	return old(self)
cls.runnable_status = test_status
cls.quiet = 1	# do not echo the command line for each test
def summary(bld):
	"""Print one line per executed unit test; silent when none ran.
	Intended for bld.add_post_fun(UnitTest.summary)."""
	results = getattr(bld, 'utest_results', [])
	if not results:
		return
	Utils.pprint('CYAN', 'execution summary')
	for (filename, failed, out) in results:
		if failed:
			Utils.pprint('RED', 'FAIL' + " " + filename)
		else:
			Utils.pprint('GREEN', 'ok' + " " + filename)
def set_options(opt):
	"""Add --alltests: force execution of every test even if up to date."""
	opt.add_option('--alltests', action='store_true', default=False, help='Exec all unit tests', dest='all_tests')

4
tools/wafadmin/Tools/__init__.py

@ -0,0 +1,4 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2006 (ita)

36
tools/wafadmin/Tools/ar.py

@ -0,0 +1,36 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2006-2008 (ita)
# Ralf Habacker, 2006 (rh)
"ar and ranlib"
import os, sys
import Task, Utils
from Configure import conftest
# task class for creating static libraries with ar
ar_str = '${AR} ${ARFLAGS} ${AR_TGT_F}${TGT} ${AR_SRC_F}${SRC}'
cls = Task.simple_task_type('static_link', ar_str, color='YELLOW', ext_in='.o', shell=False)
cls.maxjobs = 1	# never run two static links in parallel
cls.install = Utils.nada	# installation handled separately
# remove the output in case it already exists
old = cls.run
def wrap(self):
	# 'ar' appends to an existing archive, so delete stale output first
	try: os.remove(self.outputs[0].abspath(self.env))
	except OSError: pass
	return old(self)
setattr(cls, 'run', wrap)
def detect(conf):
	"""Configuration: locate ar and ranlib, set default archive flags."""
	conf.find_program('ar', var='AR')
	conf.find_program('ranlib', var='RANLIB')
	conf.env.ARFLAGS = 'rcs'
@conftest
def find_ar(conf):
	"""Configuration helper: fail early when no 'ar' binary was found."""
	v = conf.env
	conf.check_tool('ar')
	if not v['AR']: conf.fatal('ar is required for static libraries - not found')

42
tools/wafadmin/Tools/bison.py

@ -0,0 +1,42 @@
#!/usr/bin/env python
# encoding: utf-8
# John O'Meara, 2006
# Thomas Nagy 2009
"Bison processing"
import Task
from TaskGen import extension
bison = '${BISON} ${BISONFLAGS} ${SRC[0].abspath()} -o ${TGT[0].name}'
cls = Task.simple_task_type('bison', bison, 'GREEN', ext_in='.yc .y .yy', ext_out='.c .cxx .h .l', before='cxx', shell=False)
@extension(['.y', '.yc', '.yy'])
def big_bison(self, node):
	"""when it becomes complicated (unlike flex), the old recipes work better (cwd)"""
	tsk = self.create_task('bison')
	tsk.set_inputs(node)
	has_h = '-d' in self.env['BISONFLAGS']	# -d makes bison emit a header too
	outs = []
	if node.name.endswith('.yc'):
		outs.append(node.change_ext('.tab.cc'))
		if has_h:
			outs.append(node.change_ext('.tab.hh'))
	else:
		outs.append(node.change_ext('.tab.c'))
		if has_h:
			outs.append(node.change_ext('.tab.h'))
	tsk.set_outputs(outs)
	# run bison from the build directory so side files land there
	tsk.cwd = node.bld_dir(tsk.env)
	# and the c/cxx file must be compiled too
	self.allnodes.append(outs[0])
def detect(conf):
	"""Configuration: find the bison binary and set the default flags.

	The previous code bound find_program's result to a local 'bison',
	shadowing the module-level rule string of the same name; the value
	was never used, so the assignment is dropped.
	"""
	conf.find_program('bison', var='BISON', mandatory=True)
	conf.env['BISONFLAGS'] = '-d'

335
tools/wafadmin/Tools/boost.py

@ -0,0 +1,335 @@
#!/usr/bin/env python
# encoding: utf-8
#
# partially based on boost.py written by Gernot Vormayr
# written by Ruediger Sonderfeld <ruediger@c-plusplus.de>, 2008
# modified by Bjoern Michaelsen, 2008
# modified by Luca Fossati, 2008
# rewritten for waf 1.5.1, Thomas Nagy, 2008
#
#def set_options(opt):
# opt.tool_options('boost')
# # ...
#
#def configure(conf):
# # ... (e.g. conf.check_tool('g++'))
# conf.check_tool('boost')
# conf.check_boost(lib='signals filesystem', static='onlystatic', score_version=(-1000, 1000), tag_minscore=1000)
#
#def build(bld):
# bld.new_task_gen(source='main.c', target='bar', uselib="BOOST BOOST_SYSTEM")
#
#ISSUES:
# * find_includes should be called only once!
# * support mandatory
######## boost update ###########
## ITA: * the method get_boost_version_number does work
## * the rest of the code has not really been tried
# * make certain a demo is provided (in demos/adv for example)
# TODO: boost.py will be removed in waf 1.6
import os.path, glob, types, re, sys
import Configure, config_c, Options, Utils, Logs
from Logs import warn
from Configure import conf
boost_code = '''
#include <iostream>
#include <boost/version.hpp>
int main() { std::cout << BOOST_VERSION << std::endl; }
'''
boost_libpath = ['/usr/lib', '/usr/local/lib', '/opt/local/lib', '/sw/lib', '/lib']
boost_cpppath = ['/usr/include', '/usr/local/include', '/opt/local/include', '/sw/include']
STATIC_NOSTATIC = 'nostatic'
STATIC_BOTH = 'both'
STATIC_ONLYSTATIC = 'onlystatic'
is_versiontag = re.compile('^\d+_\d+_?\d*$')
is_threadingtag = re.compile('^mt$')
is_abitag = re.compile('^[sgydpn]+$')
is_toolsettag = re.compile('^(acc|borland|como|cw|dmc|darwin|gcc|hp_cxx|intel|kylix|vc|mgw|qcc|sun|vacpp)\d*$')
def set_options(opt):
	"""Add the --boost-includes and --boost-libs command-line options."""
	opt.add_option('--boost-includes', type='string', default='', dest='boostincludes', help='path to the boost directory where the includes are e.g. /usr/local/include/boost-1_35')
	opt.add_option('--boost-libs', type='string', default='', dest='boostlibs', help='path to the directory where the boost libs are e.g. /usr/local/lib')
def string_to_version(s):
	"""Convert a 'major.minor.patch' string to one integer
	(major*100000 + minor*100 + patch); 0 when fewer than 3 components."""
	parts = s.split('.')
	if len(parts) < 3:
		return 0
	major, minor, patch = int(parts[0]), int(parts[1]), int(parts[2])
	return major * 100000 + minor * 100 + patch
def version_string(version):
	"""Inverse of string_to_version: render an integer version as the
	boost directory tag 'major_minor' or 'major_minor_patch'."""
	major, rest = divmod(version, 100000)
	minor, minor_minor = divmod(rest, 100)
	if minor_minor == 0:
		return "%d_%d" % (major, minor)
	return "%d_%d_%d" % (major, minor, minor_minor)
def libfiles(lib, pattern, lib_paths):
	"""Glob each directory in *lib_paths* for files matching the library
	naming pattern (e.g. 'lib%s.so' % 'boost_<lib>*') and return all hits."""
	libname = pattern % ('boost_' + lib + '*')
	matches = []
	for lib_path in lib_paths:
		matches.extend(glob.glob(lib_path + '/' + libname))
	return matches
@conf
def get_boost_version_number(self, dir):
	"""silently retrieve the boost version number"""
	try:
		# compile and run a tiny program printing BOOST_VERSION with *dir* on the include path
		return self.run_c_code(compiler='cxx', code=boost_code, includes=dir, execute=1, env=self.env.copy(), type='cprogram', compile_mode='cxx', compile_filename='test.cpp')
	except Configure.ConfigurationError, e:
		# -1 signals "no usable boost headers in this directory"
		return -1
def set_default(kw, var, val):
	"""Store *val* under *var* in the kw dict only when the key is absent."""
	kw.setdefault(var, val)
def tags_score(tags, kw):
	"""
	checks library tags
	see http://www.boost.org/doc/libs/1_35_0/more/getting_started/unix-variants.html 6.1
	"""
	score = 0
	# the regexes (or None) the caller wants each tag category to match
	needed_tags = {
		'threading': kw['tag_threading'],
		'abi': kw['tag_abi'],
		'toolset': kw['tag_toolset'],
		'version': kw['tag_version']
	}
	if kw['tag_toolset'] is None:
		# derive a toolset tag such as 'gcc43' from the configured compiler
		v = kw['env']
		toolset = v['CXX_NAME']
		if v['CXX_VERSION']:
			version_no = v['CXX_VERSION'].split('.')
			toolset += version_no[0]
			if len(version_no) > 1:
				toolset += version_no[1]
		needed_tags['toolset'] = toolset
	# classify each filename tag into its category
	found_tags = {}
	for tag in tags:
		if is_versiontag.match(tag): found_tags['version'] = tag
		if is_threadingtag.match(tag): found_tags['threading'] = tag
		if is_abitag.match(tag): found_tags['abi'] = tag
		if is_toolsettag.match(tag): found_tags['toolset'] = tag
	# score_<tag> is (match_bonus, mismatch_penalty)
	for tagname in needed_tags.iterkeys():
		if needed_tags[tagname] is not None and tagname in found_tags:
			if re.compile(needed_tags[tagname]).match(found_tags[tagname]):
				score += kw['score_' + tagname][0]
			else:
				score += kw['score_' + tagname][1]
	return score
@conf
def validate_boost(self, kw):
	"""Fill the check_boost keyword dict with sensible defaults."""
	ver = kw.get('version', '')
	# 'version' doubles as the default for min/max when they are absent
	for x in 'min_version max_version version'.split():
		set_default(kw, x, ver)
	set_default(kw, 'lib', '')
	kw['lib'] = Utils.to_list(kw['lib'])
	set_default(kw, 'env', self.env)
	set_default(kw, 'libpath', boost_libpath)
	set_default(kw, 'cpppath', boost_cpppath)
	for x in 'tag_threading tag_version tag_toolset'.split():
		set_default(kw, x, None)
	# by default avoid the debug ('d') abi variants
	set_default(kw, 'tag_abi', '^[^d]*$')
	# (match_bonus, mismatch_penalty) pairs used by tags_score
	set_default(kw, 'score_threading', (10, -10))
	set_default(kw, 'score_abi', (10, -10))
	set_default(kw, 'score_toolset', (1, -1))
	set_default(kw, 'score_version', (100, -100))
	set_default(kw, 'score_min', 0)
	set_default(kw, 'static', STATIC_NOSTATIC)
	set_default(kw, 'found_includes', False)
	set_default(kw, 'min_score', 0)
	set_default(kw, 'errmsg', 'not found')
	set_default(kw, 'okmsg', 'ok')
@conf
def find_boost_includes(self, kw):
	"""
	check every path in kw['cpppath'] for subdir
	that either starts with boost- or is named boost.

	Then the version is checked and selected accordingly to
	min_version/max_version. The highest possible version number is
	selected!

	If no versiontag is set the versiontag is set accordingly to the
	selected library and CPPPATH_BOOST is set.
	"""
	# --boost-includes on the command line overrides the search paths
	boostPath = getattr(Options.options, 'boostincludes', '')
	if boostPath:
		boostPath = [os.path.normpath(os.path.expandvars(os.path.expanduser(boostPath)))]
	else:
		boostPath = Utils.to_list(kw['cpppath'])
	min_version = string_to_version(kw.get('min_version', ''))
	max_version = string_to_version(kw.get('max_version', '')) or (sys.maxint - 1)
	version = 0
	for include_path in boostPath:
		boost_paths = glob.glob(os.path.join(include_path, 'boost*'))
		for path in boost_paths:
			pathname = os.path.split(path)[-1]
			ret = -1
			if pathname == 'boost':
				# a plain 'boost' dir: headers are included via the parent
				path = include_path
				ret = self.get_boost_version_number(path)
			elif pathname.startswith('boost-'):
				ret = self.get_boost_version_number(path)
			ret = int(ret)
			# keep the highest version inside the allowed range
			if ret != -1 and ret >= min_version and ret <= max_version and ret > version:
				boost_path = path
				version = ret
	if not version:
		self.fatal('boost headers not found! (required version min: %s max: %s)'
			% (kw['min_version'], kw['max_version']))
		return False
	found_version = version_string(version)
	versiontag = '^' + found_version + '$'
	if kw['tag_version'] is None:
		kw['tag_version'] = versiontag
	elif kw['tag_version'] != versiontag:
		warn('boost header version %r and tag_version %r do not match!' % (versiontag, kw['tag_version']))
	env = self.env
	env['CPPPATH_BOOST'] = boost_path
	env['BOOST_VERSION'] = found_version
	self.found_includes = 1
	ret = 'Version %s (%s)' % (found_version, boost_path)
	return ret
@conf
def find_boost_library(self, lib, kw):
	"""Locate one boost library, choosing the best-scoring tagged variant,
	and record LIBPATH_BOOST_<LIB> / (STATIC)LIB_BOOST_<LIB> in the env."""
	def find_library_from_list(lib, files):
		# return the (libname, file) pair with the highest tags_score
		lib_pattern = re.compile('.*boost_(.*?)\..*')
		result = (None, None)
		resultscore = kw['min_score'] - 1
		for file in files:
			m = lib_pattern.search(file, 1)
			if m:
				libname = m.group(1)
				libtags = libname.split('-')[1:]
				currentscore = tags_score(libtags, kw)
				if currentscore > resultscore:
					result = (libname, file)
					resultscore = currentscore
		return result
	# --boost-libs on the command line overrides the search paths
	lib_paths = getattr(Options.options, 'boostlibs', '')
	if lib_paths:
		lib_paths = [os.path.normpath(os.path.expandvars(os.path.expanduser(lib_paths)))]
	else:
		lib_paths = Utils.to_list(kw['libpath'])
	v = kw.get('env', self.env)
	(libname, file) = (None, None)
	if kw['static'] in [STATIC_NOSTATIC, STATIC_BOTH]:
		st_env_prefix = 'LIB'
		files = libfiles(lib, v['shlib_PATTERN'], lib_paths)
		(libname, file) = find_library_from_list(lib, files)
	# fall back to static libraries when allowed and no shared lib matched
	if libname is None and kw['static'] in [STATIC_ONLYSTATIC, STATIC_BOTH]:
		st_env_prefix = 'STATICLIB'
		staticLibPattern = v['staticlib_PATTERN']
		if self.env['CC_NAME'] == 'msvc':
			staticLibPattern = 'lib' + staticLibPattern
		files = libfiles(lib, staticLibPattern, lib_paths)
		(libname, file) = find_library_from_list(lib, files)
	if libname is not None:
		v['LIBPATH_BOOST_' + lib.upper()] = [os.path.split(file)[0]]
		if self.env['CC_NAME'] == 'msvc' and os.path.splitext(file)[1] == '.lib':
			v[st_env_prefix + '_BOOST_' + lib.upper()] = ['libboost_'+libname]
		else:
			v[st_env_prefix + '_BOOST_' + lib.upper()] = ['boost_'+libname]
		return
	self.fatal('lib boost_' + lib + ' not found!')
@conf
def check_boost(self, *k, **kw):
	"""
	This should be the main entry point
- min_version
- max_version
- version
- include_path
- lib_path
- lib
- toolsettag - None or a regexp
- threadingtag - None or a regexp
- abitag - None or a regexp
- versiontag - WARNING: you should rather use version or min_version/max_version
- static - look for static libs (values:
	  'nostatic' or STATIC_NOSTATIC - ignore static libs (default)
	  'both' or STATIC_BOTH - find static libs, too
	  'onlystatic' or STATIC_ONLYSTATIC - find only static libs
- score_version
- score_abi
- scores_threading
- score_toolset
 * the scores are tuples (match_score, nomatch_score)
   match_score is the added to the score if the tag is matched
   nomatch_score is added when a tag is found and does not match
- min_score
	"""
	if not self.env['CXX']:
		self.fatal('load a c++ compiler tool first, for example conf.check_tool("g++")')
	self.validate_boost(kw)
	ret = None
	# 1. locate the headers (unless the caller says they were found already)
	try:
		if not kw.get('found_includes', None):
			self.check_message_1(kw.get('msg_includes', 'boost headers'))
			ret = self.find_boost_includes(kw)
	except Configure.ConfigurationError, e:
		if 'errmsg' in kw:
			self.check_message_2(kw['errmsg'], 'YELLOW')
		if 'mandatory' in kw:
			if Logs.verbose > 1:
				raise
			else:
				self.fatal('the configuration failed (see %r)' % self.log.name)
	else:
		if 'okmsg' in kw:
			self.check_message_2(kw.get('okmsg_includes', ret))
	# 2. locate every requested library
	for lib in kw['lib']:
		self.check_message_1('library boost_'+lib)
		try:
			self.find_boost_library(lib, kw)
		except Configure.ConfigurationError, e:
			ret = False
			if 'errmsg' in kw:
				self.check_message_2(kw['errmsg'], 'YELLOW')
			if 'mandatory' in kw:
				if Logs.verbose > 1:
					raise
				else:
					self.fatal('the configuration failed (see %r)' % self.log.name)
		else:
			if 'okmsg' in kw:
				self.check_message_2(kw['okmsg'])
	return ret

102
tools/wafadmin/Tools/cc.py

@ -0,0 +1,102 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2006 (ita)
"Base for c programs/libraries"
import os
import TaskGen, Build, Utils, Task
from Logs import debug
import ccroot
from TaskGen import feature, before, extension, after
g_cc_flag_vars = [
'CCDEPS', 'FRAMEWORK', 'FRAMEWORKPATH',
'STATICLIB', 'LIB', 'LIBPATH', 'LINKFLAGS', 'RPATH',
'CCFLAGS', 'CPPPATH', 'CPPFLAGS', 'CCDEFINES']
EXT_CC = ['.c']
g_cc_type_vars = ['CCFLAGS', 'LINKFLAGS']
# TODO remove in waf 1.6
# backward-compatibility alias: the C behaviour lives in the 'cc' feature methods
class cc_taskgen(ccroot.ccroot_abstract):
	pass
@feature('cc')
@before('apply_type_vars')
@after('default_cc')
def init_cc(self):
	"""Merge the C-specific flag/type variable names into the generator
	and verify that a C compiler was configured."""
	self.p_flag_vars = set(self.p_flag_vars).union(g_cc_flag_vars)
	self.p_type_vars = set(self.p_type_vars).union(g_cc_type_vars)
	if not self.env['CC_NAME']:
		raise Utils.WafError("At least one compiler (gcc, ..) must be selected")
@feature('cc')
@after('apply_incpaths')
def apply_obj_vars_cc(self):
	"""after apply_incpaths for INC_PATHS

	Expand the include directories into _CCINCFLAGS using CPPPATH_ST."""
	env = self.env
	app = env.append_unique
	cpppath_st = env['CPPPATH_ST']
	# local flags come first
	# set the user-defined includes paths
	for i in env['INC_PATHS']:
		# both the build and source variants of each include node
		app('_CCINCFLAGS', cpppath_st % i.bldpath(env))
		app('_CCINCFLAGS', cpppath_st % i.srcpath(env))
	# set the library include paths
	for i in env['CPPPATH']:
		app('_CCINCFLAGS', cpppath_st % i)
@feature('cc')
@after('apply_lib_vars')
def apply_defines_cc(self):
	"""after uselib is set for CCDEFINES

	Collect the generator-level and uselib-level defines into
	DEFLINES and the compiler flags _CCDEFFLAGS."""
	self.defines = getattr(self, 'defines', [])
	lst = self.to_list(self.defines) + self.to_list(self.env['CCDEFINES'])
	milst = []
	# now process the local defines, dropping duplicates but keeping order
	for defi in lst:
		if not defi in milst:
			milst.append(defi)
	# CCDEFINES_<uselib> contributions
	libs = self.to_list(self.uselib)
	for l in libs:
		val = self.env['CCDEFINES_'+l]
		if val: milst += val
	# DEFLINES keeps 'NAME VALUE' pairs for the configuration header writers
	self.env['DEFLINES'] = ["%s %s" % (x[0], Utils.trimquotes('='.join(x[1:]))) for x in [y.split('=') for y in milst]]
	y = self.env['CCDEFINES_ST']
	self.env['_CCDEFFLAGS'] = [y%x for x in milst]
@extension(EXT_CC)
def c_hook(self, node):
	# create the compilation task: cpp or cc
	task = self.create_task('cc')
	if getattr(self, 'obj_ext', None):
		obj_ext = self.obj_ext
	else:
		# per-generator suffix avoids clashes when one source is built twice
		obj_ext = '_%d.o' % self.idx
	task.inputs = [node]
	task.outputs = [node.change_ext(obj_ext)]
	try:
		self.compiled_tasks.append(task)
	except AttributeError:
		raise Utils.WafError('Have you forgotten to set the feature "cc" on %s?' % str(self))
	return task
# register the compile task class: expands env variables at execution time
cc_str = '${CC} ${CCFLAGS} ${CPPFLAGS} ${_CCINCFLAGS} ${_CCDEFFLAGS} ${CC_SRC_F}${SRC} ${CC_TGT_F}${TGT}'
cls = Task.simple_task_type('cc', cc_str, 'GREEN', ext_out='.o', ext_in='.c', shell=False)
# the preprocessor-based header scanner provides implicit dependencies
cls.scan = ccroot.scan
cls.vars.append('CCDEPS')
# register the link task class
link_str = '${LINK_CC} ${CCLNK_SRC_F}${SRC} ${CCLNK_TGT_F}${TGT[0].abspath(env)} ${LINKFLAGS}'
cls = Task.simple_task_type('cc_link', link_str, color='YELLOW', ext_in='.o', ext_out='.bin', shell=False)
# serialize link tasks; installation is handled elsewhere, hence the no-op
cls.maxjobs = 1
cls.install = Utils.nada

613
tools/wafadmin/Tools/ccroot.py

@@ -0,0 +1,613 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2005-2008 (ita)
"base for all c/c++ programs and libraries"
import os, sys, re
import TaskGen, Task, Utils, preproc, Logs, Build, Options
from Logs import error, debug, warn
from Utils import md5
from TaskGen import taskgen, after, before, feature
from Constants import *
try:
from cStringIO import StringIO
except ImportError:
from io import StringIO
import config_c # <- necessary for the configuration, do not touch
USE_TOP_LEVEL = False
def get_cc_version(conf, cc, gcc=False, icc=False):
"""Run the compiler's preprocessor with -dM to dump its predefined macros,
verify the compiler identity (gcc/icc), and derive DEST_OS / DEST_BINFMT /
DEST_CPU and CC_VERSION from those macros."""
cmd = cc + ['-dM', '-E', '-']
try:
p = Utils.pproc.Popen(cmd, stdin=Utils.pproc.PIPE, stdout=Utils.pproc.PIPE, stderr=Utils.pproc.PIPE)
p.stdin.write('\n')
out = p.communicate()[0]
except:
conf.fatal('could not determine the compiler version %r' % cmd)
# PY3K: do not touch
out = str(out)
if gcc:
if out.find('__INTEL_COMPILER') >= 0:
conf.fatal('The intel compiler pretends to be gcc')
if out.find('__GNUC__') < 0:
conf.fatal('Could not determine the compiler type')
if icc and out.find('__INTEL_COMPILER') < 0:
conf.fatal('Not icc/icpc')
# k maps macro name -> value, parsed from "#define NAME VALUE" lines
k = {}
if icc or gcc:
out = out.split('\n')
import shlex
for line in out:
lst = shlex.split(line)
if len(lst)>2:
key = lst[1]
val = lst[2]
k[key] = val
def isD(var):
# macro is defined
return var in k
def isT(var):
# macro is defined and truthy
return var in k and k[var] != '0'
# Some documentation is available at http://predef.sourceforge.net
# The names given to DEST_OS must match what Utils.unversioned_sys_platform() returns.
mp1 = {
'__linux__' : 'linux',
'__GNU__' : 'hurd',
'__FreeBSD__' : 'freebsd',
'__NetBSD__' : 'netbsd',
'__OpenBSD__' : 'openbsd',
'__sun' : 'sunos',
'__hpux' : 'hpux',
'__sgi' : 'irix',
'_AIX' : 'aix',
'__CYGWIN__' : 'cygwin',
'__MSYS__' : 'msys',
'_UWIN' : 'uwin',
'_WIN64' : 'win32',
'_WIN32' : 'win32',
}
for i in mp1:
if isD(i):
conf.env.DEST_OS = mp1[i]
break
else:
# no direct match: darwin and a generic unix fallback
if isD('__APPLE__') and isD('__MACH__'):
conf.env.DEST_OS = 'darwin'
elif isD('__unix__'): # unix must be tested last as it's a generic fallback
conf.env.DEST_OS = 'generic'
if isD('__ELF__'):
conf.env.DEST_BINFMT = 'elf'
mp2 = {
'__x86_64__' : 'x86_64',
'__i386__' : 'x86',
'__ia64__' : 'ia',
'__mips__' : 'mips',
'__sparc__' : 'sparc',
'__alpha__' : 'alpha',
'__arm__' : 'arm',
'__hppa__' : 'hppa',
'__powerpc__' : 'powerpc',
}
for i in mp2:
if isD(i):
conf.env.DEST_CPU = mp2[i]
break
debug('ccroot: dest platform: ' + ' '.join([conf.env[x] or '?' for x in ('DEST_OS', 'DEST_BINFMT', 'DEST_CPU')]))
# NOTE(review): __GNUC__ etc. are only present for gcc/icc-style compilers;
# presumably this function is only reached in that case — verify callers
conf.env['CC_VERSION'] = (k['__GNUC__'], k['__GNUC_MINOR__'], k['__GNUC_PATCHLEVEL__'])
return k
class DEBUG_LEVELS:
	"""Symbolic names for the supported build debug levels.
	Will disappear in waf 1.6."""
	ULTRADEBUG = "ultradebug"
	DEBUG = "debug"
	RELEASE = "release"
	OPTIMIZED = "optimized"
	CUSTOM = "custom"
	# all levels, in decreasing order of debug information
	ALL = [ULTRADEBUG, DEBUG, RELEASE, OPTIMIZED, CUSTOM]
def scan(self):
"look for .h the .cpp need"
debug('ccroot: _scan_preprocessor(self, node, env, path_lst)')
# TODO waf 1.6 - assume the default input has exactly one file
if len(self.inputs) == 1:
node = self.inputs[0]
(nodes, names) = preproc.get_deps(node, self.env, nodepaths = self.env['INC_PATHS'])
if Logs.verbose:
debug('deps: deps for %s: %r; unresolved %r' % (str(node), nodes, names))
return (nodes, names)
# multiple inputs: merge the dependencies, de-duplicating nodes by identity
all_nodes = []
all_names = []
seen = []
for node in self.inputs:
(nodes, names) = preproc.get_deps(node, self.env, nodepaths = self.env['INC_PATHS'])
if Logs.verbose:
debug('deps: deps for %s: %r; unresolved %r' % (str(node), nodes, names))
for x in nodes:
if id(x) in seen: continue
seen.append(id(x))
all_nodes.append(x)
for x in names:
if not x in all_names:
all_names.append(x)
return (all_nodes, all_names)
class ccroot_abstract(TaskGen.task_gen):
"Parent class for programs and libraries in languages c, c++ and moc (Qt)"
def __init__(self, *k, **kw):
# COMPAT remove in waf 1.6 TODO
# presumably normalizes old-style feature names ('shlib' -> 'cshlib') — verify
if len(k) > 1:
k = list(k)
if k[1][0] != 'c':
k[1] = 'c' + k[1]
TaskGen.task_gen.__init__(self, *k, **kw)
def get_target_name(self):
	"""Compute the file name of the link output: apply the platform
	pattern for the target kind (program/shlib/staticlib) and, for
	versioned dlls on 'pe' targets, embed the major version number."""
	kind = 'program'
	for feat in self.features:
		if feat in ('cshlib', 'cstaticlib'):
			kind = feat.lstrip('c')
	pattern = self.env[kind + '_PATTERN'] or '%s'
	folder, name = os.path.split(self.target)
	if self.env.DEST_BINFMT == 'pe' and getattr(self, 'vnum', None) and 'cshlib' in self.features:
		# include the version in the dll file name,
		# the import lib file name stays unversioned
		name = '%s-%s' % (name, self.vnum.split('.')[0])
	return os.path.join(folder, pattern % name)
@feature('cc', 'cxx')
@before('apply_core')
def default_cc(self):
"""compiled_tasks attribute must be set before the '.c->.o' tasks can be created"""
# provide sensible defaults for all attributes the cc/cxx methods rely on
Utils.def_attrs(self,
includes = '',
defines= '',
rpaths = '',
uselib = '',
uselib_local = '',
add_objects = '',
p_flag_vars = [],
p_type_vars = [],
compiled_tasks = [],
link_task = None)
# The only thing we need for cross-compilation is DEST_BINFMT.
# At some point, we may reach a case where DEST_BINFMT is not enough, but for now it's sufficient.
# Currently, cross-compilation is auto-detected only for the gnu and intel compilers.
if not self.env.DEST_BINFMT:
# Infer the binary format from the os name.
self.env.DEST_BINFMT = Utils.unversioned_sys_platform_to_binary_format(
self.env.DEST_OS or Utils.unversioned_sys_platform())
# default installation roots, derived from PREFIX
if not self.env.BINDIR: self.env.BINDIR = Utils.subst_vars('${PREFIX}/bin', self.env)
if not self.env.LIBDIR: self.env.LIBDIR = Utils.subst_vars('${PREFIX}/lib${LIB_EXT}', self.env)
@feature('cprogram', 'dprogram', 'cstaticlib', 'dstaticlib', 'cshlib', 'dshlib')
def apply_verif(self):
	"""no particular order, used for diagnostic"""
	# a target needs either source files or object files to link against
	has_input = self.source or getattr(self, 'add_objects', None)
	if not has_input:
		raise Utils.WafError('no source files specified for %s' % self)
	if not self.target:
		raise Utils.WafError('no target for %s' % self)
# TODO reference the d programs, shlibs in d.py, not here
@feature('cprogram', 'dprogram')
@after('default_cc')
@before('apply_core')
def vars_target_cprogram(self):
"""Programs install into BINDIR with executable permissions."""
self.default_install_path = self.env.BINDIR
self.default_chmod = O755
@after('default_cc')
@feature('cshlib', 'dshlib')
@before('apply_core')
def vars_target_cshlib(self):
"""Shared libraries install into LIBDIR, except on pe platforms where
dlls live next to the executables in BINDIR."""
if self.env.DEST_BINFMT == 'pe':
# set execute bit on libs to avoid 'permission denied' (issue 283)
self.default_chmod = O755
self.default_install_path = self.env.BINDIR
else:
self.default_install_path = self.env.LIBDIR
@feature('cprogram', 'dprogram', 'cstaticlib', 'dstaticlib', 'cshlib', 'dshlib')
@after('apply_link', 'vars_target_cprogram', 'vars_target_cshlib')
def default_link_install(self):
"""you may kill this method to inject your own installation for the first element
any other install should only process its own nodes and not those from the others"""
# install only the primary link output; an empty install_path disables installation
if self.install_path:
self.bld.install_files(self.install_path, [self.link_task.outputs[0]], env=self.env, chmod=self.chmod)
@feature('cc', 'cxx')
@after('apply_type_vars', 'apply_lib_vars', 'apply_core')
def apply_incpaths(self):
"""used by the scanner
after processing the uselib for CPPPATH
after apply_core because some processing may add include paths
"""
lst = []
# TODO move the uselib processing out of here
# 1. include paths contributed by external libraries (CPPPATH_<uselib>)
for lib in self.to_list(self.uselib):
for path in self.env['CPPPATH_' + lib]:
if not path in lst:
lst.append(path)
# 2. system include paths, only when the preprocessor runs in absolute mode
if preproc.go_absolute:
for path in preproc.standard_includes:
if not path in lst:
lst.append(path)
# 3. the task generator's own 'includes' attribute
for path in self.to_list(self.includes):
if not path in lst:
if preproc.go_absolute or not os.path.isabs(path):
lst.append(path)
else:
# absolute path in relative mode: pass it straight to the compiler
self.env.prepend_value('CPPPATH', path)
# resolve each accumulated path to a Node and record it in INC_PATHS
for path in lst:
node = None
if os.path.isabs(path):
if preproc.go_absolute:
node = self.bld.root.find_dir(path)
elif path[0] == '#':
# '#' anchors the path at the project root
node = self.bld.srcnode
if len(path) > 1:
node = node.find_dir(path[1:])
else:
node = self.path.find_dir(path)
if node:
self.env.append_value('INC_PATHS', node)
# TODO WAF 1.6
if USE_TOP_LEVEL:
self.env.append_value('INC_PATHS', self.bld.srcnode)
@feature('cc', 'cxx')
@after('init_cc', 'init_cxx')
@before('apply_lib_vars')
def apply_type_vars(self):
"""before apply_lib_vars because we modify uselib
after init_cc and init_cxx because we need p_type_vars
"""
for x in self.features:
if not x in ['cprogram', 'cstaticlib', 'cshlib']:
continue
# 'cshlib' -> 'shlib' etc., the key used for per-type env variables
x = x.lstrip('c')
# if the type defines uselib to add, add them
st = self.env[x + '_USELIB']
if st: self.uselib = self.uselib + ' ' + st
# each compiler defines variables like 'shlib_CXXFLAGS', 'shlib_LINKFLAGS', etc
# so when we make a task generator of the type shlib, CXXFLAGS are modified accordingly
for var in self.p_type_vars:
compvar = '%s_%s' % (x, var)
#print compvar
value = self.env[compvar]
if value: self.env.append_value(var, value)
@feature('cprogram', 'cshlib', 'cstaticlib')
@after('apply_core')
def apply_link(self):
"""executes after apply_core for collecting 'compiled_tasks'
use a custom linker if specified (self.link='name-of-custom-link-task')"""
link = getattr(self, 'link', None)
if not link:
# pick the link task class from the features
if 'cstaticlib' in self.features: link = 'static_link'
elif 'cxx' in self.features: link = 'cxx_link'
else: link = 'cc_link'
tsk = self.create_task(link)
# the link inputs are all the object files produced so far
outputs = [t.outputs[0] for t in self.compiled_tasks]
tsk.set_inputs(outputs)
# get_target_name applies the platform pattern (lib%s.so, %s.exe, ...)
tsk.set_outputs(self.path.find_or_declare(get_target_name(self)))
self.link_task = tsk
@feature('cc', 'cxx')
@after('apply_link', 'init_cc', 'init_cxx')
def apply_lib_vars(self):
"""after apply_link because of 'link_task'
after default_cc because of the attribute 'uselib'"""
env = self.env
# 1. the case of the libs defined in the project (visit ancestors first)
# the ancestors external libraries (uselib) will be prepended
uselib = self.to_list(self.uselib)
names = self.to_list(self.uselib_local)
seen = []
tmp = names[:] # consume a copy of the list of names
while tmp:
lib_name = tmp.pop(0)
# visit dependencies only once
if lib_name in seen:
continue
y = self.name_to_obj(lib_name)
if not y:
raise Utils.WafError('object %r was not found in uselib_local (required by %r)' % (lib_name, self.name))
# force the dependency's own methods to run so its link_task exists
y.post()
seen.append(lib_name)
# object has ancestors to process (shared libraries): add them to the end of the list
if getattr(y, 'uselib_local', None):
lst = y.to_list(y.uselib_local)
if 'cshlib' in y.features or 'cprogram' in y.features:
# static-lib dependencies of a shlib/program are already linked in
lst = [x for x in lst if not 'cstaticlib' in self.name_to_obj(x).features]
tmp.extend(lst)
# link task and flags
if getattr(y, 'link_task', None):
link_name = y.target[y.target.rfind(os.sep) + 1:]
if 'cstaticlib' in y.features:
env.append_value('STATICLIB', link_name)
elif 'cshlib' in y.features or 'cprogram' in y.features:
# WARNING some linkers can link against programs
env.append_value('LIB', link_name)
# the order
self.link_task.set_run_after(y.link_task)
# for the recompilation
dep_nodes = getattr(self.link_task, 'dep_nodes', [])
self.link_task.dep_nodes = dep_nodes + y.link_task.outputs
# add the link path too
tmp_path = y.link_task.outputs[0].parent.bldpath(self.env)
if not tmp_path in env['LIBPATH']: env.prepend_value('LIBPATH', tmp_path)
# add ancestors uselib too
# WARNING providing STATICLIB_FOO in env will result in broken builds
# TODO waf 1.6 prevent this behaviour somehow
for v in self.to_list(y.uselib):
if v in uselib: continue
uselib = [v] + uselib
# if the library task generator provides 'export_incdirs', add to the include path
# the export_incdirs must be a list of paths relative to the other library
if getattr(y, 'export_incdirs', None):
for x in self.to_list(y.export_incdirs):
node = y.path.find_dir(x)
if not node:
raise Utils.WafError('object %r: invalid folder %r in export_incdirs' % (y.target, x))
self.env.append_unique('INC_PATHS', node)
# 2. the case of the libs defined outside
# for each uselib name, copy every registered flag variable (LIB_X, CCFLAGS_X, ...)
for x in uselib:
for v in self.p_flag_vars:
val = self.env[v + '_' + x]
if val: self.env.append_value(v, val)
@feature('cprogram', 'cshlib', 'cstaticlib')
@after('apply_link')
def apply_objdeps(self):
"add the .o files produced by some other object files in the same manner as uselib_local"
if not getattr(self, 'add_objects', None): return
seen = []
names = self.to_list(self.add_objects)
while names:
x = names[0]
# visit dependencies only once
if x in seen:
names = names[1:]
continue
# object does not exist ?
y = self.name_to_obj(x)
if not y:
raise Utils.WafError('object %r was not found in uselib_local (required by add_objects %r)' % (x, self.name))
# object has ancestors to process first ? update the list of names
if getattr(y, 'add_objects', None):
added = 0
lst = y.to_list(y.add_objects)
lst.reverse()
for u in lst:
if u in seen: continue
added = 1
names = [u]+names
if added: continue # list of names modified, loop
# safe to process the current object
y.post()
seen.append(x)
# link the dependency's object files into this generator's link task
for t in y.compiled_tasks:
self.link_task.inputs.extend(t.outputs)
@feature('cprogram', 'cshlib', 'cstaticlib')
@after('apply_lib_vars')
def apply_obj_vars(self):
"""after apply_lib_vars for uselib"""
v = self.env
# per-platform flag templates, e.g. '-l%s', '-L%s', '-Wl,-rpath,%s'
lib_st = v['LIB_ST']
staticlib_st = v['STATICLIB_ST']
libpath_st = v['LIBPATH_ST']
staticlibpath_st = v['STATICLIBPATH_ST']
rpath_st = v['RPATH_ST']
app = v.append_unique
if v['FULLSTATIC']:
v.append_value('LINKFLAGS', v['FULLSTATIC_MARKER'])
for i in v['RPATH']:
if i and rpath_st:
app('LINKFLAGS', rpath_st % i)
# each library path is emitted for both shared and static search paths
for i in v['LIBPATH']:
app('LINKFLAGS', libpath_st % i)
app('LINKFLAGS', staticlibpath_st % i)
if v['STATICLIB']:
v.append_value('LINKFLAGS', v['STATICLIB_MARKER'])
k = [(staticlib_st % i) for i in v['STATICLIB']]
app('LINKFLAGS', k)
# fully static binaries ?
if not v['FULLSTATIC']:
if v['STATICLIB'] or v['LIB']:
v.append_value('LINKFLAGS', v['SHLIB_MARKER'])
app('LINKFLAGS', [lib_st % i for i in v['LIB']])
@after('apply_link')
def process_obj_files(self):
"""Add the extra object files registered via add_obj_file to the link."""
if not hasattr(self, 'obj_files'): return
for x in self.obj_files:
node = self.path.find_resource(x)
self.link_task.inputs.append(node)
@taskgen
def add_obj_file(self, file):
"""Small example on how to link object files as if they were source
obj = bld.create_obj('cc')
obj.add_obj_file('foo.o')"""
if not hasattr(self, 'obj_files'): self.obj_files = []
# schedule process_obj_files (above) to run for this generator
if not 'process_obj_files' in self.meths: self.meths.append('process_obj_files')
self.obj_files.append(file)
# map of shorthand task-generator attribute names (singular, lowercase) to the
# env variables they feed; used by add_extra_flags below
c_attrs = {
'cxxflag' : 'CXXFLAGS',
'cflag' : 'CCFLAGS',
'ccflag' : 'CCFLAGS',
'linkflag' : 'LINKFLAGS',
'ldflag' : 'LINKFLAGS',
'lib' : 'LIB',
'libpath' : 'LIBPATH',
'staticlib': 'STATICLIB',
'staticlibpath': 'STATICLIBPATH',
'rpath' : 'RPATH',
'framework' : 'FRAMEWORK',
'frameworkpath' : 'FRAMEWORKPATH'
}
@feature('cc', 'cxx')
@before('init_cxx', 'init_cc')
# NOTE(review): 'init_cc' is repeated in the second @before; harmless but redundant
@before('apply_lib_vars', 'apply_obj_vars', 'apply_incpaths', 'init_cc')
def add_extra_flags(self):
"""case and plural insensitive
before apply_obj_vars for processing the library attributes
"""
# scan the instance attributes for names like 'cxxflags'/'CFlag' and
# forward their values to the matching env variable from c_attrs
for x in self.__dict__.keys():
y = x.lower()
if y[-1] == 's':
y = y[:-1]
if c_attrs.get(y, None):
self.env.append_unique(c_attrs[y], getattr(self, x))
# ============ the code above must not know anything about import libs ==========
@feature('cshlib')
@after('apply_link', 'default_cc')
@before('apply_lib_vars', 'apply_objdeps', 'default_link_install')
def apply_implib(self):
"""On mswindows, handle dlls and their import libs
the .dll.a is the import lib and it is required for linking so it is installed too
"""
if not self.env.DEST_BINFMT == 'pe':
return
# this method takes over installation from default_link_install
self.meths.remove('default_link_install')
bindir = self.install_path
if not bindir: return
# install the dll in the bin dir
dll = self.link_task.outputs[0]
self.bld.install_files(bindir, [dll], self.env, self.chmod)
# add linker flags to generate the import lib
implib = self.env['implib_PATTERN'] % os.path.split(self.target)[1]
implib = dll.parent.find_or_declare(implib)
self.link_task.outputs.append(implib)
self.bld.install_as('${LIBDIR}/%s' % implib.name, implib, self.env)
self.env.append_value('LINKFLAGS', (self.env['IMPLIB_ST'] % implib.bldpath(self.env)).split())
# ============ the code above must not know anything about vnum processing on unix platforms =========
@feature('cshlib')
@after('apply_link')
@before('apply_lib_vars', 'default_link_install')
def apply_vnum(self):
	"""Handle shared-library versioning on posix/elf platforms:
	libfoo.so is installed as libfoo.so.1.2.3, with symlinks
	libfoo.so -> libfoo.so.1.2.3 and libfoo.so.1 -> libfoo.so.1.2.3.
	"""
	if not getattr(self, 'vnum', '') or not 'cshlib' in self.features or os.name != 'posix' or self.env.DEST_BINFMT != 'elf':
		return
	# this method replaces the default installation entirely
	self.meths.remove('default_link_install')
	link = self.link_task
	nums = self.vnum.split('.')
	node = link.outputs[0]
	libname = node.name
	if libname.endswith('.dylib'):
		# name3 carries the full version, name2 only the major version
		# BUG FIX: the original referenced an undefined name 'task' here
		name3 = libname.replace('.dylib', '.%s.dylib' % self.vnum)
		name2 = libname.replace('.dylib', '.%s.dylib' % nums[0])
	else:
		name3 = libname + '.' + self.vnum
		name2 = libname + '.' + nums[0]
	# embed the soname (e.g. libfoo.so.1) into the library
	self.env.append_value('LINKFLAGS', (self.env['SONAME_ST'] % name2).split())
	bld = self.bld
	path = self.install_path
	if not path: return
	bld.install_as(path + os.sep + name3, node, env=self.env)
	bld.symlink_as(path + os.sep + name2, name3)
	bld.symlink_as(path + os.sep + libname, name3)
	# the following task is just to enable execution from the build dir :-/
	tsk = self.create_task('vnum')
	tsk.set_inputs([node])
	tsk.set_outputs(node.parent.find_or_declare(name2))
def exec_vnum_link(self):
	"""Task body for 'vnum': (re)create the major-version symlink
	(e.g. libfoo.so.1 -> libfoo.so.1.2.3) in the build directory.
	Returns None on success, 1 on failure."""
	try:
		# remove a stale link first; use abspath(self.env) so the *build*
		# path is targeted — the original removed abspath() (the source-dir
		# path), so an existing link was never replaced on rebuilds
		os.remove(self.outputs[0].abspath(self.env))
	except OSError:
		pass
	try:
		os.symlink(self.inputs[0].name, self.outputs[0].abspath(self.env))
	except Exception:
		return 1
# register the 'vnum' task class; quiet suppresses console output for the symlink step
cls = Task.task_type_from_func('vnum', func=exec_vnum_link, ext_in='.bin', color='CYAN')
cls.quiet = 1

67
tools/wafadmin/Tools/compiler_cc.py

@@ -0,0 +1,67 @@
#!/usr/bin/env python
# encoding: utf-8
# Matthias Jahn jahn dôt matthias ât freenet dôt de, 2007 (pmarat)
import os, sys, imp, types, ccroot
import optparse
import Utils, Configure, Options
from Logs import debug
# candidate C compilers per platform, in order of preference;
# 'default' is the fallback for platforms not listed here
c_compiler = {
'win32': ['msvc', 'gcc'],
'cygwin': ['gcc'],
'darwin': ['gcc'],
'aix5': ['gcc'],
'linux': ['gcc', 'icc', 'suncc'],
'sunos': ['gcc', 'suncc'],
'irix': ['gcc'],
'hpux': ['gcc'],
'default': ['gcc']
}
def __list_possible_compiler(platform):
	"""Return the candidate C compilers for *platform*, falling back
	to the 'default' entry for unknown platforms."""
	return c_compiler.get(platform, c_compiler['default'])
def detect(conf):
"""
for each compiler for the platform, try to configure the compiler
in theory the tools should raise a configuration error if the compiler
pretends to be something it is not (setting CC=icc and trying to configure gcc)
"""
try: test_for_compiler = Options.options.check_c_compiler
except AttributeError: conf.fatal("Add set_options(opt): opt.tool_options('compiler_cc')")
for compiler in test_for_compiler.split():
try:
conf.check_tool(compiler)
except Configure.ConfigurationError, e:
debug('compiler_cc: %r' % e)
else:
if conf.env['CC']:
conf.check_message(compiler, '', True)
conf.env['COMPILER_CC'] = compiler
break
# NOTE(review): indentation was lost in this copy; this failure message
# presumably belongs inside the loop's else branch — verify against upstream waf
conf.check_message(compiler, '', False)
def set_options(opt):
"""Add the --check-c-compiler option and the per-compiler tool options."""
build_platform = Utils.unversioned_sys_platform()
possible_compiler_list = __list_possible_compiler(build_platform)
test_for_compiler = ' '.join(possible_compiler_list)
cc_compiler_opts = opt.add_option_group("C Compiler Options")
cc_compiler_opts.add_option('--check-c-compiler', default="%s" % test_for_compiler,
help='On this platform (%s) the following C-Compiler will be checked by default: "%s"' % (build_platform, test_for_compiler),
dest="check_c_compiler")
for c_compiler in test_for_compiler.split():
opt.tool_options('%s' % c_compiler, option_group=cc_compiler_opts)
# the block below is intentionally disabled (kept for reference)
"""opt.add_option('-d', '--debug-level',
action = 'store',
default = ccroot.DEBUG_LEVELS.RELEASE,
help = "Specify the debug level, does nothing if CFLAGS is set in the environment. [Allowed Values: '%s']" % "', '".join(ccroot.DEBUG_LEVELS.ALL),
choices = ccroot.DEBUG_LEVELS.ALL,
dest = 'debug_level')"""

61
tools/wafadmin/Tools/compiler_cxx.py

@@ -0,0 +1,61 @@
#!/usr/bin/env python
# encoding: utf-8
# Matthias Jahn jahn dôt matthias ât freenet dôt de 2007 (pmarat)
import os, sys, imp, types, ccroot
import optparse
import Utils, Configure, Options
from Logs import debug
# candidate C++ compilers per platform, in order of preference;
# 'default' is the fallback for platforms not listed here
cxx_compiler = {
'win32': ['msvc', 'g++'],
'cygwin': ['g++'],
'darwin': ['g++'],
'aix': ['g++'],
'linux': ['g++', 'icpc', 'sunc++'],
'sunos': ['g++', 'sunc++'],
'irix': ['g++'],
'hpux': ['g++'],
'default': ['g++']
}
def __list_possible_compiler(platform):
	"""Return the candidate C++ compilers for *platform*, falling back
	to the 'default' entry for unknown platforms."""
	return cxx_compiler.get(platform, cxx_compiler['default'])
def detect(conf):
"""Try each candidate C++ compiler in order and keep the first that configures."""
try: test_for_compiler = Options.options.check_cxx_compiler
except AttributeError: raise Configure.ConfigurationError("Add set_options(opt): opt.tool_options('compiler_cxx')")
for compiler in test_for_compiler.split():
try:
conf.check_tool(compiler)
except Configure.ConfigurationError, e:
debug('compiler_cxx: %r' % e)
else:
if conf.env['CXX']:
conf.check_message(compiler, '', True)
conf.env['COMPILER_CXX'] = compiler
break
# NOTE(review): indentation was lost in this copy; this failure message
# presumably belongs inside the loop's else branch — verify against upstream waf
conf.check_message(compiler, '', False)
def set_options(opt):
"""Add the --check-cxx-compiler option and the per-compiler tool options."""
build_platform = Utils.unversioned_sys_platform()
possible_compiler_list = __list_possible_compiler(build_platform)
test_for_compiler = ' '.join(possible_compiler_list)
cxx_compiler_opts = opt.add_option_group('C++ Compiler Options')
cxx_compiler_opts.add_option('--check-cxx-compiler', default="%s" % test_for_compiler,
help='On this platform (%s) the following C++ Compiler will be checked by default: "%s"' % (build_platform, test_for_compiler),
dest="check_cxx_compiler")
for cxx_compiler in test_for_compiler.split():
opt.tool_options('%s' % cxx_compiler, option_group=cxx_compiler_opts)
# the block below is intentionally disabled (kept for reference)
"""opt.add_option('-d', '--debug-level',
action = 'store',
default = ccroot.DEBUG_LEVELS.RELEASE,
help = "Specify the debug level, does nothing if CXXFLAGS is set in the environment. [Allowed Values: '%s']" % "', '".join(ccroot.DEBUG_LEVELS.ALL),
choices = ccroot.DEBUG_LEVELS.ALL,
dest = 'debug_level')"""

33
tools/wafadmin/Tools/compiler_d.py

@@ -0,0 +1,33 @@
#!/usr/bin/env python
# encoding: utf-8
# Carlos Rafael Giani, 2007 (dv)
import os, sys, imp, types
import Utils, Configure, Options
def detect(conf):
"""Try the D compilers in preference order (dmd/gdc, order controlled by
--check-dmd-first) and keep the first one that configures."""
if getattr(Options.options, 'check_dmd_first', None):
test_for_compiler = ['dmd', 'gdc']
else:
test_for_compiler = ['gdc', 'dmd']
for d_compiler in test_for_compiler:
try:
conf.check_tool(d_compiler)
# NOTE(review): bare except — deliberately best-effort here, but it also
# swallows KeyboardInterrupt/SystemExit; consider narrowing upstream
except:
pass
else:
break
else:
# for/else: no candidate succeeded
conf.fatal('no suitable d compiler was found')
def set_options(opt):
"""Add the --check-dmd-first option and the per-compiler tool options."""
d_compiler_opts = opt.add_option_group('D Compiler Options')
d_compiler_opts.add_option('--check-dmd-first', action='store_true',
help='checks for the gdc compiler before dmd (default is the other way round)',
dest='check_dmd_first',
default=False)
for d_compiler in ['gdc', 'dmd']:
opt.tool_options('%s' % d_compiler, option_group=d_compiler_opts)

680
tools/wafadmin/Tools/config_c.py

@@ -0,0 +1,680 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2005-2008 (ita)
"""
c/c++ configuration routines
"""
import os, imp, sys, shlex, shutil
from Utils import md5
import Build, Utils, Configure, Task, Options, Logs, TaskGen
from Constants import *
from Configure import conf, conftest
# map check_cfg keyword names to pkg-config comparison operators (display only)
cfg_ver = {
'atleast-version': '>=',
'exact-version': '==',
'max-version': '<=',
}
# code fragment used to test for a function: taking its address forces a reference
SNIP1 = '''
int main() {
void *p;
p=(void*)(%s);
return 0;
}
'''
# code fragment used to test for a type name
SNIP2 = '''
int main() {
if ((%(type_name)s *) 0) return 0;
if (sizeof (%(type_name)s)) return 0;
}
'''
# minimal fragment used when only headers/libraries are being checked
SNIP3 = '''
int main() {
return 0;
}
'''
def parse_flags(line, uselib, env):
	"""Parse a -config/pkg-config style flag line and distribute the flags
	into the uselib variables of env (CPPPATH_X, LIB_X, LINKFLAGS_X, ...).
	pkg-config still has bugs on some platforms, and there are many -config
	programs, so parsing the flags manually is necessary :-/"""
	tokens = shlex.split(line)
	while tokens:
		tok = tokens.pop(0)
		prefix, value = tok[:2], tok[2:]
		if prefix in ('-I', '/I'):
			# include path; the argument may be glued or a separate token
			if not value: value = tokens.pop(0)
			env.append_unique('CPPPATH_' + uselib, value)
		elif prefix == '-D':
			if not value: value = tokens.pop(0)
			env.append_unique('CXXDEFINES_' + uselib, value)
			env.append_unique('CCDEFINES_' + uselib, value)
		elif prefix == '-l':
			if not value: value = tokens.pop(0)
			env.append_unique('LIB_' + uselib, value)
		elif prefix == '-L':
			if not value: value = tokens.pop(0)
			env.append_unique('LIBPATH_' + uselib, value)
		elif tok == '-pthread' or tok.startswith('+'):
			# flags that apply to compiling and linking alike
			env.append_unique('CCFLAGS_' + uselib, tok)
			env.append_unique('CXXFLAGS_' + uselib, tok)
			env.append_unique('LINKFLAGS_' + uselib, tok)
		elif tok == '-framework':
			env.append_unique('FRAMEWORK_' + uselib, tokens.pop(0))
		elif tok.startswith('-std'):
			env.append_unique('CCFLAGS_' + uselib, tok)
			env.append_unique('LINKFLAGS_' + uselib, tok)
		elif tok.startswith('-Wl'):
			env.append_unique('LINKFLAGS_' + uselib, tok)
		elif tok.startswith('-m') or tok.startswith('-f'):
			env.append_unique('CCFLAGS_' + uselib, tok)
			env.append_unique('CXXFLAGS_' + uselib, tok)
@conf
def ret_msg(self, f, kw):
"""execute a function, when provided"""
# messages may be given as plain strings or as callables taking the kw dict
if isinstance(f, str):
return f
return f(kw)
@conf
def validate_cfg(self, kw):
"""Fill in defaults (path, msg, okmsg, errmsg) for a check_cfg invocation."""
if not 'path' in kw:
kw['path'] = 'pkg-config --errors-to-stdout --print-errors'
# pkg-config version
if 'atleast_pkgconfig_version' in kw:
if not 'msg' in kw:
kw['msg'] = 'Checking for pkg-config version >= %s' % kw['atleast_pkgconfig_version']
return
# pkg-config --modversion
if 'modversion' in kw:
return
# checking for the version of a module, for the moment, one thing at a time
for x in cfg_ver.keys():
y = x.replace('-', '_')
if y in kw:
if not 'package' in kw:
raise ValueError('%s requires a package' % x)
if not 'msg' in kw:
kw['msg'] = 'Checking for %s %s %s' % (kw['package'], cfg_ver[x], kw[y])
return
# plain existence check for a package
if not 'msg' in kw:
kw['msg'] = 'Checking for %s' % kw['package']
if not 'okmsg' in kw:
kw['okmsg'] = 'ok'
if not 'errmsg' in kw:
kw['errmsg'] = 'not found'
@conf
def cmd_and_log(self, cmd, kw):
"""Run a shell command, log it, and return its stdout; abort the
configuration on launch failure or non-zero exit status."""
Logs.debug('runner: %s\n' % cmd)
if self.log: self.log.write('%s\n' % cmd)
try:
p = Utils.pproc.Popen(cmd, stdout=Utils.pproc.PIPE, shell=True)
output = p.communicate()[0]
except OSError:
# fatal raises, so p is never read when Popen itself failed
self.fatal('fail')
if p.returncode:
# keep the command output as the error message when the check is mandatory
if not kw.get('errmsg', ''):
if kw.get('mandatory', False):
kw['errmsg'] = output.strip()
else:
kw['errmsg'] = 'fail'
self.fatal('fail')
return output
@conf
def exec_cfg(self, kw):
"""Run the pkg-config (or -config) command described by kw and store the results."""
# pkg-config version
if 'atleast_pkgconfig_version' in kw:
cmd = '%s --atleast-pkgconfig-version=%s' % (kw['path'], kw['atleast_pkgconfig_version'])
self.cmd_and_log(cmd, kw)
if not 'okmsg' in kw:
kw['okmsg'] = 'ok'
return
# checking for the version of a module
for x in cfg_ver:
y = x.replace('-', '_')
if y in kw:
self.cmd_and_log('%s --%s=%s %s' % (kw['path'], x, kw[y], kw['package']), kw)
if not 'okmsg' in kw:
kw['okmsg'] = 'ok'
# define HAVE_<PKG> on success
self.define(self.have_define(kw.get('uselib_store', kw['package'])), 1, 0)
break
# retrieving the version of a module
if 'modversion' in kw:
version = self.cmd_and_log('%s --modversion %s' % (kw['path'], kw['modversion']), kw).strip()
self.define('%s_VERSION' % Utils.quote_define_name(kw.get('uselib_store', kw['modversion'])), version)
return version
lst = [kw['path']]
for key, val in kw.get('define_variable', {}).iteritems():
lst.append('--define-variable=%s=%s' % (key, val))
lst.append(kw.get('args', ''))
lst.append(kw['package'])
# so we assume the command-line will output flags to be parsed afterwards
cmd = ' '.join(lst)
ret = self.cmd_and_log(cmd, kw)
if not 'okmsg' in kw:
kw['okmsg'] = 'ok'
self.define(self.have_define(kw.get('uselib_store', kw['package'])), 1, 0)
# distribute the returned flags into the uselib variables
parse_flags(ret, kw.get('uselib_store', kw['package'].upper()), kw.get('env', self.env))
return ret
@conf
def check_cfg(self, *k, **kw):
"""Front-end for pkg-config style checks: validate kw, run the check,
print the result, and re-raise/fatal on mandatory failures."""
self.validate_cfg(kw)
if 'msg' in kw:
self.check_message_1(kw['msg'])
ret = None
try:
ret = self.exec_cfg(kw)
except Configure.ConfigurationError, e:
if 'errmsg' in kw:
self.check_message_2(kw['errmsg'], 'YELLOW')
if 'mandatory' in kw and kw['mandatory']:
if Logs.verbose > 1:
raise
else:
self.fatal('the configuration failed (see %r)' % self.log.name)
else:
kw['success'] = ret
if 'okmsg' in kw:
self.check_message_2(self.ret_msg(kw['okmsg'], kw))
return ret
# the idea is the following: now that we are certain
# that all the code here is only for c or c++, it is
# easy to put all the logic in one function
#
# this should prevent code duplication (ita)
# env: an optional environment (modified -> provide a copy)
# compiler: cc or cxx - it tries to guess what is best
# type: program, shlib, staticlib, objects
# code: a c code to execute
# uselib_store: where to add the variables
# uselib: parameters to use for building
# define: define to set, like FOO in #define FOO, if not set, add /* #undef FOO */
# execute: True or False - will return the result of the execution
@conf
def validate_c(self, kw):
"""validate the parameters for the test method"""
if not 'env' in kw:
kw['env'] = self.env.copy()
env = kw['env']
# pick the compiler: prefer c++ when one is configured and the cxx task exists
if not 'compiler' in kw:
kw['compiler'] = 'cc'
if env['CXX_NAME'] and Task.TaskBase.classes.get('cxx', None):
kw['compiler'] = 'cxx'
if not self.env['CXX']:
self.fatal('a c++ compiler is required')
else:
if not self.env['CC']:
self.fatal('a c compiler is required')
if not 'type' in kw:
kw['type'] = 'cprogram'
assert not(kw['type'] != 'cprogram' and kw.get('execute', 0)), 'can only execute programs'
#if kw['type'] != 'program' and kw.get('execute', 0):
# raise ValueError, 'can only execute programs'
def to_header(dct):
# build the '#include <...>' preamble from header_name
if 'header_name' in dct:
dct = Utils.to_list(dct['header_name'])
return ''.join(['#include <%s>\n' % x for x in dct])
return ''
# set the file name
if not 'compile_mode' in kw:
kw['compile_mode'] = (kw['compiler'] == 'cxx') and 'cxx' or 'cc'
if not 'compile_filename' in kw:
kw['compile_filename'] = 'test.c' + ((kw['compile_mode'] == 'cxx') and 'pp' or '')
#OSX
if 'framework_name' in kw:
try: TaskGen.task_gen.create_task_macapp
except AttributeError: self.fatal('frameworks require the osx tool')
fwkname = kw['framework_name']
if not 'uselib_store' in kw:
kw['uselib_store'] = fwkname.upper()
if not kw.get('no_header', False):
if not 'header_name' in kw:
kw['header_name'] = []
fwk = '%s/%s.h' % (fwkname, fwkname)
if kw.get('remove_dot_h', None):
fwk = fwk[:-2]
kw['header_name'] = Utils.to_list(kw['header_name']) + [fwk]
kw['msg'] = 'Checking for framework %s' % fwkname
kw['framework'] = fwkname
#kw['frameworkpath'] = set it yourself
# derive code/message/define from whichever kind of check was requested
if 'function_name' in kw:
fu = kw['function_name']
if not 'msg' in kw:
kw['msg'] = 'Checking for function %s' % fu
kw['code'] = to_header(kw) + SNIP1 % fu
if not 'uselib_store' in kw:
kw['uselib_store'] = fu.upper()
if not 'define_name' in kw:
kw['define_name'] = self.have_define(fu)
elif 'type_name' in kw:
tu = kw['type_name']
if not 'msg' in kw:
kw['msg'] = 'Checking for type %s' % tu
if not 'header_name' in kw:
kw['header_name'] = 'stdint.h'
kw['code'] = to_header(kw) + SNIP2 % {'type_name' : tu}
if not 'define_name' in kw:
kw['define_name'] = self.have_define(tu.upper())
elif 'header_name' in kw:
if not 'msg' in kw:
kw['msg'] = 'Checking for header %s' % kw['header_name']
l = Utils.to_list(kw['header_name'])
assert len(l)>0, 'list of headers in header_name is empty'
kw['code'] = to_header(kw) + SNIP3
if not 'uselib_store' in kw:
kw['uselib_store'] = l[0].upper()
if not 'define_name' in kw:
kw['define_name'] = self.have_define(l[0])
if 'lib' in kw:
if not 'msg' in kw:
kw['msg'] = 'Checking for library %s' % kw['lib']
if not 'uselib_store' in kw:
kw['uselib_store'] = kw['lib'].upper()
if 'staticlib' in kw:
if not 'msg' in kw:
kw['msg'] = 'Checking for static library %s' % kw['staticlib']
if not 'uselib_store' in kw:
kw['uselib_store'] = kw['staticlib'].upper()
if 'fragment' in kw:
# an additional code fragment may be provided to replace the predefined code
# in custom headers
kw['code'] = kw['fragment']
if not 'msg' in kw:
kw['msg'] = 'Checking for custom code'
if not 'errmsg' in kw:
kw['errmsg'] = 'fail'
for (flagsname,flagstype) in [('cxxflags','compiler'), ('cflags','compiler'), ('linkflags','linker')]:
if flagsname in kw:
if not 'msg' in kw:
kw['msg'] = 'Checking for %s flags %s' % (flagstype, kw[flagsname])
if not 'errmsg' in kw:
kw['errmsg'] = 'fail'
# final defaults for any keys still missing
if not 'execute' in kw:
kw['execute'] = False
if not 'errmsg' in kw:
kw['errmsg'] = 'not found'
if not 'okmsg' in kw:
kw['okmsg'] = 'ok'
if not 'code' in kw:
kw['code'] = SNIP3
if not kw.get('success'): kw['success'] = None
assert 'msg' in kw, 'invalid parameters, read http://freehackers.org/~tnagy/wafbook/single.html#config_helpers_c'
@conf
def post_check(self, *k, **kw):
	"""Set the configuration variables after a test was run successfully.

	Stores a define (when define_name is set) and populates the
	uselib variables (LIB_X, CPPPATH_X, ...) when uselib_store is set.
	"""
	# compute the success value: for executed tests it is the program output
	# (a string), for compile-only tests True when the build returned 0
	is_success = 0
	if kw['execute']:
		if kw['success']:
			is_success = kw['success']
	else:
		is_success = (kw['success'] == 0)

	def define_or_stuff():
		# store the result as a define: the captured program output when
		# define_ret was requested, otherwise a conditional define
		# (fix: removed the unused local 'nm')
		if kw['execute'] and kw.get('define_ret', None) and isinstance(is_success, str):
			self.define(kw['define_name'], is_success, quote=kw.get('quote', 1))
		else:
			self.define_cond(kw['define_name'], is_success)

	if 'define_name' in kw:
		if 'header_name' in kw or 'function_name' in kw or 'type_name' in kw or 'fragment' in kw:
			define_or_stuff()

	if is_success and 'uselib_store' in kw:
		import cc, cxx
		for k in set(cc.g_cc_flag_vars).union(cxx.g_cxx_flag_vars):
			lk = k.lower()
			# inconsistency: includes -> CPPPATH
			if k == 'CPPPATH': lk = 'includes'
			if k == 'CXXDEFINES': lk = 'defines'
			if k == 'CCDEFINES': lk = 'defines'
			if lk in kw:
				val = kw[lk]
				# remove trailing slash
				if isinstance(val, str):
					val = val.rstrip(os.path.sep)
				self.env.append_unique(k + '_' + kw['uselib_store'], val)
@conf
def check(self, *k, **kw):
	# generic configuration test entry point
	# it is safer to use check_cxx or check_cc, which preset the compiler
	self.validate_c(kw)
	self.check_message_1(kw['msg'])
	ret = None
	try:
		ret = self.run_c_code(*k, **kw)
	except Configure.ConfigurationError, e:
		self.check_message_2(kw['errmsg'], 'YELLOW')
		if 'mandatory' in kw and kw['mandatory']:
			if Logs.verbose > 1:
				# in very verbose mode re-raise to show the stack trace
				raise
			else:
				self.fatal('the configuration failed (see %r)' % self.log.name)
	else:
		kw['success'] = ret
		self.check_message_2(self.ret_msg(kw['okmsg'], kw))

	# record the results: defines and uselib variables
	self.post_check(*k, **kw)
	if not kw.get('execute', False):
		# compile-only test: report success as a boolean
		return ret == 0
	return ret
@conf
def run_c_code(self, *k, **kw):
	"""Compile (and optionally execute) the configuration test in kw.

	Returns 0 when the build succeeds, or the program output when
	kw['execute'] is set; calls self.fatal on failure.
	"""
	test_f_name = kw['compile_filename']

	# make certain to use a fresh folder - necessary for win32
	# (fix: the counter previously rebound the *k parameter that is
	# forwarded to the task generator attributes below)
	counter = 0
	while counter < 10000:
		tmpdir = os.path.join(self.blddir, '.conf_check_%d' % counter)

		# if the folder already exists, remove it
		try:
			shutil.rmtree(tmpdir)
		except OSError:
			pass

		try:
			os.stat(tmpdir)
		except OSError:
			break
		counter += 1

	try:
		os.makedirs(tmpdir)
	except OSError:
		self.fatal('cannot create a configuration test folder %r' % tmpdir)

	try:
		os.stat(tmpdir)
	except OSError:
		self.fatal('cannot use the configuration test folder %r' % tmpdir)

	bdir = os.path.join(tmpdir, 'testbuild')

	if not os.path.exists(bdir):
		os.makedirs(bdir)

	env = kw['env']

	# write the test code; close the file even if the write fails
	dest = open(os.path.join(tmpdir, test_f_name), 'w')
	try:
		dest.write(kw['code'])
	finally:
		dest.close()

	back = os.path.abspath('.')

	bld = Build.BuildContext()
	bld.log = self.log
	bld.all_envs.update(self.all_envs)
	bld.all_envs['default'] = env
	bld.lst_variants = bld.all_envs.keys()
	bld.load_dirs(tmpdir, bdir)

	os.chdir(tmpdir)

	bld.rescan(bld.srcnode)

	o = bld.new_task_gen(features=[kw['compile_mode'], kw['type']], source=test_f_name, target='testprog')

	for attr, value in kw.iteritems():
		setattr(o, attr, value)

	self.log.write("==>\n%s\n<==\n" % kw['code'])

	# compile the program
	try:
		bld.compile()
	except Utils.WafError:
		ret = Utils.ex_stack()
	else:
		ret = 0

	# chdir before returning
	os.chdir(back)

	if ret:
		self.log.write('command returned %r' % ret)
		self.fatal(str(ret))

	# if we need to run the program, try to get its result
	if kw['execute']:
		# name of the program to execute
		lastprog = o.link_task.outputs[0].abspath(env)
		args = Utils.to_list(kw.get('exec_args', []))
		try:
			data = Utils.cmd_output([lastprog] + args).strip()
		except ValueError:
			self.fatal(Utils.ex_stack())
		ret = data

	return ret
@conf
def check_cxx(self, *k, **kw):
	"run a configuration test with the c++ compiler preselected"
	kw.update(compiler='cxx')
	return self.check(*k, **kw)
@conf
def check_cc(self, *k, **kw):
	"run a configuration test with the c compiler preselected"
	kw.update(compiler='cc')
	return self.check(*k, **kw)
@conf
def define(self, define, value, quote=1):
	"""Record a single define for later output in the config header.

	Only str and int values are supported; str values are quoted in
	the generated header unless quote is false.
	"""
	assert define and isinstance(define, str)

	# the ordered dict keeps the header output deterministic
	tbl = self.env[DEFINES] or Utils.ordered_dict()

	if isinstance(value, str):
		if quote:
			stored = '"%s"' % str(value)
		else:
			stored = value
		tbl[define] = stored
	elif isinstance(value, int):
		tbl[define] = value
	else:
		raise TypeError('define %r -> %r must be a string or an int' % (define, value))

	# write back so reconfiguring is faster
	self.env[DEFINES] = tbl
	self.env[define] = value # <- not certain this is necessary
@conf
def undefine(self, define):
	"""Record *define* as explicitly undefined in the config header."""
	assert define and isinstance(define, str)

	tbl = self.env[DEFINES] or Utils.ordered_dict()
	tbl[define] = UNDEFINED

	# write back so reconfiguring is faster
	self.env[DEFINES] = tbl
	self.env[define] = UNDEFINED
@conf
def define_cond(self, name, value):
	"""Define *name* to 1 when value is true, else mark it undefined.

	Formally equivalent to: if value: define(name, 1) else: undefine(name)
	"""
	if not value:
		self.undefine(name)
	else:
		self.define(name, 1)
@conf
def is_defined(self, key):
	"tell whether key was defined earlier (and not undefined)"
	defines = self.env[DEFINES]
	if not defines:
		return False
	try:
		return defines[key] != UNDEFINED
	except KeyError:
		return False
@conf
def get_define(self, define):
	"return the value of a previously stored define, or None"
	try:
		return self.env[DEFINES][define]
	except KeyError:
		return None
@conf
def have_define(self, name):
	"prefix the define with 'HAVE_' (or a custom HAVE_PAT) and sanitize it"
	pattern = self.__dict__.get('HAVE_PAT', 'HAVE_%s')
	return pattern % Utils.quote_define_name(name)
@conf
def write_config_header(self, configfile='', env='', guard='', top=False):
	"""Save the stored defines into a config header file.

	configfile defaults to WAF_CONFIG_H; guard overrides the include
	guard; top writes at the root of the build directory.
	"""
	if not configfile: configfile = WAF_CONFIG_H
	waf_guard = guard or '_%s_WAF' % Utils.quote_define_name(configfile)

	# configfile -> absolute path
	# there is a good reason to concatenate first and to split afterwards
	if not env: env = self.env
	if top:
		diff = ''
	else:
		diff = Utils.diff_path(self.srcdir, self.curdir)
	full = os.sep.join([self.blddir, env.variant(), diff, configfile])
	full = os.path.normpath(full)

	# best effort: the open() below fails loudly if the path is unusable
	# (fix: narrowed the bare except, dropped the unused 'base')
	try:
		os.makedirs(os.path.dirname(full))
	except OSError:
		pass

	dest = open(full, 'w')
	try:
		dest.write('/* Configuration header created by Waf - do not edit */\n')
		dest.write('#ifndef %s\n#define %s\n\n' % (waf_guard, waf_guard))
		dest.write(self.get_config_header())

		# config files are not removed on "waf clean"
		env.append_value(CFG_FILES, os.path.join(diff, configfile))

		dest.write('\n#endif /* %s */\n' % waf_guard)
	finally:
		dest.close()
@conf
def get_config_header(self):
	"""Return the body of the config header as a string.

	Override this method when you need a custom configuration header.
	"""
	tbl = self.env[DEFINES] or Utils.ordered_dict()
	lines = []
	for key in tbl.allkeys:
		value = tbl[key]
		if value is None:
			lines.append('#define %s' % key)
		elif value is UNDEFINED:
			lines.append('/* #undef %s */' % key)
		else:
			lines.append('#define %s %s' % (key, value))
	return "\n".join(lines)
@conftest
def find_cpp(conf):
	"locate a c preprocessor: $CPP, the os environment, PATH, then CC/CXX"
	v = conf.env
	cpp = v['CPP']
	if not cpp and 'CPP' in conf.environ:
		cpp = conf.environ['CPP']
	if not cpp:
		cpp = conf.find_program('cpp', var='CPP')
	if not cpp:
		cpp = v['CC']
	if not cpp:
		cpp = v['CXX']
	v['CPP'] = cpp
@conftest
def cc_add_flags(conf):
	"import the c-related flag variables from the os environment"
	conf.add_os_flags('CFLAGS', 'CCFLAGS')
	for var in ('CPPFLAGS', 'LINKFLAGS'):
		conf.add_os_flags(var)
@conftest
def cxx_add_flags(conf):
	"import the c++-related flag variables from the os environment"
	for var in ('CXXFLAGS', 'CPPFLAGS', 'LINKFLAGS'):
		conf.add_os_flags(var)
@conftest
def cc_load_tools(conf):
	"load the waf tool providing c support"
	conf.check_tool('cc')
@conftest
def cxx_load_tools(conf):
	"load the waf tool providing c++ support"
	conf.check_tool('cxx')

69
tools/wafadmin/Tools/cs.py

@ -0,0 +1,69 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2006 (ita)
"C# support"
import TaskGen, Utils, Task
from Logs import error
from TaskGen import before, after, taskgen, feature
flag_vars= ['FLAGS', 'ASSEMBLIES']
@feature('cs')
def init_cs(self):
	"give default (empty) values to the c# specific attributes"
	Utils.def_attrs(self, flags='', assemblies='', resources='', uselib='')
@feature('cs')
@after('init_cs')
def apply_uselib_cs(self):
	"""Append the env variables stored per uselib entry (FLAGS_X,
	ASSEMBLIES_X) to the task generator environment."""
	if not self.uselib:
		return
	global flag_vars
	for var in self.to_list(self.uselib):
		# fix: iterate the module-level flag_vars (as the 'global'
		# declaration intends) - task generators have no flag_vars attribute
		for v in flag_vars:
			val = self.env[v + '_' + var]
			if val: self.env.append_value(v, val)
@feature('cs')
@after('apply_uselib_cs')
@before('apply_core')
def apply_cs(self):
	"""Create the mcs compilation task and bypass the default apply_core."""
	try: self.meths.remove('apply_core')
	except ValueError: pass

	# process the flags for the assemblies
	# (fix: '+=' with a string extends a list character by character;
	# append the whole '/r:...' flag instead)
	assemblies_flags = []
	for i in self.to_list(self.assemblies) + self.env['ASSEMBLIES']:
		assemblies_flags.append('/r:' + i)
	self.env['_ASSEMBLIES'] += assemblies_flags

	# process the flags for the resources
	for i in self.to_list(self.resources):
		self.env['_RESOURCES'].append('/resource:' + i)

	# additional flags
	self.env['_FLAGS'] += self.to_list(self.flags) + self.env['FLAGS']

	curnode = self.path

	# process the sources
	nodes = []
	for i in self.to_list(self.source):
		nodes.append(curnode.find_resource(i))

	# create the task
	task = self.create_task('mcs')
	task.inputs = nodes
	task.set_outputs(self.path.find_or_declare(self.target))
# c# compiler invocation; the _FLAGS/_ASSEMBLIES/_RESOURCES variables are
# filled in by apply_cs/apply_uselib_cs above
Task.simple_task_type('mcs', '${MCS} ${SRC} /out:${TGT} ${_FLAGS} ${_ASSEMBLIES} ${_RESOURCES}', color='YELLOW')
def detect(conf):
	"find a c# compiler: mcs, falling back to gmcs"
	if not conf.find_program('mcs', var='MCS'):
		conf.find_program('gmcs', var='MCS')

106
tools/wafadmin/Tools/cxx.py

@ -0,0 +1,106 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2005 (ita)
"Base for c++ programs and libraries"
import TaskGen, Task, Utils
from Logs import debug
import ccroot # <- do not remove
from TaskGen import feature, before, extension, after
# env variables that contribute c++ compilation/link flags (per-uselib lookup)
g_cxx_flag_vars = [
	'CXXDEPS', 'FRAMEWORK', 'FRAMEWORKPATH',
	'STATICLIB', 'LIB', 'LIBPATH', 'LINKFLAGS', 'RPATH',
	'CXXFLAGS', 'CCFLAGS', 'CPPPATH', 'CPPFLAGS', 'CXXDEFINES']
"main cpp variables"

# file extensions handled by the c++ compiler hook
EXT_CXX = ['.cpp', '.cc', '.cxx', '.C', '.c++']
# variables contributing to the task signatures
g_cxx_type_vars = ['CXXFLAGS', 'LINKFLAGS']
# TODO remove in waf 1.6
class cxx_taskgen(ccroot.ccroot_abstract):
	# backward-compatibility alias; all behavior lives in ccroot_abstract
	pass
@feature('cxx')
@before('apply_type_vars')
@after('default_cc')
def init_cxx(self):
	"initialize the flag variables used by c++ task generators"
	# without the 'cc' feature, compile plain .c files with the c++ compiler
	if not 'cc' in self.features:
		self.mappings['.c'] = TaskGen.task_gen.mappings['.cxx']

	self.p_flag_vars = set(self.p_flag_vars).union(g_cxx_flag_vars)
	self.p_type_vars = set(self.p_type_vars).union(g_cxx_type_vars)

	if not self.env['CXX_NAME']:
		raise Utils.WafError("At least one compiler (g++, ..) must be selected")
@feature('cxx')
@after('apply_incpaths')
def apply_obj_vars_cxx(self):
	"""after apply_incpaths for INC_PATHS"""
	env = self.env
	app = env.append_unique
	cxxpath_st = env['CPPPATH_ST']

	# local include paths come first: build dir form, then source dir form
	for node in env['INC_PATHS']:
		app('_CXXINCFLAGS', cxxpath_st % node.bldpath(env))
		app('_CXXINCFLAGS', cxxpath_st % node.srcpath(env))

	# then the include paths coming from the uselib variables
	for path in env['CPPPATH']:
		app('_CXXINCFLAGS', cxxpath_st % path)
@feature('cxx')
@after('apply_lib_vars')
def apply_defines_cxx(self):
	"""after uselib is set for CXXDEFINES"""
	self.defines = getattr(self, 'defines', [])
	lst = self.to_list(self.defines) + self.to_list(self.env['CXXDEFINES'])
	milst = []

	# now process the local defines, keeping only the first occurrence
	for defi in lst:
		if not defi in milst:
			milst.append(defi)

	# CXXDEFINES_USELIB
	libs = self.to_list(self.uselib)
	for l in libs:
		val = self.env['CXXDEFINES_'+l]
		if val: milst += self.to_list(val)

	# DEFLINES holds "NAME VALUE" pairs (used when writing config headers)
	self.env['DEFLINES'] = ["%s %s" % (x[0], Utils.trimquotes('='.join(x[1:]))) for x in [y.split('=') for y in milst]]
	y = self.env['CXXDEFINES_ST']
	self.env['_CXXDEFFLAGS'] = [y%x for x in milst]
@extension(EXT_CXX)
def cxx_hook(self, node):
	"create a c++ compilation task for the given source node"
	task = self.create_task('cxx')
	if getattr(self, 'obj_ext', None):
		obj_ext = self.obj_ext
	else:
		# default object suffix, disambiguated by the generator index
		obj_ext = '_%d.o' % self.idx

	task.inputs = [node]
	task.outputs = [node.change_ext(obj_ext)]
	try:
		self.compiled_tasks.append(task)
	except AttributeError:
		raise Utils.WafError('Have you forgotten to set the feature "cxx" on %s?' % str(self))
	return task
# compilation task: source file -> object file
cxx_str = '${CXX} ${CXXFLAGS} ${CPPFLAGS} ${_CXXINCFLAGS} ${_CXXDEFFLAGS} ${CXX_SRC_F}${SRC} ${CXX_TGT_F}${TGT}'
cls = Task.simple_task_type('cxx', cxx_str, color='GREEN', ext_out='.o', ext_in='.cxx', shell=False)
cls.scan = ccroot.scan
cls.vars.append('CXXDEPS')

# link task: only one link may run at a time (maxjobs = 1)
link_str = '${LINK_CXX} ${CXXLNK_SRC_F}${SRC} ${CXXLNK_TGT_F}${TGT[0].abspath(env)} ${LINKFLAGS}'
cls = Task.simple_task_type('cxx_link', link_str, color='YELLOW', ext_in='.o', ext_out='.bin', shell=False)
cls.maxjobs = 1
cls.install = Utils.nada

532
tools/wafadmin/Tools/d.py

@ -0,0 +1,532 @@
#!/usr/bin/env python
# encoding: utf-8
# Carlos Rafael Giani, 2007 (dv)
# Thomas Nagy, 2007-2008 (ita)
import os, sys, re, optparse
import ccroot # <- leave this
import TaskGen, Utils, Task, Configure, Logs, Build
from Logs import debug, error
from TaskGen import taskgen, feature, after, before, extension
from Configure import conftest
# file extensions recognized as d sources
EXT_D = ['.d', '.di', '.D']
# methods borrowed from ccroot for the 'd' feature (see bind_feature below)
D_METHS = ['apply_core', 'apply_vnum', 'apply_objdeps'] # additional d methods
def filter_comments(filename):
	"""Return the characters of the file with strings, char literals and
	comments (//, /* */, nested /+ +/) removed - used before scanning
	d sources for import statements."""
	txt = Utils.readf(filename)
	buf = []

	i = 0
	max = len(txt)
	while i < max:
		c = txt[i]
		# skip a string
		if c == '"':
			i += 1
			c = ''
			while i < max:
				p = c
				c = txt[i]
				i += 1
				if i == max: return buf
				if c == '"':
					# count the preceding backslashes to detect an escaped quote
					# NOTE(review): 'while i < cnt' starts with cnt == 0 so this
					# loop never runs - escaped quotes are likely mishandled; confirm
					cnt = 0
					while i < cnt and i < max:
						#print "cntcnt = ", str(cnt), self.txt[self.i-2-cnt]
						if txt[i-2-cnt] == '\\': cnt+=1
						else: break
					#print "cnt is ", str(cnt)
					if (cnt%2)==0: break
			# i -= 1 # <- useless in practice
		# skip a char
		elif c == "'":
			i += 1
			if i == max: return buf
			c = txt[i]
			if c == '\\':
				i += 1
				if i == max: return buf
				c = txt[i]
				if c == 'x':
					i += 2 # skip two chars
				elif c == 'u':
					i += 4 # skip unicode chars
			i += 1
			if i == max: return buf
			c = txt[i]
			if c != '\'': error("uh-oh, invalid character")
		# skip a comment
		elif c == '/':
			if i == max: break
			c = txt[i+1]
			# eat /+ +/ comments (these nest in d)
			if c == '+':
				i += 1
				nesting = 1
				prev = 0
				while i < max:
					c = txt[i]
					if c == '+':
						prev = 1
					elif c == '/':
						if prev:
							nesting -= 1
							if nesting == 0: break
						else:
							if i < max:
								i += 1
								c = txt[i]
								if c == '+':
									nesting += 1
							else:
								return buf
					else:
						prev = 0
					i += 1
			# eat /* */ comments
			elif c == '*':
				i += 1
				while i < max:
					c = txt[i]
					if c == '*':
						prev = 1
					elif c == '/':
						if prev: break
					else:
						prev = 0
					i += 1
			# eat // comments
			elif c == '/':
				i += 1
				c = txt[i]
				while i < max and c != '\n':
					i += 1
					c = txt[i]
		# a valid char, add it to the buffer
		else:
			buf.append(c)
		i += 1
	return buf
class d_parser(object):
	"""Extract the module dependencies of a d source file by scanning
	its import statements (comments are filtered out first)."""
	def __init__(self, env, incpaths):
		#self.code = ''
		#self.module = ''
		#self.imports = []
		self.allnames = []

		self.re_module = re.compile("module\s+([^;]+)")
		self.re_import = re.compile("import\s+([^;]+)")
		self.re_import_bindings = re.compile("([^:]+):(.*)")
		self.re_import_alias = re.compile("[^=]+=(.+)")

		self.env = env

		# nodes: resolved dependencies; names: unresolved import names
		self.nodes = []
		self.names = []

		self.incpaths = incpaths

	def tryfind(self, filename):
		# look for 'a.b.c' as a/b/c.d on the include paths; record
		# unresolved imports in self.names
		found = 0
		for n in self.incpaths:
			found = n.find_resource(filename.replace('.', '/') + '.d')
			if found:
				self.nodes.append(found)
				self.waiting.append(found)
				break
		if not found:
			if not filename in self.names:
				self.names.append(filename)

	def get_strings(self, code):
		#self.imports = []
		self.module = ''
		lst = []

		# get the module name (if present)
		mod_name = self.re_module.search(code)
		if mod_name:
			self.module = re.sub('\s+', '', mod_name.group(1)) # strip all whitespaces

		# go through the code, have a look at all import occurrences
		# first, lets look at anything beginning with "import" and ending with ";"
		import_iterator = self.re_import.finditer(code)
		if import_iterator:
			for import_match in import_iterator:
				import_match_str = re.sub('\s+', '', import_match.group(1)) # strip all whitespaces

				# does this end with an import bindings declaration?
				# (import bindings always terminate the list of imports)
				bindings_match = self.re_import_bindings.match(import_match_str)
				if bindings_match:
					import_match_str = bindings_match.group(1)
					# if so, extract the part before the ":" (since the module declaration(s) is/are located there)

				# split the matching string into a bunch of strings, separated by a comma
				matches = import_match_str.split(',')

				for match in matches:
					alias_match = self.re_import_alias.match(match)
					if alias_match:
						# is this an alias declaration? (alias = module name) if so, extract the module name
						match = alias_match.group(1)

					lst.append(match)
		return lst

	def start(self, node):
		# breadth-first traversal of the dependency graph starting at node
		self.waiting = [node]
		# while the stack is not empty, add the dependencies
		while self.waiting:
			nd = self.waiting.pop(0)
			self.iter(nd)

	def iter(self, node):
		path = node.abspath(self.env) # obtain the absolute path
		code = "".join(filter_comments(path)) # read the file and filter the comments
		names = self.get_strings(code) # obtain the import strings
		for x in names:
			# optimization: do not process the same import name twice
			if x in self.allnames: continue
			self.allnames.append(x)

			# for each name, see if it is like a node or not
			self.tryfind(x)
def scan(self):
	"look for the .d/.di files that the .d source needs (task scanner method)"
	env = self.env
	gruik = d_parser(env, env['INC_PATHS'])
	gruik.start(self.inputs[0])

	if Logs.verbose:
		debug('deps: nodes found for %s: %s %s' % (str(self.inputs[0]), str(gruik.nodes), str(gruik.names)))
		#debug("deps found for %s: %s" % (str(node), str(gruik.deps)), 'deps')
	# (resolved dependency nodes, unresolved names)
	return (gruik.nodes, gruik.names)
def get_target_name(self):
	"compute the pattern-expanded file name for d programs and libraries"
	env = self.env
	kind = 'program'
	for feat in self.features:
		if feat in ('dshlib', 'dstaticlib'):
			# strip the leading 'd' to map onto D_shlib/D_staticlib patterns
			kind = feat.lstrip('d')
	return env['D_%s_PATTERN' % kind] % self.target
# default attribute values for d task generators
d_params = {
	'dflags': '',
	'importpaths':'',
	'libs':'',
	'libpaths':'',
	'generate_headers':False,
}

@feature('d')
@before('apply_type_vars')
def init_d(self):
	# NOTE(review): a second module-level 'init_d' is defined further below
	# and replaces this one at import time, so this body never runs - confirm
	# which definition is intended before removing either
	for x in d_params:
		setattr(self, x, getattr(self, x, d_params[x]))
class d_taskgen(TaskGen.task_gen):
	def __init__(self, *k, **kw):
		TaskGen.task_gen.__init__(self, *k, **kw)

		# COMPAT: the old api passed the link type as second positional arg
		if len(k) > 1:
			self.features.append('d' + k[1])
# okay, we borrow a few methods from ccroot
TaskGen.bind_feature('d', D_METHS)

@feature('d')
@before('apply_d_libs')
def init_d(self):
	# NOTE(review): this definition shadows an earlier module-level 'init_d'
	# (the d_params-based one), which is therefore dead code - confirm intent
	Utils.def_attrs(self,
		dflags='',
		importpaths='',
		libs='',
		libpaths='',
		uselib='',
		uselib_local='',
		generate_headers=False, # set to true if you want .di files as well as .o
		compiled_tasks=[],
		add_objects=[],
		link_task=None)
@feature('d')
@after('apply_d_link')
@before('apply_vnum')
def apply_d_libs(self):
	"""Process uselib_local: link against the local d libraries listed,
	add their paths to the link flags and inherit their uselib entries."""
	uselib = self.to_list(self.uselib)
	seen = []
	local_libs = self.to_list(self.uselib_local)
	libs = []
	libpaths = []
	env = self.env
	while local_libs:
		x = local_libs.pop()

		# visit dependencies only once
		if x in seen:
			continue
		else:
			seen.append(x)

		y = self.name_to_obj(x)
		if not y:
			raise Utils.WafError('object not found in uselib_local: obj %s uselib %s' % (self.name, x))

		# object has ancestors to process first ? update the list of names
		if y.uselib_local:
			added = 0
			lst = y.to_list(y.uselib_local)
			lst.reverse()
			for u in lst:
				if u in seen: continue
				added = 1
				local_libs = [u]+local_libs
			if added: continue # list of names modified, loop

		# safe to process the current object
		y.post()
		seen.append(x)

		# library name without the leading directories
		libname = y.target[y.target.rfind(os.sep) + 1:]
		if 'dshlib' in y.features or 'dstaticlib' in y.features:
			#libs.append(y.target)
			env.append_unique('DLINKFLAGS', env['DLIBPATH_ST'] % y.link_task.outputs[0].parent.bldpath(env))
			env.append_unique('DLINKFLAGS', env['DLIB_ST'] % libname)

		# add the link path too
		tmp_path = y.path.bldpath(env)
		if not tmp_path in libpaths: libpaths = [tmp_path] + libpaths

		# set the dependency over the link task
		if y.link_task is not None:
			self.link_task.set_run_after(y.link_task)
			dep_nodes = getattr(self.link_task, 'dep_nodes', [])
			self.link_task.dep_nodes = dep_nodes + y.link_task.outputs

		# add ancestors uselib too
		# TODO potential problems with static libraries ?
		morelibs = y.to_list(y.uselib)
		for v in morelibs:
			if v in uselib: continue
			uselib = [v]+uselib
	self.uselib = uselib
@feature('dprogram', 'dshlib', 'dstaticlib')
@after('apply_core')
def apply_d_link(self):
	"create the link task combining all the compiled objects"
	link = getattr(self, 'link', None)
	if not link:
		# static libraries are archived, everything else uses the d linker
		if 'dstaticlib' in self.features: link = 'static_link'
		else: link = 'd_link'
	linktask = self.create_task(link)
	outputs = [t.outputs[0] for t in self.compiled_tasks]
	linktask.set_inputs(outputs)
	linktask.set_outputs(self.path.find_or_declare(get_target_name(self)))

	self.link_task = linktask
@feature('d')
@after('apply_core')
def apply_d_vars(self):
	"""Compute the import paths, library paths, libraries and flags
	for the d compilation and link tasks."""
	env = self.env
	dpath_st = env['DPATH_ST']
	lib_st = env['DLIB_ST']
	libpath_st = env['DLIBPATH_ST']

	importpaths = self.to_list(self.importpaths)
	libpaths = []
	libs = []
	uselib = self.to_list(self.uselib)

	for i in uselib:
		if env['DFLAGS_' + i]:
			env.append_unique('DFLAGS', env['DFLAGS_' + i])

	for x in self.features:
		if not x in ['dprogram', 'dstaticlib', 'dshlib']:
			continue
		# fix: the stripped name was discarded ('x.lstrip' without
		# assignment), so D_shlib_DFLAGS / D_staticlib_DFLAGS set by the
		# compiler tools (dmd.py, gdc.py) were never picked up
		x = x.lstrip('d')
		d_shlib_dflags = env['D_' + x + '_DFLAGS']
		if d_shlib_dflags:
			env.append_unique('DFLAGS', d_shlib_dflags)

	# add import paths
	for i in uselib:
		if env['DPATH_' + i]:
			for entry in self.to_list(env['DPATH_' + i]):
				if not entry in importpaths:
					importpaths.append(entry)

	# now process the import paths
	for path in importpaths:
		if os.path.isabs(path):
			env.append_unique('_DIMPORTFLAGS', dpath_st % path)
		else:
			node = self.path.find_dir(path)
			self.env.append_unique('INC_PATHS', node)
			env.append_unique('_DIMPORTFLAGS', dpath_st % node.srcpath(env))
			env.append_unique('_DIMPORTFLAGS', dpath_st % node.bldpath(env))

	# add library paths
	for i in uselib:
		if env['LIBPATH_' + i]:
			for entry in self.to_list(env['LIBPATH_' + i]):
				if not entry in libpaths:
					libpaths += [entry]
	libpaths = self.to_list(self.libpaths) + libpaths

	# now process the library paths
	for path in libpaths:
		env.append_unique('DLINKFLAGS', libpath_st % path)

	# add libraries
	for i in uselib:
		if env['LIB_' + i]:
			for entry in self.to_list(env['LIB_' + i]):
				if not entry in libs:
					libs += [entry]
	libs = libs + self.to_list(self.libs)

	# now process the libraries
	for lib in libs:
		env.append_unique('DLINKFLAGS', lib_st % lib)

	# add linker flags
	for i in uselib:
		dlinkflags = env['DLINKFLAGS_' + i]
		if dlinkflags:
			for linkflag in dlinkflags:
				env.append_unique('DLINKFLAGS', linkflag)
@feature('dshlib')
@after('apply_d_vars')
def add_shlib_d_flags(self):
	"append the shared-library specific link flags"
	env = self.env
	for flag in env['D_shlib_LINKFLAGS']:
		env.append_unique('DLINKFLAGS', flag)
@extension(EXT_D)
def d_hook(self, node):
	"create a d compilation task for node (with .di generation if requested)"
	task = self.create_task(self.generate_headers and 'd_with_header' or 'd')
	try: obj_ext = self.obj_ext
	except AttributeError: obj_ext = '_%d.o' % self.idx

	task.inputs = [node]
	task.outputs = [node.change_ext(obj_ext)]
	self.compiled_tasks.append(task)

	if self.generate_headers:
		# the interface file is a second output of the same task
		header_node = node.change_ext(self.env['DHEADER_ext'])
		task.outputs += [header_node]
# command line templates for the d compilation and link tasks
d_str = '${D_COMPILER} ${DFLAGS} ${_DIMPORTFLAGS} ${D_SRC_F}${SRC} ${D_TGT_F}${TGT}'
d_with_header_str = '${D_COMPILER} ${DFLAGS} ${_DIMPORTFLAGS} \
${D_HDR_F}${TGT[1].bldpath(env)} \
${D_SRC_F}${SRC} \
${D_TGT_F}${TGT[0].bldpath(env)}'
link_str = '${D_LINKER} ${DLNK_SRC_F}${SRC} ${DLNK_TGT_F}${TGT} ${DLINKFLAGS}'
def override_exec(cls):
	"""stupid dmd wants -of stuck to the file name"""
	old_exec = cls.exec_command
	def exec_command(self, *k, **kw):
		# join a standalone '-of' with the following argument before running
		if isinstance(k[0], list):
			lst = k[0]
			for i in xrange(len(lst)):
				if lst[i] == '-of':
					del lst[i]
					lst[i] = '-of' + lst[i]
					break
		return old_exec(self, *k, **kw)
	cls.exec_command = exec_command
# task classes for d compilation, header generation and linking
cls = Task.simple_task_type('d', d_str, 'GREEN', before='static_link d_link', shell=False)
cls.scan = scan
override_exec(cls)

cls = Task.simple_task_type('d_with_header', d_with_header_str, 'GREEN', before='static_link d_link', shell=False)
override_exec(cls)

cls = Task.simple_task_type('d_link', link_str, color='YELLOW', shell=False)
override_exec(cls)
# for feature request #104
@taskgen
def generate_header(self, filename, install_path):
	"schedule the generation of a .di interface file for filename"
	self.header_lst = getattr(self, 'header_lst', [])
	self.meths.append('process_header')
	self.header_lst.append([filename, install_path])
@before('apply_core')
def process_header(self):
	"create one d_header task per file registered through generate_header"
	env = self.env
	for i in getattr(self, 'header_lst', []):
		node = self.path.find_resource(i[0])

		if not node:
			raise Utils.WafError('file not found on d obj '+i[0])

		task = self.create_task('d_header')
		task.set_inputs(node)
		task.set_outputs(node.change_ext('.di'))
# interface file generation task
d_header_str = '${D_COMPILER} ${D_HEADER} ${SRC}'
Task.simple_task_type('d_header', d_header_str, color='BLUE', shell=False)
@conftest
def d_platform_flags(conf):
	"""Set the target file name patterns according to the destination
	binary format ('pe' vs elf-like)."""
	# fix: 'v' was used below without ever being assigned (NameError)
	v = conf.env
	binfmt = conf.env.DEST_BINFMT or Utils.unversioned_sys_platform_to_binary_format(
		conf.env.DEST_OS or Utils.unversioned_sys_platform())
	if binfmt == 'pe':
		v['D_program_PATTERN'] = '%s.exe'
		v['D_shlib_PATTERN'] = 'lib%s.dll'
		v['D_staticlib_PATTERN'] = 'lib%s.a'
	else:
		v['D_program_PATTERN'] = '%s'
		v['D_shlib_PATTERN'] = 'lib%s.so'
		v['D_staticlib_PATTERN'] = 'lib%s.a'
# quick test # (manual check of the comment filter: python d.py file.d)
if __name__ == "__main__":
	#Logs.verbose = 2
	try: arg = sys.argv[1]
	except IndexError: arg = "file.d"

	print("".join(filter_comments(arg)))
	# TODO
	paths = ['.']

	#gruik = filter()
	#gruik.start(arg)

	#code = "".join(gruik.buf)

	#print "we have found the following code"
	#print code

	#print "now parsing"
	#print "-------------------------------------------"
	"""
	parser_ = d_parser()
	parser_.start(arg)

	print "module: %s" % parser_.module
	print "imports: ",
	for imp in parser_.imports:
		print imp + " ",
	print
"""

37
tools/wafadmin/Tools/dbus.py

@ -0,0 +1,37 @@
#!/usr/bin/env python
# encoding: utf-8
# Ali Sabil, 2007
import Task, Utils
from TaskGen import taskgen, before, after, feature
@taskgen
def add_dbus_file(self, filename, prefix, mode):
	"register a dbus xml file to be processed by dbus-binding-tool"
	lst = getattr(self, 'dbus_lst', None)
	if lst is None:
		lst = self.dbus_lst = []
		self.meths.append('process_dbus')
	lst.append([filename, prefix, mode])
@before('apply_core')
def process_dbus(self):
	"create one dbus_binding_tool task per registered dbus file"
	for filename, prefix, mode in getattr(self, 'dbus_lst', []):
		# each task gets its own env copy holding its prefix/mode
		env = self.env.copy()
		node = self.path.find_resource(filename)

		if not node:
			raise Utils.WafError('file not found ' + filename)

		env['DBUS_BINDING_TOOL_PREFIX'] = prefix
		env['DBUS_BINDING_TOOL_MODE'] = mode

		task = self.create_task('dbus_binding_tool', env)
		task.set_inputs(node)
		task.set_outputs(node.change_ext('.h'))
# header generation must run before the c compilation that consumes it
Task.simple_task_type('dbus_binding_tool',
	'${DBUS_BINDING_TOOL} --prefix=${DBUS_BINDING_TOOL_PREFIX} --mode=${DBUS_BINDING_TOOL_MODE} --output=${TGT} ${SRC}',
	color='BLUE', before='cc')
def detect(conf):
	"find dbus-binding-tool and store it as DBUS_BINDING_TOOL"
	conf.find_program('dbus-binding-tool', var='DBUS_BINDING_TOOL')

64
tools/wafadmin/Tools/dmd.py

@ -0,0 +1,64 @@
#!/usr/bin/env python
# encoding: utf-8
# Carlos Rafael Giani, 2007 (dv)
# Thomas Nagy, 2008 (ita)
import sys
import Utils, ar
from Configure import conftest
@conftest
def find_dmd(conf):
	"look for the dmd or ldc compiler and store it as D_COMPILER"
	conf.find_program(['dmd', 'ldc'], var='D_COMPILER', mandatory=True)
@conftest
def common_flags_ldc(conf):
	"flag overrides for the ldc driver (applied after common_flags_dmd)"
	settings = (
		('DFLAGS', ['-d-version=Posix']),
		('DLINKFLAGS', []),
		('D_shlib_DFLAGS', ['-relocation-model=pic']),
	)
	v = conf.env
	for key, val in settings:
		v[key] = val
@conftest
def common_flags_dmd(conf):
	"define the dmd command line patterns and default flags"
	v = conf.env

	# _DFLAGS _DIMPORTFLAGS

	# Compiler is dmd so 'gdc' part will be ignored, just
	# ensure key is there, so wscript can append flags to it
	v['DFLAGS'] = ['-version=Posix']

	v['D_SRC_F'] = ''
	v['D_TGT_F'] = ['-c', '-of']
	v['DPATH_ST'] = '-I%s' # template for adding import paths

	# linker
	v['D_LINKER'] = v['D_COMPILER']
	v['DLNK_SRC_F'] = ''
	v['DLNK_TGT_F'] = '-of'

	v['DLIB_ST'] = '-L-l%s' # template for adding libs
	v['DLIBPATH_ST'] = '-L-L%s' # template for adding libpaths

	# linker debug levels
	v['DFLAGS_OPTIMIZED'] = ['-O']
	v['DFLAGS_DEBUG'] = ['-g', '-debug']
	v['DFLAGS_ULTRADEBUG'] = ['-g', '-debug']
	v['DLINKFLAGS'] = ['-quiet']

	v['D_shlib_DFLAGS'] = ['-fPIC']
	v['D_shlib_LINKFLAGS'] = ['-L-shared']

	# interface (.di) file generation
	v['DHEADER_ext'] = '.di'
	v['D_HDR_F'] = ['-H', '-Hf']
def detect(conf):
	"configure the dmd/ldc compiler and the common d support"
	conf.find_dmd()
	conf.check_tool('ar')
	conf.check_tool('d')
	conf.common_flags_dmd()
	conf.d_platform_flags()

	# ldc accepts most dmd flags but needs a few overrides
	if 'ldc' in conf.env.D_COMPILER:
		conf.common_flags_ldc()

26
tools/wafadmin/Tools/flex.py

@ -0,0 +1,26 @@
#!/usr/bin/env python
# encoding: utf-8
# John O'Meara, 2006
# Thomas Nagy, 2006-2008
"Flex processing"
import TaskGen
def decide_ext(self, node):
	"pick the generated source extension: c++ task generators get .lex.cc"
	if 'cxx' in self.features:
		return '.lex.cc'
	return '.lex.c'
# .l -> .lex.c/.lex.cc chain, scheduled before the c/c++ compilations
TaskGen.declare_chain(
	name = 'flex',
	rule = '${FLEX} -o${TGT} ${FLEXFLAGS} ${SRC}',
	ext_in = '.l',
	decider = decide_ext,
	before = 'cc cxx',
)
def detect(conf):
	"find flex and initialize FLEXFLAGS"
	conf.find_program('flex', var='FLEX', mandatory=True)
	conf.env['FLEXFLAGS'] = ''

40
tools/wafadmin/Tools/gas.py

@ -0,0 +1,40 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2008 (ita)
"as and gas"
import os, sys
import Task
from TaskGen import extension, taskgen, after, before
# file extensions recognized as assembler sources
EXT_ASM = ['.s', '.S', '.asm', '.ASM', '.spp', '.SPP']

as_str = '${AS} ${ASFLAGS} ${_ASINCFLAGS} ${SRC} -o ${TGT}'
Task.simple_task_type('asm', as_str, 'PINK', ext_out='.o', shell=False)
@extension(EXT_ASM)
def asm_hook(self, node):
	"create an assembler task for node and schedule the include-flag setup"
	task = self.create_task('asm')
	try: obj_ext = self.obj_ext
	except AttributeError: obj_ext = '_%d.o' % self.idx

	task.inputs = [node]
	task.outputs = [node.change_ext(obj_ext)]
	self.compiled_tasks.append(task)
	self.meths.append('asm_incflags')
@taskgen
@after('apply_obj_vars_cc')
@after('apply_obj_vars_cxx')
@before('apply_link')
def asm_incflags(self):
	# reuse the include flags computed for the c/c++ compilations
	# NOTE(review): the ASINCFLAGS assignment on the first line is always
	# overwritten by the if/else below - presumably it should be appended
	# rather than replaced; confirm the intent
	if self.env['ASINCFLAGS']: self.env['_ASINCFLAGS'] = self.env['ASINCFLAGS']
	if 'cxx' in self.features: self.env['_ASINCFLAGS'] = self.env['_CXXINCFLAGS']
	else: self.env['_ASINCFLAGS'] = self.env['_CCINCFLAGS']
def detect(conf):
	"find an assembler; fall back to the c compiler driver"
	conf.find_program(['gas', 'as'], var='AS')
	if not conf.env.AS:
		conf.env.AS = conf.env.CC

121
tools/wafadmin/Tools/gcc.py

@ -0,0 +1,121 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2006-2008 (ita)
# Ralf Habacker, 2006 (rh)
# Yinon Ehrlich, 2009
import os, sys
import Configure, Options, Utils
import ccroot, ar
from Configure import conftest
@conftest
def find_gcc(conf):
	"locate gcc (or cc), detect its version and store it in the environment"
	v = conf.env
	cc = conf.cmd_to_list(conf.find_program(['gcc', 'cc'], var='CC', mandatory=True))
	ccroot.get_cc_version(conf, cc, gcc=True)
	v.CC_NAME = 'gcc'
	v.CC = cc
@conftest
def gcc_common_flags(conf):
	"define the gcc command line patterns and default flags"
	v = conf.env

	# CPPFLAGS CCDEFINES _CCINCFLAGS _CCDEFFLAGS
	v['CC_SRC_F'] = ''
	v['CC_TGT_F'] = ['-c', '-o', ''] # shell hack for -MD

	v['CPPPATH_ST'] = '-I%s' # template for adding include paths

	# linker
	if not v['LINK_CC']: v['LINK_CC'] = v['CC']
	v['CCLNK_SRC_F'] = ''
	v['CCLNK_TGT_F'] = ['-o', ''] # shell hack for -MD

	v['LIB_ST'] = '-l%s' # template for adding libs
	v['LIBPATH_ST'] = '-L%s' # template for adding libpaths
	v['STATICLIB_ST'] = '-l%s'
	v['STATICLIBPATH_ST'] = '-L%s'
	v['RPATH_ST'] = '-Wl,-rpath,%s'
	v['CCDEFINES_ST'] = '-D%s'

	v['SONAME_ST'] = '-Wl,-h,%s'
	v['SHLIB_MARKER'] = '-Wl,-Bdynamic'
	v['STATICLIB_MARKER'] = '-Wl,-Bstatic'
	v['FULLSTATIC_MARKER'] = '-static'

	# program
	v['program_PATTERN'] = '%s'

	# shared library
	v['shlib_CCFLAGS'] = ['-fPIC', '-DPIC'] # avoid using -DPIC, -fPIC aleady defines the __PIC__ macro
	v['shlib_LINKFLAGS'] = ['-shared']
	v['shlib_PATTERN'] = 'lib%s.so'

	# static lib
	v['staticlib_LINKFLAGS'] = ['-Wl,-Bstatic']
	v['staticlib_PATTERN'] = 'lib%s.a'

	# osx stuff
	v['LINKFLAGS_MACBUNDLE'] = ['-bundle', '-undefined', 'dynamic_lookup']
	v['CCFLAGS_MACBUNDLE'] = ['-fPIC']
	v['macbundle_PATTERN'] = '%s.bundle'
@conftest
def gcc_modifier_win32(conf):
	"gcc settings for the win32 platform"
	v = conf.env
	overrides = {
		'program_PATTERN': '%s.exe',
		'shlib_PATTERN': '%s.dll',
		'implib_PATTERN': 'lib%s.dll.a',
		'IMPLIB_ST': '-Wl,--out-implib,%s',
		# TODO 64-bit platforms may need -fPIC
		'shlib_CCFLAGS': ['-DPIC', '-DDLL_EXPORT'],
	}
	for key, val in overrides.items():
		v[key] = val
	# suppress information messages
	v.append_value('LINKFLAGS', '-Wl,--enable-auto-import')
@conftest
def gcc_modifier_cygwin(conf):
	"gcc settings for cygwin: win32 settings plus the cyg dll naming"
	gcc_modifier_win32(conf)
	v = conf.env
	v['shlib_PATTERN'] = 'cyg%s.dll'
	v.append_value('shlib_LINKFLAGS', '-Wl,--enable-auto-image-base')
@conftest
def gcc_modifier_darwin(conf):
	"gcc settings for mac os x"
	v = conf.env
	v['shlib_CCFLAGS'] = ['-fPIC', '-compatibility_version', '1', '-current_version', '1']
	v['shlib_LINKFLAGS'] = ['-dynamiclib']
	v['shlib_PATTERN'] = 'lib%s.dylib'

	v['staticlib_LINKFLAGS'] = []

	# no -Bdynamic/-Bstatic style markers on this platform
	v['SHLIB_MARKER'] = ''
	v['STATICLIB_MARKER'] = ''
@conftest
def gcc_modifier_aix(conf):
	"gcc settings for aix"
	v = conf.env
	v['program_LINKFLAGS'] = ['-Wl,-brtl']
	v['shlib_LINKFLAGS'] = ['-shared','-Wl,-brtl,-bexpfull']

	v['SHLIB_MARKER'] = ''
@conftest
def gcc_modifier_platform(conf):
	"""Apply the platform-specific gcc settings.

	The destination platform comes from DEST_OS (detected from compiler
	macros) with sys.platform as the fallback; unknown platforms get no
	modifier.
	"""
	dest_os = conf.env['DEST_OS'] or Utils.unversioned_sys_platform()
	modifier = globals().get('gcc_modifier_' + dest_os)
	if modifier:
		modifier(conf)
def detect(conf):
	"""Configure the gcc C compiler: locate the tools, then set up the flags."""
	for step in ('find_gcc', 'find_cpp', 'find_ar', 'gcc_common_flags',
	             'gcc_modifier_platform', 'cc_load_tools', 'cc_add_flags'):
		getattr(conf, step)()

52
tools/wafadmin/Tools/gdc.py

@ -0,0 +1,52 @@
#!/usr/bin/env python
# encoding: utf-8
# Carlos Rafael Giani, 2007 (dv)
import sys
import Utils, ar
from Configure import conftest
@conftest
def find_gdc(conf):
	"""Locate the gdc D compiler; configuration aborts when it is missing."""
	conf.find_program('gdc', var='D_COMPILER', mandatory=True)
@conftest
def common_flags_gdc(conf):
	"""Baseline flags for building D code with gdc.

	For more info about the meaning of each variable see dmd.py.
	"""
	env = conf.env

	# _DFLAGS _DIMPORTFLAGS
	env['DFLAGS'] = []

	env['D_SRC_F'] = ''
	env['D_TGT_F'] = ['-c', '-o', '']
	env['DPATH_ST'] = '-I%s' # template for adding import paths

	# linker
	env['D_LINKER'] = env['D_COMPILER']
	env['DLNK_SRC_F'] = ''
	env['DLNK_TGT_F'] = ['-o', '']

	env['DLIB_ST'] = '-l%s' # template for adding libs
	env['DLIBPATH_ST'] = '-L%s' # template for adding libpaths

	# debug levels
	env['DLINKFLAGS'] = []
	env['DFLAGS_OPTIMIZED'] = ['-O3']
	env['DFLAGS_DEBUG'] = ['-O0']
	env['DFLAGS_ULTRADEBUG'] = ['-O0']

	env['D_shlib_DFLAGS'] = []
	env['D_shlib_LINKFLAGS'] = ['-shared']

	env['DHEADER_ext'] = '.di'
	env['D_HDR_F'] = '-fintfc -fintfc-file='
def detect(conf):
	"""Configure D with gdc: compiler, archiver, generic D support, then flags."""
	steps = (('find_gdc', ()),
	         ('check_tool', ('ar',)),
	         ('check_tool', ('d',)),
	         ('common_flags_gdc', ()),
	         ('d_platform_flags', ()))
	for meth, args in steps:
		getattr(conf, meth)(*args)

165
tools/wafadmin/Tools/glib2.py

@ -0,0 +1,165 @@
#! /usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2006-2008 (ita)
"GLib2 support"
import Task, Utils
from TaskGen import taskgen, before, after, feature
#
# glib-genmarshal
#
@taskgen
def add_marshal_file(self, filename, prefix):
	"""Register a marshal list file to be processed by glib-genmarshal."""
	self.marshal_list = getattr(self, 'marshal_list', [])
	# appended on every call, matching the historical behaviour
	self.meths.append('process_marshal')
	self.marshal_list.append((filename, prefix))
@before('apply_core')
def process_marshal(self):
	"""Create one glib_genmarshal task per registered marshal file.

	Each foo.list yields foo.h + foo.c; the generated .c is fed back into
	the build so it gets compiled.
	"""
	for filename, prefix in getattr(self, 'marshal_list', []):
		node = self.path.find_resource(filename)
		if node is None:
			raise Utils.WafError('file not found %r' % filename)

		outputs = [node.change_ext('.h'), node.change_ext('.c')]

		tsk = self.create_task('glib_genmarshal')
		tsk.set_inputs(node)
		tsk.set_outputs(outputs)
		tsk.env['GLIB_GENMARSHAL_PREFIX'] = prefix

		self.allnodes.append(outputs[1])
def genmarshal_func(self):
	"""Task body for glib-genmarshal: generate the header, then the body.

	The generated .c file #includes its own header so that it compiles
	standalone.  Returns the first non-zero exit status, or the status of
	the final command.
	"""
	bld = self.inputs[0].__class__.bld
	get = self.env.get_flat
	tool = get('GLIB_GENMARSHAL')
	prefix = get('GLIB_GENMARSHAL_PREFIX')
	src = self.inputs[0].srcpath(self.env)

	# step 1: the header
	cmd1 = "%s %s --prefix=%s --header > %s" % (
		tool, src, prefix, self.outputs[0].abspath(self.env))
	ret = bld.exec_command(cmd1)
	if ret: return ret

	# make the .c file include the generated header
	f = open(self.outputs[1].abspath(self.env), 'wb')
	try:
		f.write('''#include "%s"\n''' % self.outputs[0].name)
	finally:
		f.close()

	# step 2: append the marshaller bodies
	# fixed: previously ran through Utils.exec_command while step 1 used
	# bld.exec_command, so the two steps were executed/logged inconsistently
	cmd2 = "%s %s --prefix=%s --body >> %s" % (
		tool, src, prefix, self.outputs[1].abspath(self.env))
	return bld.exec_command(cmd2)
#
# glib-mkenums
#
@taskgen
def add_enums_from_template(self, source='', target='', template='', comments=''):
	"""Register a glib-mkenums run driven by a template file."""
	if not hasattr(self, 'enums_list'):
		self.enums_list = []
	self.meths.append('process_enums')
	# the production-rule slots stay empty: the template supplies them
	entry = dict.fromkeys(
		['file-head', 'file-prod', 'file-tail', 'enum-prod',
		 'value-head', 'value-prod', 'value-tail'], '')
	entry.update(source=source, target=target, template=template, comments=comments)
	self.enums_list.append(entry)
@taskgen
def add_enums(self, source='', target='',
		file_head='', file_prod='', file_tail='', enum_prod='',
		value_head='', value_prod='', value_tail='', comments=''):
	"""Register a glib-mkenums run with inline production rules (no template)."""
	if not hasattr(self, 'enums_list'):
		self.enums_list = []
	self.meths.append('process_enums')

	keys = ('file-head', 'file-prod', 'file-tail', 'enum-prod',
	        'value-head', 'value-prod', 'value-tail')
	vals = (file_head, file_prod, file_tail, enum_prod,
	        value_head, value_prod, value_tail)
	entry = dict(zip(keys, vals))
	entry['source'] = source
	entry['template'] = ''
	entry['target'] = target
	entry['comments'] = comments
	self.enums_list.append(entry)
@before('apply_core')
def process_enums(self):
	"""Create one glib_mkenums task per entry queued by add_enums*().

	Builds the command-line options from the entry, declares the target
	node (generated .c targets are fed back into the build) and wires the
	task inputs.
	"""
	for enum in getattr(self, 'enums_list', []):
		# each task stores per-task variables, so it needs its own env copy
		env = self.env.copy()
		task = self.create_task('glib_mkenums', env)
		inputs = []

		# process the source (mandatory)
		source_list = self.to_list(enum['source'])
		if not source_list:
			raise Utils.WafError('missing source ' + str(enum))
		source_list = [self.path.find_resource(k) for k in source_list]
		inputs += source_list
		env['GLIB_MKENUMS_SOURCE'] = [k.srcpath(env) for k in source_list]

		# find the target (mandatory); generated C code is compiled normally
		if not enum['target']:
			raise Utils.WafError('missing target ' + str(enum))
		tgt_node = self.path.find_or_declare(enum['target'])
		if tgt_node.name.endswith('.c'):
			self.allnodes.append(tgt_node)
		env['GLIB_MKENUMS_TARGET'] = tgt_node.abspath(env)

		options = []

		if enum['template']: # template, if provided
			template_node = self.path.find_resource(enum['template'])
			options.append('--template %s' % (template_node.abspath(env)))
			inputs.append(template_node)
		params = {'file-head' : '--fhead',
			'file-prod' : '--fprod',
			'file-tail' : '--ftail',
			'enum-prod' : '--eprod',
			'value-head' : '--vhead',
			'value-prod' : '--vprod',
			'value-tail' : '--vtail',
			'comments': '--comments'}
		# items() rather than the Python-2-only iteritems(): equivalent here
		# and forward-portable
		for param, option in params.items():
			if enum[param]:
				options.append('%s %r' % (option, enum[param]))
		env['GLIB_MKENUMS_OPTIONS'] = ' '.join(options)

		# update the task instance
		task.set_inputs(inputs)
		task.set_outputs(tgt_node)
# task producing the marshal .h/.c pair; scheduled before the C compilation
Task.task_type_from_func('glib_genmarshal', func=genmarshal_func, vars=['GLIB_GENMARSHAL_PREFIX', 'GLIB_GENMARSHAL'],
	color='BLUE', before='cc')
# shell one-liner running glib-mkenums; also scheduled before the C tasks
Task.simple_task_type('glib_mkenums',
	'${GLIB_MKENUMS} ${GLIB_MKENUMS_OPTIONS} ${GLIB_MKENUMS_SOURCE} > ${GLIB_MKENUMS_TARGET}',
	color='PINK', before='cc')
def detect(conf):
	"""Locate the glib-genmarshal and glib-mkenums code generators.

	find_program stores the result under the given var; the previous
	never-used local bindings were dropped.
	"""
	conf.find_program('glib-genmarshal', var='GLIB_GENMARSHAL')
	conf.find_program('glib-mkenums', var='GLIB_MKENUMS')

214
tools/wafadmin/Tools/gnome.py

@ -0,0 +1,214 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2006-2008 (ita)
"Gnome support"
import os, re
import TaskGen, Utils, Runner, Task, Build, Options, Logs
import cc
from Logs import error
from TaskGen import taskgen, before, after, feature
# extract the manpage title and section number from a DocBook refentry
n1_regexp = re.compile('<refentrytitle>(.*)</refentrytitle>', re.M)
n2_regexp = re.compile('<manvolnum>(.*)</manvolnum>', re.M)
def postinstall_schemas(prog_name):
	"""After 'waf install', register the program's GConf schema file.

	When installing into a staging directory (--destdir) only print the
	command the packager must run later.
	"""
	if not Build.bld.is_install:
		return
	path = Build.bld.get_install_path('${PREFIX}/etc/gconf/schemas/%s.schemas' % prog_name)
	if Options.options.destdir:
		Utils.pprint('YELLOW', 'GConf schema not installed. After install, run this:')
		Utils.pprint('YELLOW', 'gconftool-2 --install-schema-file=%s' % path)
	else:
		# add the gconf schema
		Utils.pprint('YELLOW', 'Installing GConf schema')
		Utils.exec_command('gconftool-2 --install-schema-file=%s 1> /dev/null' % path)
def postinstall_icons():
	"""Refresh the hicolor icon cache after a real (non-destdir) install."""
	path = Build.bld.get_install_path('${DATADIR}/icons/hicolor')
	if not Build.bld.is_install:
		return
	if Options.options.destdir:
		Utils.pprint('YELLOW', 'Icon cache not updated. After install, run this:')
		Utils.pprint('YELLOW', 'gtk-update-icon-cache -q -f -t %s' % path)
	else:
		# update the pixmap cache directory
		Utils.pprint('YELLOW', "Updating Gtk icon cache.")
		Utils.exec_command('gtk-update-icon-cache -q -f -t %s' % path)
def postinstall_scrollkeeper(prog_name):
	"""Refresh the scrollkeeper (OMF) catalogue after 'waf install'.

	Only attempted when the scrollkeeper log file is writable, which doubles
	as the "scrollkeeper is present" check.
	"""
	if not Build.bld.is_install:
		return
	if not os.access('/var/log/scrollkeeper.log', os.W_OK):
		return
	omf_repo = Build.bld.get_install_path('${PREFIX}/var/scrollkeeper')
	omf_dir = Build.bld.get_install_path('${DATADIR}/omf/%s' % prog_name)
	Utils.exec_command('scrollkeeper-update -q -p %s -o %s' % (omf_repo, omf_dir))
def postinstall(prog_name='myapp', schemas=1, icons=1, scrollkeeper=1):
	"""Run the selected post-installation hooks (GConf, icon cache, scrollkeeper)."""
	if schemas:
		postinstall_schemas(prog_name)
	if icons:
		postinstall_icons()
	if scrollkeeper:
		postinstall_scrollkeeper(prog_name)
# OBSOLETE
class gnome_doc_taskgen(TaskGen.task_gen):
	"""Obsolete shim kept so old wscripts using the class name still work;
	the behaviour lives in the 'gnome_doc' feature methods below."""
	def __init__(self, *k, **kw):
		TaskGen.task_gen.__init__(self, *k, **kw)
@feature('gnome_doc')
def init_gnome_doc(self):
	"""Set the default installation prefix for gnome documentation."""
	self.default_install_path = '${PREFIX}/share'
@feature('gnome_doc')
@after('init_gnome_doc')
def apply_gnome_doc(self):
	"""Translate and install gnome documentation for each lingua.

	For every language in doc_linguas: merge the language .po into the
	DocBook sources (xml2po), generate the .omf metadata (xsltproc2po) and,
	at install time, copy documents and figures into place.
	"""
	self.env['APPNAME'] = self.doc_module
	lst = self.to_list(self.doc_linguas)
	bld = self.bld
	for x in lst:
		tsk = self.create_task('xml2po')
		node = self.path.find_resource(x+'/'+x+'.po')
		src = self.path.find_resource('C/%s.xml' % self.doc_module)
		out = self.path.find_or_declare('%s/%s.xml' % (x, self.doc_module))
		tsk.set_inputs([node, src])
		tsk.set_outputs(out)

		tsk2 = self.create_task('xsltproc2po')
		out2 = self.path.find_or_declare('%s/%s-%s.omf' % (x, self.doc_module, x))
		tsk2.set_outputs(out2)
		node = self.path.find_resource(self.doc_module+".omf.in")
		tsk2.inputs = [node, out]
		# the .omf is produced from the translated document
		tsk2.run_after.append(tsk)

		if bld.is_install:
			path = self.install_path + 'gnome/help/%s/%s' % (self.doc_module, x)
			bld.install_files(self.install_path + 'omf', out2, env=self.env)
			for y in self.to_list(self.doc_figures):
				try:
					# install the translated figure when it exists...
					os.stat(self.path.abspath() + '/' + x + '/' + y)
					bld.install_as(path + '/' + y, self.path.abspath() + '/' + x + '/' + y)
				except OSError:
					# ...falling back to the untranslated one
					# (was a bare 'except', which also swallowed KeyboardInterrupt)
					bld.install_as(path + '/' + y, self.path.abspath() + '/C/' + y)
			bld.install_as(path + '/%s.xml' % self.doc_module, out.abspath(self.env))
# OBSOLETE
class xml_to_taskgen(TaskGen.task_gen):
	"""Obsolete shim for the 'xml_to' feature; kept for backward compatibility."""
	def __init__(self, *k, **kw):
		TaskGen.task_gen.__init__(self, *k, **kw)
@feature('xml_to')
def init_xml_to(self):
	"""Provide the default attributes consumed by apply_xml_to."""
	Utils.def_attrs(
		self,
		source = 'xmlfile',
		xslt = 'xlsltfile',  # NOTE(review): looks like a typo for 'xsltfile' upstream - kept as-is
		target = 'hey',
		default_install_path = '${PREFIX}',
		task_created = None)
@feature('xml_to')
@after('init_xml_to')
def apply_xml_to(self):
	"""Create an 'xmlto' task transforming self.source with self.xslt into html."""
	xml_node = self.path.find_resource(self.source)
	xsl_node = self.path.find_resource(self.xslt)

	tsk = self.create_task('xmlto')
	tsk.set_inputs([xml_node, xsl_node])
	tsk.set_outputs(xml_node.change_ext('html'))
	tsk.install_path = self.install_path
def sgml_scan(self):
	"""Scanner for sgml2man tasks: derive the manpage name from the document.

	Pulls <refentrytitle> and <manvolnum> out with regexes (crude, but
	avoids a real SGML parser) and declares 'title.section' as the output.
	"""
	node = self.inputs[0]
	env = self.env
	variant = node.variant(env)

	fi = open(node.abspath(env), 'r')
	try:
		content = fi.read()
	finally:
		fi.close()

	# we should use a real sgml parser :-/
	name = n1_regexp.findall(content)[0]
	num = n2_regexp.findall(content)[0]
	doc_name = name + '.' + num

	if not self.outputs:
		self.outputs = [self.generator.path.find_or_declare(doc_name)]

	return ([], [doc_name])
class gnome_sgml2man_taskgen(TaskGen.task_gen):
	"""Task generator shim for the 'gnome_sgml2man' feature."""
	def __init__(self, *k, **kw):
		TaskGen.task_gen.__init__(self, *k, **kw)
@feature('gnome_sgml2man')
def apply_gnome_sgml2man(self):
	"""
	we could make it more complicated, but for now we just scan the document each time
	"""
	# NOTE(review): 'assert' disappears under python -O; raising an explicit
	# error would be safer - confirm before changing the exception type
	assert(getattr(self, 'appname', None))

	def install_result(task):
		# install the manpage into the section derived from its last character
		# (e.g. foo.1 -> man1)
		out = task.outputs[0]
		name = out.name
		ext = name[-1]
		env = task.env
		self.bld.install_files('${DATADIR}/man/man%s/' % ext, out, env)

	# refresh the directory listing, then create one task per .sgml file found
	self.bld.rescan(self.path)
	for name in self.bld.cache_dir_contents[self.path.id]:
		base, ext = os.path.splitext(name)
		if ext != '.sgml': continue

		task = self.create_task('sgml2man')
		task.set_inputs(self.path.find_resource(name))
		task.task_generator = self
		if self.bld.is_install: task.install = install_result
		# no outputs, the scanner does it
		# no caching for now, this is not a time-critical feature
		# in the future the scanner can be used to do more things (find dependencies, etc)
		task.scan()
# manpage generation; output name comes from the scanner, so keep it quiet
cls = Task.simple_task_type('sgml2man', '${SGML2MAN} -o ${TGT[0].bld_dir(env)} ${SRC} > /dev/null', color='BLUE')
cls.scan = sgml_scan
cls.quiet = 1

Task.simple_task_type('xmlto', '${XMLTO} html -m ${SRC[1].abspath(env)} ${SRC[0].abspath(env)}')

Task.simple_task_type('xml2po', '${XML2PO} ${XML2POFLAGS} ${SRC} > ${TGT}', color='BLUE')

# how do you expect someone to understand this?!
# (xsltproc invocation generating the .omf from the .omf.in and the
#  translated DocBook document, parameterised for gnome-doc-utils)
xslt_magic = """${XSLTPROC2PO} -o ${TGT[0].abspath(env)} \
--stringparam db2omf.basename ${APPNAME} \
--stringparam db2omf.format docbook \
--stringparam db2omf.lang C \
--stringparam db2omf.dtd '-//OASIS//DTD DocBook XML V4.3//EN' \
--stringparam db2omf.omf_dir ${PREFIX}/share/omf \
--stringparam db2omf.help_dir ${PREFIX}/share/gnome/help \
--stringparam db2omf.omf_in ${SRC[0].abspath(env)} \
--stringparam db2omf.scrollkeeper_cl ${SCROLLKEEPER_DATADIR}/Templates/C/scrollkeeper_cl.xml \
${DB2OMF} ${SRC[1].abspath(env)}"""

#--stringparam db2omf.dtd '-//OASIS//DTD DocBook XML V4.3//EN' \
Task.simple_task_type('xsltproc2po', xslt_magic, color='BLUE')
def detect(conf):
	"""Check for the gnome tool-chain: docbook2man, xml2po, xsltproc and the
	gnome-doc-utils data, and define GNOMELOCALEDIR.

	The previously-defined nested helper 'getstr' and the never-used local
	bindings of the find_program results were removed (find_program already
	stores the result under var=...).
	"""
	conf.check_tool('gnu_dirs glib2 dbus')
	conf.find_program('docbook2man', var='SGML2MAN')

	# conf.define also stores the value in the environment
	conf.define('GNOMELOCALEDIR', os.path.join(conf.env['DATADIR'], 'locale'))

	conf.find_program('xml2po', var='XML2PO')
	conf.find_program('xsltproc', var='XSLTPROC2PO')
	conf.env['XML2POFLAGS'] = '-e -p'
	conf.env['SCROLLKEEPER_DATADIR'] = Utils.cmd_output("scrollkeeper-config --pkgdatadir", silent=1).strip()
	conf.env['DB2OMF'] = Utils.cmd_output("/usr/bin/pkg-config --variable db2omf gnome-doc-utils", silent=1).strip()
def set_options(opt):
	"""Add the --want-rpath configuration option."""
	opt.add_option('--want-rpath', type='int', default=1, dest='want_rpath',
	               help='set rpath to 1 or 0 [Default 1]')

111
tools/wafadmin/Tools/gnu_dirs.py

@ -0,0 +1,111 @@
#!/usr/bin/env python
# encoding: utf-8
# Ali Sabil, 2007
"""
To use this module do not forget to call
opt.tool_options('gnu_dirs')
AND
conf.check_tool('gnu_dirs')
Add options for the standard GNU directories, this tool will add the options
found in autotools, and will update the environment with the following
installation variables:
* PREFIX : architecture-independent files [/usr/local]
* EXEC_PREFIX : architecture-dependent files [PREFIX]
* BINDIR : user executables [EXEC_PREFIX/bin]
* SBINDIR : user executables [EXEC_PREFIX/sbin]
* LIBEXECDIR : program executables [EXEC_PREFIX/libexec]
* SYSCONFDIR : read-only single-machine data [PREFIX/etc]
* SHAREDSTATEDIR : modifiable architecture-independent data [PREFIX/com]
* LOCALSTATEDIR : modifiable single-machine data [PREFIX/var]
* LIBDIR : object code libraries [EXEC_PREFIX/lib]
* INCLUDEDIR : C header files [PREFIX/include]
* OLDINCLUDEDIR : C header files for non-gcc [/usr/include]
* DATAROOTDIR : read-only arch.-independent data root [PREFIX/share]
* DATADIR : read-only architecture-independent data [DATAROOTDIR]
* INFODIR : info documentation [DATAROOTDIR/info]
* LOCALEDIR : locale-dependent data [DATAROOTDIR/locale]
* MANDIR : man documentation [DATAROOTDIR/man]
* DOCDIR : documentation root [DATAROOTDIR/doc/telepathy-glib]
* HTMLDIR : html documentation [DOCDIR]
* DVIDIR : dvi documentation [DOCDIR]
* PDFDIR : pdf documentation [DOCDIR]
* PSDIR : ps documentation [DOCDIR]
"""
import Utils, Options
# (name, description, default) triples parsed from the table below;
# the ${...} placeholders are substituted iteratively in detect()
_options = [x.split(', ') for x in '''
bindir, user executables, ${EXEC_PREFIX}/bin
sbindir, system admin executables, ${EXEC_PREFIX}/sbin
libexecdir, program executables, ${EXEC_PREFIX}/libexec
sysconfdir, read-only single-machine data, ${PREFIX}/etc
sharedstatedir, modifiable architecture-independent data, ${PREFIX}/com
localstatedir, modifiable single-machine data, ${PREFIX}/var
libdir, object code libraries, ${EXEC_PREFIX}/lib
includedir, C header files, ${PREFIX}/include
oldincludedir, C header files for non-gcc, /usr/include
datarootdir, read-only arch.-independent data root, ${PREFIX}/share
datadir, read-only architecture-independent data, ${DATAROOTDIR}
infodir, info documentation, ${DATAROOTDIR}/info
localedir, locale-dependent data, ${DATAROOTDIR}/locale
mandir, man documentation, ${DATAROOTDIR}/man
docdir, documentation root, ${DATAROOTDIR}/doc/${PACKAGE}
htmldir, html documentation, ${DOCDIR}
dvidir, dvi documentation, ${DOCDIR}
pdfdir, pdf documentation, ${DOCDIR}
psdir, ps documentation, ${DOCDIR}
'''.split('\n') if x]
def detect(conf):
	"""Resolve every GNU directory variable into conf.env.

	Substitution may reference variables that are themselves not resolved
	yet (e.g. ${DATAROOTDIR}); iterate until a full pass succeeds — one
	pass more than len(_options) guarantees termination.

	Raises Utils.WafError listing the unresolved variables on failure.
	Fixed: the loop counters previously shadowed the builtins iter and help.
	"""
	def get_param(varname, default):
		# a value given on the command line wins over the built-in default
		return getattr(Options.options, varname, '') or default

	env = conf.env
	env['EXEC_PREFIX'] = get_param('EXEC_PREFIX', env['PREFIX'])
	env['PACKAGE'] = Utils.g_module.APPNAME or env['PACKAGE']

	complete = False
	attempts = 0
	while not complete and attempts < len(_options) + 1:
		attempts += 1
		complete = True
		for name, _description, default in _options:
			name = name.upper()
			if not env[name]:
				try:
					env[name] = Utils.subst_vars(get_param(name, default), env)
				except TypeError:
					# a referenced variable is still empty; retry on the next pass
					complete = False
	if not complete:
		lst = [name for name, _, _ in _options if not env[name.upper()]]
		raise Utils.WafError('Variable substitution failure %r' % lst)
def set_options(opt):
	"""Declare --prefix/--destdir in their own group plus one option per GNU directory."""
	inst_dir = opt.add_option_group('Installation directories',
'By default, "waf install" will put the files in\
 "/usr/local/bin", "/usr/local/lib" etc. An installation prefix other\
 than "/usr/local" can be given using "--prefix", for example "--prefix=$HOME"')

	# move the generic --prefix/--destdir options into this group
	for flag in ('--prefix', '--destdir'):
		option = opt.parser.get_option(flag)
		if option:
			opt.parser.remove_option(flag)
			inst_dir.add_option(option)

	inst_dir.add_option('--exec-prefix',
		help = 'installation prefix [Default: ${PREFIX}]',
		default = '',
		dest = 'EXEC_PREFIX')

	dirs_options = opt.add_option_group('Pre-defined installation directories', '')

	for name, description, default in _options:
		dirs_options.add_option('--' + name,
			help = '%s [Default: %s]' % (description, default),
			default = '',
			dest = name.upper())

18
tools/wafadmin/Tools/gob2.py

@ -0,0 +1,18 @@
#!/usr/bin/env python
# encoding: utf-8
# Ali Sabil, 2007
import TaskGen
# transformation chain: every .gob source is run through gob2 to produce a .c
# file (written into the corresponding build directory)
TaskGen.declare_chain(
	name = 'gob2',
	rule = '${GOB2} -o ${TGT[0].bld_dir(env)} ${GOB2FLAGS} ${SRC}',
	ext_in = '.gob',
	ext_out = '.c'
)
def detect(conf):
	"""Find the gob2 code generator and initialise its flags."""
	conf.env['GOB2'] = conf.find_program('gob2', var='GOB2', mandatory=True)
	conf.env['GOB2FLAGS'] = ''

125
tools/wafadmin/Tools/gxx.py

@ -0,0 +1,125 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2006 (ita)
# Ralf Habacker, 2006 (rh)
# Yinon Ehrlich, 2009
import os, sys
import Configure, Options, Utils
import ccroot, ar
from Configure import conftest
@conftest
def find_gxx(conf):
	"""Find g++ (or c++), record its version and register it as the C++ compiler."""
	cxx = conf.cmd_to_list(conf.find_program(['g++', 'c++'], var='CXX', mandatory=True))
	ccroot.get_cc_version(conf, cxx, gcc=True)
	conf.env.CXX_NAME = 'gcc'
	conf.env.CXX = cxx
@conftest
def gxx_common_flags(conf):
	"""Baseline g++ flags and file-name patterns shared by all platforms."""
	env = conf.env

	# CPPFLAGS CXXDEFINES _CXXINCFLAGS _CXXDEFFLAGS
	env['CXX_SRC_F'] = ''
	env['CXX_TGT_F'] = ['-c', '-o', ''] # shell hack for -MD
	env['CPPPATH_ST'] = '-I%s' # template for adding include paths

	# linker
	if not env['LINK_CXX']: env['LINK_CXX'] = env['CXX']
	env['CXXLNK_SRC_F'] = ''
	env['CXXLNK_TGT_F'] = ['-o', ''] # shell hack for -MD

	env['LIB_ST'] = '-l%s' # template for adding libs
	env['LIBPATH_ST'] = '-L%s' # template for adding libpaths
	env['STATICLIB_ST'] = '-l%s'
	env['STATICLIBPATH_ST'] = '-L%s'
	env['RPATH_ST'] = '-Wl,-rpath,%s'
	env['CXXDEFINES_ST'] = '-D%s'

	env['SONAME_ST'] = '-Wl,-h,%s'
	env['SHLIB_MARKER'] = '-Wl,-Bdynamic'
	env['STATICLIB_MARKER'] = '-Wl,-Bstatic'
	env['FULLSTATIC_MARKER'] = '-static'

	# program
	env['program_PATTERN'] = '%s'

	# shared library
	env['shlib_CXXFLAGS'] = ['-fPIC', '-DPIC'] # -fPIC already defines __PIC__; -DPIC kept for legacy code
	env['shlib_LINKFLAGS'] = ['-shared']
	env['shlib_PATTERN'] = 'lib%s.so'

	# static lib
	env['staticlib_LINKFLAGS'] = ['-Wl,-Bstatic']
	env['staticlib_PATTERN'] = 'lib%s.a'

	# osx stuff
	env['LINKFLAGS_MACBUNDLE'] = ['-bundle', '-undefined', 'dynamic_lookup']
	env['CCFLAGS_MACBUNDLE'] = ['-fPIC']
	env['macbundle_PATTERN'] = '%s.bundle'
@conftest
def gxx_modifier_win32(conf):
	"""Tune the g++ environment for native win32 (MinGW) targets."""
	env = conf.env
	env['program_PATTERN'] = '%s.exe'

	env['shlib_PATTERN'] = '%s.dll'
	env['implib_PATTERN'] = 'lib%s.dll.a'
	env['IMPLIB_ST'] = '-Wl,--out-implib,%s'

	env['shlib_CXXFLAGS'] = ['-DPIC', '-DDLL_EXPORT'] # TODO 64-bit platforms may need -fPIC

	# Auto-import is enabled by default even without this option,
	# but enabling it explicitly has the nice effect of suppressing the rather boring, debug-level messages
	# that the linker emits otherwise.
	env.append_value('LINKFLAGS', '-Wl,--enable-auto-import')
@conftest
def gxx_modifier_cygwin(conf):
	"""Cygwin is win32 plus the 'cyg' shared-library prefix and auto image base."""
	gxx_modifier_win32(conf)
	env = conf.env
	env['shlib_PATTERN'] = 'cyg%s.dll'
	env.append_value('shlib_LINKFLAGS', '-Wl,--enable-auto-image-base')
@conftest
def gxx_modifier_darwin(conf):
	"""Mac OS X: dylib naming/versioning, and no -Bstatic/-Bdynamic markers."""
	env = conf.env
	env['shlib_CXXFLAGS'] = ['-fPIC', '-compatibility_version', '1', '-current_version', '1']
	env['shlib_LINKFLAGS'] = ['-dynamiclib']
	env['shlib_PATTERN'] = 'lib%s.dylib'

	env['staticlib_LINKFLAGS'] = []
	env['SHLIB_MARKER'] = ''
	env['STATICLIB_MARKER'] = ''
@conftest
def gxx_modifier_aix(conf):
	"""AIX: run-time linking for programs and shared objects."""
	env = conf.env
	env['program_LINKFLAGS'] = ['-Wl,-brtl']
	env['shlib_LINKFLAGS'] = ['-shared', '-Wl,-brtl,-bexpfull']
	env['SHLIB_MARKER'] = ''
@conftest
def gxx_modifier_platform(conf):
	"""Dispatch to the gxx_modifier_<os> hook matching the destination platform.

	The destination OS comes from the compiler's predefined macros when
	available (DEST_OS), falling back to sys.platform via Utils.
	"""
	target_os = conf.env['DEST_OS'] or Utils.unversioned_sys_platform()
	modifier = globals().get('gxx_modifier_' + target_os)
	if modifier:
		modifier(conf)
def detect(conf):
	"""Configure the g++ compiler: locate the tools, then set up the flags."""
	for step in ('find_gxx', 'find_cpp', 'find_ar', 'gxx_common_flags',
	             'gxx_modifier_platform', 'cxx_load_tools', 'cxx_add_flags'):
		getattr(conf, step)()

36
tools/wafadmin/Tools/icc.py

@ -0,0 +1,36 @@
#!/usr/bin/env python
# encoding: utf-8
# Stian Selnes, 2008
# Thomas Nagy 2009
import os, sys
import Configure, Options, Utils
import ccroot, ar, gcc
from Configure import conftest
@conftest
def find_icc(conf):
	"""Locate the Intel C compiler (icc, or ICL) and register it as CC.

	Lookup order: an already-configured CC, the CC environment variable,
	then the icc/ICL executables on the PATH.
	"""
	if sys.platform == 'cygwin':
		conf.fatal('The Intel compiler does not work on Cygwin')

	v = conf.env
	cc = v['CC']
	if not cc and 'CC' in conf.environ:
		cc = conf.environ['CC']
	if not cc: cc = conf.find_program('icc', var='CC')
	if not cc: cc = conf.find_program('ICL', var='CC')
	if not cc: conf.fatal('Intel C Compiler (icc) was not found')

	cc = conf.cmd_to_list(cc)
	ccroot.get_cc_version(conf, cc, icc=True)
	v['CC'] = cc
	v['CC_NAME'] = 'icc'
# configuration steps executed in order by Configure; icc reuses gcc's
# flag/platform helpers since it is gcc-compatible on the command line
detect = '''
find_icc
find_ar
gcc_common_flags
gcc_modifier_platform
cc_load_tools
cc_add_flags
'''

34
tools/wafadmin/Tools/icpc.py

@ -0,0 +1,34 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy 2009
import os, sys
import Configure, Options, Utils
import ccroot, ar, gxx
from Configure import conftest
@conftest
def find_icpc(conf):
	"""Locate the Intel C++ compiler (icpc) and register it as CXX.

	Lookup order: an already-configured CXX, the CXX environment variable,
	then the icpc executable on the PATH.
	"""
	if sys.platform == 'cygwin':
		conf.fatal('The Intel compiler does not work on Cygwin')

	v = conf.env
	cxx = v['CXX']
	if not cxx and 'CXX' in conf.environ:
		cxx = conf.environ['CXX']
	if not cxx: cxx = conf.find_program('icpc', var='CXX')
	if not cxx: conf.fatal('Intel C++ Compiler (icpc) was not found')

	cxx = conf.cmd_to_list(cxx)
	ccroot.get_cc_version(conf, cxx, icc=True)
	v['CXX'] = cxx
	v['CXX_NAME'] = 'icc'
# configuration steps executed in order by Configure; icpc reuses g++'s
# flag/platform helpers since it is gcc-compatible on the command line
detect = '''
find_icpc
find_ar
gxx_common_flags
gxx_modifier_platform
cxx_load_tools
cxx_add_flags
'''

143
tools/wafadmin/Tools/intltool.py

@ -0,0 +1,143 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2006 (ita)
"intltool support"
import os, re
import Configure, TaskGen, Task, Utils, Runner, Options, Build, config_c
from TaskGen import feature, before, taskgen
from Logs import error
"""
Usage:
bld.new_task_gen(features='intltool_in', source='a.po b.po', podir='po', cache='.intlcache', flags='')
"""
class intltool_in_taskgen(TaskGen.task_gen):
	"""deprecated"""
	# kept so wscripts using the class name still work; see the
	# 'intltool_in' feature method below
	def __init__(self, *k, **kw):
		TaskGen.task_gen.__init__(self, *k, **kw)
@before('apply_core')
@feature('intltool_in')
def iapply_intltool_in_f(self):
	"""Create one 'intltool' task per source file, replacing apply_core.

	Each input file is merged with the translations found in the po
	directory (default 'po'), producing the file without its extension.
	"""
	try:
		self.meths.remove('apply_core')
	except ValueError:
		pass

	for src in self.to_list(self.source):
		node = self.path.find_resource(src)

		podir = getattr(self, 'podir', 'po')
		podirnode = self.path.find_dir(podir)
		if podirnode is None:
			error("could not find the podir %r" % podir)
			continue

		cache = getattr(self, 'intlcache', '.intlcache')
		self.env['INTLCACHE'] = os.path.join(self.path.bldpath(self.env), podir, cache)
		self.env['INTLPODIR'] = podirnode.srcpath(self.env)
		self.env['INTLFLAGS'] = getattr(self, 'flags', ['-q', '-u', '-c'])

		tsk = self.create_task('intltool')
		tsk.set_inputs(node)
		tsk.set_outputs(node.change_ext(''))
		tsk.install_path = self.install_path
class intltool_po_taskgen(TaskGen.task_gen):
	"""deprecated"""
	# kept so wscripts using the class name still work; see the
	# 'intltool_po' feature method below
	def __init__(self, *k, **kw):
		TaskGen.task_gen.__init__(self, *k, **kw)
@feature('intltool_po')
def apply_intltool_po(self):
	"""Compile every locale listed in po/LINGUAS to a .mo and install it.

	Locales come from the LINGUAS file (comment lines ignored); each
	<lang>.po becomes <lang>.mo, installed at install time as
	<lang>/LC_MESSAGES/<appname>.mo under the locale directory.

	Fixes: the file handle was bound to a local shadowing the builtin
	'file' and leaked on read errors; the locale regex was matched twice.
	"""
	try:
		self.meths.remove('apply_core')
	except ValueError:
		pass

	self.default_install_path = '${LOCALEDIR}'
	appname = getattr(self, 'appname', 'set_your_app_name')
	podir = getattr(self, 'podir', '')

	def install_translation(task):
		out = task.outputs[0]
		filename = out.name
		(langname, ext) = os.path.splitext(filename)
		inst_file = langname + os.sep + 'LC_MESSAGES' + os.sep + appname + '.mo'
		self.bld.install_as(os.path.join(self.install_path, inst_file), out, self.env, self.chmod)

	linguas = self.path.find_resource(os.path.join(podir, 'LINGUAS'))
	if not linguas:
		Utils.pprint('RED', "Error no LINGUAS file found in po directory")
		return

	# scan LINGUAS file for locales to process
	f = open(linguas.abspath())
	try:
		langs = []
		for line in f.readlines():
			# ignore lines containing comments
			if not line.startswith('#'):
				langs += line.split()
	finally:
		f.close()

	re_linguas = re.compile('[-a-zA-Z_@.]+')
	for lang in langs:
		# Make sure that we only process lines which contain locales
		match = re_linguas.match(lang)
		if match:
			node = self.path.find_resource(os.path.join(podir, match.group() + '.po'))
			task = self.create_task('po')
			task.set_inputs(node)
			task.set_outputs(node.change_ext('.mo'))
			if self.bld.is_install:
				task.install = install_translation
# msgfmt compilation of a .po into a .mo
Task.simple_task_type('po', '${POCOM} -o ${TGT} ${SRC}', color='BLUE', shell=False)
# intltool-merge run, scheduled after linking so translated data is last
Task.simple_task_type('intltool',
	'${INTLTOOL} ${INTLFLAGS} ${INTLCACHE} ${INTLPODIR} ${SRC} ${TGT}',
	color='BLUE', after="cc_link cxx_link", shell=False)
def detect(conf):
	"""Locate msgfmt and intltool-merge (with a perl-based fallback on win32)
	and define LOCALEDIR/DATADIR for the translation machinery."""
	pocom = conf.find_program('msgfmt')
	if not pocom:
		# if msgfmt should not be mandatory, catch the thrown exception in your wscript
		conf.fatal('The program msgfmt (gettext) is mandatory!')
	conf.env['POCOM'] = pocom

	# NOTE: it is possible to set INTLTOOL in the environment, but it must not have spaces in it
	intltool = conf.find_program('intltool-merge', var='INTLTOOL')
	if not intltool:
		# if intltool-merge should not be mandatory, catch the thrown exception in your wscript
		if Options.platform == 'win32':
			# intltool-merge is a perl script: locate perl, then the script
			# itself on the PATH, and run it through perl
			perl = conf.find_program('perl', var='PERL')
			if not perl:
				conf.fatal('The program perl (required by intltool) could not be found')

			intltooldir = Configure.find_file('intltool-merge', os.environ['PATH'].split(os.pathsep))
			if not intltooldir:
				conf.fatal('The program intltool-merge (intltool, gettext-devel) is mandatory!')

			conf.env['INTLTOOL'] = Utils.to_list(conf.env['PERL']) + [intltooldir + os.sep + 'intltool-merge']
			conf.check_message('intltool', '', True, ' '.join(conf.env['INTLTOOL']))
		else:
			conf.fatal('The program intltool-merge (intltool, gettext-devel) is mandatory!')

	def getstr(varname):
		# read an option given on the command line, or '' when absent
		return getattr(Options.options, varname, '')

	prefix = conf.env['PREFIX']
	datadir = getstr('datadir')
	if not datadir: datadir = os.path.join(prefix,'share')

	conf.define('LOCALEDIR', os.path.join(datadir, 'locale'))
	conf.define('DATADIR', datadir)

	if conf.env['CC'] or conf.env['CXX']:
		# Define to 1 if <locale.h> is present
		conf.check(header_name='locale.h')
def set_options(opt):
	"""Register the intltool-related configuration options."""
	opt.add_option('--want-rpath', type='int', default=1, dest='want_rpath',
	               help='set rpath to 1 or 0 [Default 1]')
	opt.add_option('--datadir', type='string', default='', dest='datadir',
	               help='read-only application data')

255
tools/wafadmin/Tools/javaw.py

@ -0,0 +1,255 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2006-2008 (ita)
"""
Java support
Javac is one of the few compilers that behaves very badly:
* it outputs files where it wants to (-d is only for the package root)
* it recompiles files silently behind your back
* it outputs an undefined amount of files (inner classes)
Fortunately, the convention makes it possible to use the build dir without
too many problems for the moment
Inner classes must be located and cleaned when a problem arise,
for the moment waf does not track the production of inner classes.
Adding all the files to a task and executing it if any of the input files
change is only annoying for the compilation times
Compilation can be run using Jython[1] rather than regular Python. Instead of
running one of the following commands:
./waf configure
python waf configure
You would have to run:
java -jar /path/to/jython.jar waf configure
[1] http://www.jython.org/
"""
import os, re
from Configure import conf
import TaskGen, Task, Utils, Options, Build
from TaskGen import feature, before, taskgen
class_check_source = '''
public class Test {
public static void main(String[] argv) {
Class lib;
if (argv.length < 1) {
System.err.println("Missing argument");
System.exit(77);
}
try {
lib = Class.forName(argv[0]);
} catch (ClassNotFoundException e) {
System.err.println("ClassNotFoundException");
System.exit(1);
}
lib = null;
System.exit(0);
}
}
'''
@feature('jar')
@before('apply_core')
def jar_files(self):
	"""Create a 'jar_create' task archiving everything found under self.basedir.

	The jar contains every file in the build directory below basedir except
	the jar itself.
	"""
	basedir = getattr(self, 'basedir', '.')
	destfile = getattr(self, 'destfile', 'test.jar')
	jaropts = getattr(self, 'jaropts', [])
	jarcreate = getattr(self, 'jarcreate', 'cf')

	dir_node = self.path.find_dir(basedir)
	if not dir_node:
		# the original had a bare 'raise' with no active exception, which
		# itself blows up without a diagnostic; fail with a real message
		raise Utils.WafError('jar: basedir %r not found' % basedir)

	jaropts.append('-C')
	jaropts.append(dir_node.abspath(self.env))
	jaropts.append('.')

	out = self.path.find_or_declare(destfile)
	tsk = self.create_task('jar_create')
	tsk.set_outputs(out)
	# archive everything in the build dir except the jar being produced
	tsk.inputs = [x for x in dir_node.find_iter(src=0, bld=1) if x.id != out.id]
	tsk.env['JAROPTS'] = jaropts
	tsk.env['JARCREATE'] = jarcreate
@feature('javac')
@before('apply_core')
def apply_java(self):
	"""Create the 'javac' task (and optionally a 'jar_create' task) for the
	java sources found under self.srcdir."""
	Utils.def_attrs(self, jarname='', jaropts='', classpath='',
		sourcepath='.', srcdir='.', source_re='**/*.java',
		jar_mf_attributes={}, jar_mf_classpath=[])

	if getattr(self, 'source_root', None):
		# old stuff
		self.srcdir = self.source_root

	nodes_lst = []

	# default classpath: parent dir and current dir, unless given explicitly
	if not self.classpath:
		if not self.env['CLASSPATH']:
			self.env['CLASSPATH'] = '..' + os.pathsep + '.'
	else:
		self.env['CLASSPATH'] = self.classpath

	srcdir_node = self.path.find_dir(self.srcdir)
	if not srcdir_node:
		raise Utils.WafError('could not find srcdir %r' % self.srcdir)

	# one .class output per .java input (inner classes are handled after the run)
	src_nodes = [x for x in srcdir_node.ant_glob(self.source_re, flat=False)]
	bld_nodes = [x.change_ext('.class') for x in src_nodes]

	self.env['OUTDIR'] = [srcdir_node.abspath(self.env)]

	tsk = self.create_task('javac')
	tsk.set_inputs(src_nodes)
	tsk.set_outputs(bld_nodes)

	if getattr(self, 'compat', None):
		tsk.env.append_value('JAVACFLAGS', ['-source', self.compat])

	if hasattr(self, 'sourcepath'):
		# NOTE(review): def_attrs above always sets 'sourcepath', so this
		# branch is always taken and the else below is dead - confirm intent
		fold = [self.path.find_dir(x) for x in self.to_list(self.sourcepath)]
		names = os.pathsep.join([x.srcpath() for x in fold])
	else:
		names = srcdir_node.srcpath()

	if names:
		tsk.env.append_value('JAVACFLAGS', ['-sourcepath', names])

	if self.jarname:
		# package the generated classes; the javac task outputs are the inputs
		tsk = self.create_task('jar_create')
		tsk.set_inputs(bld_nodes)
		tsk.set_outputs(self.path.find_or_declare(self.jarname))

		if not self.env['JAROPTS']:
			if self.jaropts:
				self.env['JAROPTS'] = self.jaropts
			else:
				dirs = '.'
				self.env['JAROPTS'] = ['-C', ''.join(self.env['OUTDIR']), dirs]
# jar archiving and java compilation rules
Task.simple_task_type('jar_create', '${JAR} ${JARCREATE} ${TGT} ${JAROPTS}', color='GREEN')
cls = Task.simple_task_type('javac', '${JAVAC} -classpath ${CLASSPATH} -d ${OUTDIR} ${JAVACFLAGS} ${SRC}')
cls.color = 'BLUE'
def post_run_javac(self):
	"""Claim javac's inner-class files as task outputs.

	javac creates one .class per inner class and their names cannot be
	known in advance, so after the run we list every output folder and add
	any '$'-named class file not already declared.
	"""
	parents = {}
	for node in self.inputs:
		parents[node.parent.id] = node.parent

	inner = {}
	for folder in parents.values():
		for entry in os.listdir(folder.abspath(self.env)):
			if '$' in entry:
				inner_node = folder.find_or_declare(entry)
				inner[inner_node.id] = inner_node

	known = set([x.id for x in self.outputs])
	for ident in set(inner.keys()) - known:
		self.outputs.append(inner[ident])

	return Task.Task.post_run(self)
cls.post_run = post_run_javac
def detect(conf):
	"""Configure the java tools (javac, java, jar) and the default flags."""
	# If JAVA_PATH is set, we prepend it to the path list
	java_path = conf.environ['PATH'].split(os.pathsep)
	v = conf.env
	# JAVA_HOME/bin, when present, takes precedence over PATH
	if 'JAVA_HOME' in conf.environ:
		java_path = [os.path.join(conf.environ['JAVA_HOME'], 'bin')] + java_path
		conf.env['JAVA_HOME'] = [conf.environ['JAVA_HOME']]
	for x in 'javac java jar'.split():
		conf.find_program(x, var=x.upper(), path_list=java_path)
		conf.env[x.upper()] = conf.cmd_to_list(conf.env[x.upper()])
	v['JAVA_EXT'] = ['.java']
	if 'CLASSPATH' in conf.environ:
		v['CLASSPATH'] = conf.environ['CLASSPATH']
	# jar and javac are hard requirements; finding 'java' alone is not enough
	if not v['JAR']: conf.fatal('jar is required for making java packages')
	if not v['JAVAC']: conf.fatal('javac is required for compiling java classes')
	v['JARCREATE'] = 'cf' # can use cvf
@conf
def check_java_class(self, classname, with_classpath=None):
	"""Check if the specified java class is installed.

	Compiles a small Test.java probe in a scratch directory and runs it
	against the given class name; returns the java exit status (0 = found).
	with_classpath: optional extra classpath entries for the probe.
	"""
	import shutil
	javatestdir = '.waf-javatest'
	classpath = javatestdir
	if self.env['CLASSPATH']:
		classpath += os.pathsep + self.env['CLASSPATH']
	if isinstance(with_classpath, str):
		classpath += os.pathsep + with_classpath
	# start from a clean scratch directory (ignore errors if absent)
	shutil.rmtree(javatestdir, True)
	os.mkdir(javatestdir)
	java_file = open(os.path.join(javatestdir, 'Test.java'), 'w')
	# class_check_source is presumably the probe program source defined
	# earlier in this module — not visible here, confirm against the file top
	java_file.write(class_check_source)
	java_file.close()
	# Compile the source
	Utils.exec_command(self.env['JAVAC'] + [os.path.join(javatestdir, 'Test.java')], shell=False)
	# Try to run the app
	cmd = self.env['JAVA'] + ['-cp', classpath, 'Test', classname]
	self.log.write("%s\n" % str(cmd))
	found = Utils.exec_command(cmd, shell=False, log=self.log)
	# exit status 0 means the class was loadable
	self.check_message('Java class %s' % classname, "", not found)
	shutil.rmtree(javatestdir, True)
	return found
@conf
def check_jni_headers(conf):
	"""
	Check for jni headers and libraries
	On success the environment variable xxx_JAVA is added for uselib
	"""
	if not conf.env.CC_NAME and not conf.env.CXX_NAME:
		conf.fatal('load a compiler first (gcc, g++, ..)')
	if not conf.env.JAVA_HOME:
		conf.fatal('set JAVA_HOME in the system environment')
	# jni requires the jvm
	javaHome = conf.env['JAVA_HOME'][0]
	# use a throwaway build context only to be able to glob inside JAVA_HOME
	b = Build.BuildContext()
	b.load_dirs(conf.srcdir, conf.blddir)
	dir = b.root.find_dir(conf.env.JAVA_HOME[0] + '/include')
	f = dir.ant_glob('**/(jni|jni_md).h', flat=False)
	incDirs = [x.parent.abspath() for x in f]
	dir = b.root.find_dir(conf.env.JAVA_HOME[0])
	f = dir.ant_glob('**/*jvm.(so|dll)', flat=False)
	# fall back to JAVA_HOME itself when no jvm library directory was found
	libDirs = [x.parent.abspath() for x in f] or [javaHome]
	# try each candidate directory until one compiles and links successfully
	for i, d in enumerate(libDirs):
		if conf.check(header_name='jni.h', define_name='HAVE_JNI_H', lib='jvm',
				libpath=d, includes=incDirs, uselib_store='JAVA', uselib='JAVA'):
			break
	else:
		conf.fatal('could not find lib jvm in %r (see config.log)' % libDirs)

76
tools/wafadmin/Tools/kde4.py

@ -0,0 +1,76 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2006 (ita)
import os, sys, re
import Options, TaskGen, Task, Utils
from TaskGen import taskgen, feature, after
class msgfmt_taskgen(TaskGen.task_gen):
	# plain subclass so 'msgfmt' can be instantiated as a task generator name
	def __init__(self, *k, **kw):
		TaskGen.task_gen.__init__(self, *k, **kw)
@feature('msgfmt')
def init_msgfmt(self):
	"""Set the default install destination for compiled message catalogs."""
	#langs = '' # for example "foo/fr foo/br"
	self.default_install_path = '${KDE4_LOCALE_INSTALL_DIR}'
@feature('msgfmt')
@after('init_msgfmt')
def apply_msgfmt(self):
	"""Create one msgfmt task (.po -> .mo) per language in self.langs."""
	for lang in self.to_list(self.langs):
		node = self.path.find_resource(lang+'.po')
		task = self.create_task('msgfmt')
		task.set_inputs(node)
		task.set_outputs(node.change_ext('.mo'))
		# installation metadata is only needed when actually installing
		if not self.bld.is_install: continue
		langname = lang.split('/')
		langname = langname[-1]
		task.install_path = self.install_path + os.sep + langname + os.sep + 'LC_MESSAGES'
		task.filename = getattr(self, 'appname', 'set_your_appname') + '.mo'
		task.chmod = self.chmod
def detect(conf):
	"""Detect kde4 via kde4-config and import variables from KDELibsDependencies.cmake."""
	kdeconfig = conf.find_program('kde4-config')
	if not kdeconfig:
		conf.fatal('we need kde4-config')
	prefix = Utils.cmd_output('%s --prefix' % kdeconfig, silent=True).strip()
	file = '%s/share/apps/cmake/modules/KDELibsDependencies.cmake' % prefix
	try: os.stat(file)
	except OSError:
		# newer kde4 layouts place the module under share/kde4
		file = '%s/share/kde4/apps/cmake/modules/KDELibsDependencies.cmake' % prefix
		try: os.stat(file)
		except OSError: conf.fatal('could not open %s' % file)
	try:
		txt = Utils.readf(file)
	except (OSError, IOError):
		conf.fatal('could not read %s' % file)
	# join cmake line continuations and strip comments before scanning
	txt = txt.replace('\\\n', '\n')
	fu = re.compile('#(.*)\n')
	txt = fu.sub('', txt)
	# pick up every cmake  set(NAME "value")  assignment
	setregexp = re.compile('([sS][eE][tT]\s*\()\s*([^\s]+)\s+\"([^"]+)\"\)')
	found = setregexp.findall(txt)
	for (_, key, val) in found:
		#print key, val
		conf.env[key] = val
	# well well, i could just write an interpreter for cmake files
	conf.env['LIB_KDECORE']='kdecore'
	conf.env['LIB_KDEUI'] ='kdeui'
	conf.env['LIB_KIO'] ='kio'
	conf.env['LIB_KHTML'] ='khtml'
	conf.env['LIB_KPARTS'] ='kparts'
	conf.env['LIBPATH_KDECORE'] = conf.env['KDE4_LIB_INSTALL_DIR']
	conf.env['CPPPATH_KDECORE'] = conf.env['KDE4_INCLUDE_INSTALL_DIR']
	conf.env.append_value('CPPPATH_KDECORE', conf.env['KDE4_INCLUDE_INSTALL_DIR']+"/KDE")
	conf.env['MSGFMT'] = conf.find_program('msgfmt')
Task.simple_task_type('msgfmt', '${MSGFMT} ${SRC} -o ${TGT}', color='BLUE', shell=False)

333
tools/wafadmin/Tools/libtool.py

@ -0,0 +1,333 @@
#!/usr/bin/env python
# encoding: utf-8
# Matthias Jahn, 2008, jahn matthias ath freenet punto de
# Thomas Nagy, 2008 (ita)
import sys, re, os, optparse
import TaskGen, Task, Utils, preproc
from Logs import error, debug, warn
from TaskGen import taskgen, after, before, feature
REVISION="0.1.3"
"""
if you want to use the code here, you must use something like this:
obj = obj.create(...)
obj.features.append("libtool")
obj.vnum = "1.2.3" # optional, but versioned libraries are common
"""
# fake libtool files
fakelibtool_vardeps = ['CXX', 'PREFIX']  # env vars that invalidate the task signature
def fakelibtool_build(task):
	"""Write a libtool .la file describing the library produced by this task.

	task.inputs[0] is the linked library node and task.outputs[0] the .la
	file to create. Returns 0 on success (waf task convention).
	"""
	env = task.env
	sname = task.inputs[0].name
	dest = open(task.outputs[0].abspath(env), 'w')
	try:
		fu = dest.write
		fu("# Generated by ltmain.sh - GNU libtool 1.5.18 - (pwn3d by BKsys II code name WAF)\n")
		if env['vnum']:
			# versioned lib: libfoo.so -> libfoo.so.1.2.3 / libfoo.so.1 / libfoo.so
			nums = env['vnum'].split('.')
			libname = task.inputs[0].name
			name3 = libname + '.' + env['vnum']
			name2 = libname + '.' + nums[0]
			name1 = libname
			fu("dlname='%s'\n" % name2)
			strn = " ".join([name3, name2, name1])
			fu("library_names='%s'\n" % (strn))
		else:
			fu("dlname='%s'\n" % sname)
			fu("library_names='%s %s %s'\n" % (sname, sname, sname))
		fu("old_library=''\n")
		vars = ' '.join(env['libtoolvars'] + env['LINKFLAGS'])
		fu("dependency_libs='%s'\n" % vars)
		fu("current=0\n")
		fu("age=0\nrevision=0\ninstalled=yes\nshouldnotlink=no\n")
		fu("dlopen=''\ndlpreopen=''\n")
		fu("libdir='%s/lib'\n" % env['PREFIX'])
	finally:
		# the original leaked the descriptor when a write failed
		dest.close()
	return 0
def read_la_file(path):
	"""Parse a libtool .la file into a dict of name -> value strings.

	Only lines matching ``name='value'`` are kept; comments, blank lines
	and anything else are silently skipped.
	"""
	sp = re.compile(r'^([^=]+)=\'(.*)\'$')
	dc = {}
	file = open(path, "r")
	try:
		for line in file:
			try:
				#print sp.split(line.strip())
				_, left, right, _ = sp.split(line.strip())
				dc[left] = right
			except ValueError:
				# line does not match the name='value' pattern
				pass
	finally:
		# the original leaked the handle if iteration raised
		file.close()
	return dc
@feature("libtool")
@after('apply_link')
def apply_link_libtool(self):
if self.type != 'program':
linktask = self.link_task
latask = self.create_task('fakelibtool')
latask.set_inputs(linktask.outputs)
latask.set_outputs(linktask.outputs[0].change_ext('.la'))
self.latask = latask
if self.bld.is_install:
self.bld.install_files('${PREFIX}/lib', linktask.outputs[0], self.env)
@feature("libtool")
@before('apply_core')
def apply_libtool(self):
self.env['vnum']=self.vnum
paths=[]
libs=[]
libtool_files=[]
libtool_vars=[]
for l in self.env['LINKFLAGS']:
if l[:2]=='-L':
paths.append(l[2:])
elif l[:2]=='-l':
libs.append(l[2:])
for l in libs:
for p in paths:
dict = read_la_file(p+'/lib'+l+'.la')
linkflags2 = dict.get('dependency_libs', '')
for v in linkflags2.split():
if v.endswith('.la'):
libtool_files.append(v)
libtool_vars.append(v)
continue
self.env.append_unique('LINKFLAGS', v)
break
self.env['libtoolvars']=libtool_vars
while libtool_files:
file = libtool_files.pop()
dict = read_la_file(file)
for v in dict['dependency_libs'].split():
if v[-3:] == '.la':
libtool_files.append(v)
continue
self.env.append_unique('LINKFLAGS', v)
Task.task_type_from_func('fakelibtool', vars=fakelibtool_vardeps, func=fakelibtool_build, color='BLUE', after="cc_link cxx_link static_link")
class libtool_la_file:
	"""Parsed representation of one libtool .la archive description file."""
	def __init__ (self, la_filename):
		self.__la_filename = la_filename
		#remove path and .la suffix
		self.linkname = str(os.path.split(la_filename)[-1])[:-3]
		if self.linkname.startswith("lib"):
			self.linkname = self.linkname[3:]
		# The name that we can dlopen(3).
		self.dlname = None
		# Names of this library
		self.library_names = None
		# The name of the static archive.
		self.old_library = None
		# Libraries that this one depends upon.
		self.dependency_libs = None
		# Version information for the library.
		self.current = None
		self.age = None
		self.revision = None
		# Is this an already installed library?
		self.installed = None
		# Should we warn about portability when linking against -modules?
		self.shouldnotlink = None
		# Files to dlopen/dlpreopen
		self.dlopen = None
		self.dlpreopen = None
		# Directory that this library needs to be installed in:
		self.libdir = '/usr/lib'
		if not self.__parse():
			# the original raised a plain string, which is a TypeError on
			# python >= 2.6; raise a real exception instead
			raise IOError("file %s not found!!" % (la_filename,))
	def __parse(self):
		"Retrieve the variables from the file; return 1 on success, 0 if missing"
		if not os.path.isfile(self.__la_filename): return 0
		la_file = open(self.__la_filename, 'r')
		try:
			for line in la_file:
				ln = line.strip()
				if not ln: continue
				if ln[0] == '#': continue
				(key, value) = str(ln).split('=', 1)
				key = key.strip()
				value = value.strip()
				# normalize yes/no to booleans, numbers to int, and strip
				# the single quotes from string values
				if value == "no": value = False
				elif value == "yes": value = True
				else:
					try: value = int(value)
					except ValueError: value = value.strip("'")
				setattr(self, key, value)
		finally:
			# always release the handle, even on a malformed line
			la_file.close()
		return 1
	def get_libs(self):
		"""return linkflags (-L/-l) for this lib and its dependencies"""
		libs = []
		if self.dependency_libs:
			libs = str(self.dependency_libs).strip().split()
		# add la lib and libdir (split() never returns None, so the old
		# 'libs == None' guard was dead code and has been removed)
		libs.insert(0, "-l%s" % self.linkname.strip())
		libs.insert(0, "-L%s" % self.libdir.strip())
		return libs
	def __str__(self):
		return '''\
dlname = "%(dlname)s"
library_names = "%(library_names)s"
old_library = "%(old_library)s"
dependency_libs = "%(dependency_libs)s"
version = %(current)s.%(age)s.%(revision)s
installed = "%(installed)s"
shouldnotlink = "%(shouldnotlink)s"
dlopen = "%(dlopen)s"
dlpreopen = "%(dlpreopen)s"
libdir = "%(libdir)s"''' % self.__dict__
class libtool_config:
	"""High-level view of a .la file with version comparison and transitive
	link-flag resolution."""
	def __init__ (self, la_filename):
		self.__libtool_la_file = libtool_la_file(la_filename)
		tmp = self.__libtool_la_file
		self.__version = [int(tmp.current), int(tmp.age), int(tmp.revision)]
		self.__sub_la_files = []
		self.__sub_la_files.append(la_filename)
		self.__libs = None
	def __cmp__(self, other):
		"""make it compareable with X.Y.Z versions (Y and Z are optional)"""
		if not other:
			return 1
		othervers = [int(s) for s in str(other).split(".")]
		selfvers = self.__version
		# portable replacement for the py2-only cmp() builtin
		return (selfvers > othervers) - (selfvers < othervers)
	# rich comparisons so the version tests also work where __cmp__ is
	# ignored (python 3); delegating keeps py2 behavior unchanged
	def __eq__(self, other): return self.__cmp__(other) == 0
	def __ne__(self, other): return self.__cmp__(other) != 0
	def __lt__(self, other): return self.__cmp__(other) < 0
	def __le__(self, other): return self.__cmp__(other) <= 0
	def __gt__(self, other): return self.__cmp__(other) > 0
	def __ge__(self, other): return self.__cmp__(other) >= 0
	def __str__(self):
		return "\n".join([
			str(self.__libtool_la_file),
			' '.join(self.__libtool_la_file.get_libs()),
			'* New getlibs:',
			' '.join(self.get_libs())
			])
	def __get_la_libs(self, la_filename):
		# flags contributed by one referenced .la file
		return libtool_la_file(la_filename).get_libs()
	def get_libs(self):
		"""return the complete unique linkflags that do not
		contain .la files anymore"""
		libs_list = list(self.__libtool_la_file.get_libs())
		libs_map = {}
		while len(libs_list) > 0:
			entry = libs_list.pop(0)
			if entry:
				if str(entry).endswith(".la"):
					## prevents duplicate .la checks
					if entry not in self.__sub_la_files:
						self.__sub_la_files.append(entry)
						libs_list.extend(self.__get_la_libs(entry))
				else:
					libs_map[entry] = 1
		# materialize as a list (dict views are not lists on python 3)
		self.__libs = list(libs_map.keys())
		return self.__libs
	def get_libs_only_L(self):
		if not self.__libs: self.get_libs()
		libs = self.__libs
		libs = [s for s in libs if str(s).startswith('-L')]
		return libs
	def get_libs_only_l(self):
		if not self.__libs: self.get_libs()
		libs = self.__libs
		libs = [s for s in libs if str(s).startswith('-l')]
		return libs
	def get_libs_only_other(self):
		if not self.__libs: self.get_libs()
		libs = self.__libs
		libs = [s for s in libs if not(str(s).startswith('-L')or str(s).startswith('-l'))]
		return libs
def useCmdLine():
	"""parse cmdline args and control build"""
	usage = '''Usage: %prog [options] PathToFile.la
example: %prog --atleast-version=2.0.0 /usr/lib/libIlmImf.la
nor: %prog --libs /usr/lib/libamarok.la'''
	parser = optparse.OptionParser(usage)
	a = parser.add_option
	a("--version", dest = "versionNumber",
		action = "store_true", default = False,
		help = "output version of libtool-config"
		)
	a("--debug", dest = "debug",
		action = "store_true", default = False,
		help = "enable debug"
		)
	a("--libs", dest = "libs",
		action = "store_true", default = False,
		help = "output all linker flags"
		)
	a("--libs-only-l", dest = "libs_only_l",
		action = "store_true", default = False,
		help = "output -l flags"
		)
	a("--libs-only-L", dest = "libs_only_L",
		action = "store_true", default = False,
		help = "output -L flags"
		)
	a("--libs-only-other", dest = "libs_only_other",
		action = "store_true", default = False,
		help = "output other libs (e.g. -pthread)"
		)
	a("--atleast-version", dest = "atleast_version",
		default=None,
		help = "return 0 if the module is at least version ATLEAST_VERSION"
		)
	a("--exact-version", dest = "exact_version",
		default=None,
		help = "return 0 if the module is exactly version EXACT_VERSION"
		)
	a("--max-version", dest = "max_version",
		default=None,
		help = "return 0 if the module is at no newer than version MAX_VERSION"
		)
	(options, args) = parser.parse_args()
	# exactly one .la file is expected unless only the version is requested
	if len(args) != 1 and not options.versionNumber:
		parser.error("incorrect number of arguments")
	if options.versionNumber:
		print("libtool-config version %s" % REVISION)
		return 0
	ltf = libtool_config(args[0])
	if options.debug:
		print(ltf)
	# version checks: exit 0 when the constraint holds, 1 otherwise
	if options.atleast_version:
		if ltf >= options.atleast_version: return 0
		sys.exit(1)
	if options.exact_version:
		if ltf == options.exact_version: return 0
		sys.exit(1)
	if options.max_version:
		if ltf <= options.max_version: return 0
		sys.exit(1)
	# otherwise print the requested subset of link flags
	def p(x):
		print(" ".join(x))
	if options.libs: p(ltf.get_libs())
	elif options.libs_only_l: p(ltf.get_libs_only_l())
	elif options.libs_only_L: p(ltf.get_libs_only_L())
	elif options.libs_only_other: p(ltf.get_libs_only_other())
	return 0
if __name__ == '__main__':
	useCmdLine()

25
tools/wafadmin/Tools/lua.py

@ -0,0 +1,25 @@
#!/usr/bin/env python
# encoding: utf-8
# Sebastian Schlingmann, 2008
# Thomas Nagy, 2008 (ita)
import TaskGen
from TaskGen import taskgen, feature
from Constants import *
# chain rule: compile .lua sources into .luac bytecode with luac
TaskGen.declare_chain(
	name = 'luac',
	rule = '${LUAC} -s -o ${TGT} ${SRC}',
	ext_in = '.lua',
	ext_out = '.luac',
	reentrant = 0,
	install = 'LUADIR', # env variable
)
@feature('lua')
def init_lua(self):
	# installed bytecode defaults to rwxr-xr-x (O755 from Constants)
	self.default_chmod = O755
def detect(conf):
	# luac is mandatory: configuration fails when it is missing
	conf.find_program('luac', var='LUAC', mandatory = True)

433
tools/wafadmin/Tools/misc.py

@ -0,0 +1,433 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2006 (ita)
"""
Custom objects:
- execute a function everytime
- copy a file somewhere else
"""
import shutil, re, os
import TaskGen, Node, Task, Utils, Build, Constants
from TaskGen import feature, taskgen, after, before
from Logs import debug
def copy_func(tsk):
	"Make a file copy. This might be used to make other kinds of file processing (even calling a compiler is possible)"
	env = tsk.env
	src = tsk.inputs[0].abspath(env)
	dst = tsk.outputs[0].abspath(env)
	try:
		shutil.copy2(src, dst)
	except (OSError, IOError):
		# signal failure to the build with a non-zero task status
		return 1
	# apply the requested permission bits, when any were set
	if tsk.chmod:
		os.chmod(dst, tsk.chmod)
	return 0
def action_process_file_func(tsk):
	"Ask the function attached to the task to process it"
	if tsk.fun:
		return tsk.fun(tsk)
	# a 'copy'-style task without a processing function is a user error
	raise Utils.WafError('task must have a function attached to it for copy_func to work!')
class cmd_taskgen(TaskGen.task_gen):
	# plain subclass so 'cmd' can be instantiated as a task generator name
	def __init__(self, *k, **kw):
		TaskGen.task_gen.__init__(self, *k, **kw)
@feature('cmd')
def apply_cmd(self):
	"call a command everytime"
	if not self.fun: raise Utils.WafError('cmdobj needs a function!')
	# a bare TaskBase: no inputs/outputs, just run the attached function
	tsk = Task.TaskBase()
	tsk.fun = self.fun
	tsk.env = self.env
	self.tasks.append(tsk)
	tsk.install_path = self.install_path
class copy_taskgen(TaskGen.task_gen):
	"By default, make a file copy, if fun is provided, fun will make the copy (or call a compiler, etc)"
	def __init__(self, *k, **kw):
		TaskGen.task_gen.__init__(self, *k, **kw)
@feature('copy')
@before('apply_core')
def apply_copy(self):
	"""Create one 'copy' task per source file, replacing default processing."""
	Utils.def_attrs(self, fun=copy_func)
	self.default_install_path = 0
	lst = self.to_list(self.source)
	# apply_core must not process these sources a second time
	self.meths.remove('apply_core')
	for filename in lst:
		node = self.path.find_resource(filename)
		if not node: raise Utils.WafError('cannot find input file %s for processing' % filename)
		target = self.target
		# with several sources the target name is derived from each node
		if not target or len(lst)>1: target = node.name
		# TODO the file path may be incorrect
		newnode = self.path.find_or_declare(target)
		tsk = self.create_task('copy')
		tsk.set_inputs(node)
		tsk.set_outputs(newnode)
		tsk.fun = self.fun
		tsk.chmod = self.chmod
		if not tsk.env:
			tsk.debug()
			raise Utils.WafError('task without an environment')
def subst_func(tsk):
	"""Substitute @VAR@ markers in a .in file.

	Values come from tsk.dict when provided, otherwise from the task
	environment (exact name first, then the upper-cased name).
	"""
	m4_re = re.compile('@(\w+)@', re.M)
	env = tsk.env
	infile = tsk.inputs[0].abspath(env)
	outfile = tsk.outputs[0].abspath(env)
	code = Utils.readf(infile)
	# replace all % by %% to prevent errors by % signs in the input file while string formatting
	code = code.replace('%', '%%')
	# turn each @name@ marker into a %(name)s placeholder
	s = m4_re.sub(r'%(\1)s', code)
	di = tsk.dict or {}
	if not di:
		names = m4_re.findall(code)
		for i in names:
			di[i] = env.get_flat(i) or env.get_flat(i.upper())
	file = open(outfile, 'w')
	try:
		# may raise KeyError on a missing substitution; the original leaked
		# the file handle in that case
		file.write(s % di)
	finally:
		file.close()
	if tsk.chmod: os.chmod(outfile, tsk.chmod)
class subst_taskgen(TaskGen.task_gen):
	# plain subclass so 'subst' can be instantiated as a task generator name
	def __init__(self, *k, **kw):
		TaskGen.task_gen.__init__(self, *k, **kw)
@feature('subst')
@before('apply_core')
def apply_subst(self):
	"""Create substitution tasks ('copy' tasks driven by subst_func) for each source."""
	Utils.def_attrs(self, fun=subst_func)
	self.default_install_path = 0
	lst = self.to_list(self.source)
	# apply_core must not process these sources a second time
	self.meths.remove('apply_core')
	self.dict = getattr(self, 'dict', {})
	for filename in lst:
		node = self.path.find_resource(filename)
		if not node: raise Utils.WafError('cannot find input file %s for processing' % filename)
		if self.target:
			newnode = self.path.find_or_declare(self.target)
		else:
			# strip the trailing extension (foo.in -> foo)
			newnode = node.change_ext('')
		try:
			self.dict = self.dict.get_merged_dict()
		except AttributeError:
			pass
		# hash the substitution values so changing them triggers a rebuild
		if self.dict and not self.env['DICT_HASH']:
			self.env = self.env.copy()
			keys = list(self.dict.keys())
			keys.sort()
			lst = [self.dict[x] for x in keys]
			self.env['DICT_HASH'] = str(Utils.h_list(lst))
		tsk = self.create_task('copy')
		tsk.set_inputs(node)
		tsk.set_outputs(newnode)
		tsk.fun = self.fun
		tsk.dict = self.dict
		tsk.dep_vars = ['DICT_HASH']
		tsk.install_path = self.install_path
		tsk.chmod = self.chmod
		if not tsk.env:
			tsk.debug()
			raise Utils.WafError('task without an environment')
####################
## command-output ####
####################
class cmd_arg(object):
	"""command-output arguments for representing files or folders"""
	def __init__(self, name, template='%s'):
		# name: path fragment, resolved to a node by find_node() in subclasses
		self.name = name
		# template: printf-style wrapper applied to the resolved path
		self.template = template
		# node: filled in by find_node()
		self.node = None
class input_file(cmd_arg):
	"""A command argument resolved against the source tree."""
	def find_node(self, base_path):
		assert isinstance(base_path, Node.Node)
		self.node = base_path.find_resource(self.name)
		if self.node is None:
			# bugfix: the original format string had a single %s for two
			# values, raising TypeError instead of the intended error
			raise Utils.WafError("Input file %s not found in %s" % (self.name, base_path))
	def get_path(self, env, absolute):
		# absolute path when the task runs with an explicit cwd,
		# source-relative path otherwise
		if absolute:
			return self.template % self.node.abspath(env)
		else:
			return self.template % self.node.srcpath(env)
class output_file(cmd_arg):
	"""A command argument declared in the build tree."""
	def find_node(self, base_path):
		assert isinstance(base_path, Node.Node)
		self.node = base_path.find_or_declare(self.name)
		if self.node is None:
			# bugfix: the original format string had a single %s for two
			# values, raising TypeError instead of the intended error
			raise Utils.WafError("Output file %s not found in %s" % (self.name, base_path))
	def get_path(self, env, absolute):
		# absolute path when the task runs with an explicit cwd,
		# build-relative path otherwise
		if absolute:
			return self.template % self.node.abspath(env)
		else:
			return self.template % self.node.bldpath(env)
class cmd_dir_arg(cmd_arg):
	"""A command argument standing for a directory node."""
	def find_node(self, base_path):
		assert isinstance(base_path, Node.Node)
		self.node = base_path.find_dir(self.name)
		if self.node is None:
			# bugfix: the original format string had a single %s for two
			# values, raising TypeError instead of the intended error
			raise Utils.WafError("Directory %s not found in %s" % (self.name, base_path))
class input_dir(cmd_dir_arg):
	# source directory: absolute path regardless of env/absolute arguments
	def get_path(self, dummy_env, dummy_absolute):
		return self.template % self.node.abspath()
class output_dir(cmd_dir_arg):
	# build directory: variant-dependent absolute path
	def get_path(self, env, dummy_absolute):
		return self.template % self.node.abspath(env)
class command_output(Task.Task):
	"""Task that runs an arbitrary command with redirected stdin/stdout/stderr."""
	color = "BLUE"
	def __init__(self, env, command, command_node, command_args, stdin, stdout, cwd, os_env, stderr):
		Task.Task.__init__(self, env, normal=1)
		assert isinstance(command, (str, Node.Node))
		self.command = command
		self.command_args = command_args
		self.stdin = stdin
		self.stdout = stdout
		self.cwd = cwd
		self.os_env = os_env
		self.stderr = stderr
		# re-run whenever the command file itself changes
		if command_node is not None: self.dep_nodes = [command_node]
		self.dep_vars = [] # additional environment variables to look
	def run(self):
		task = self
		#assert len(task.inputs) > 0
		def input_path(node, template):
			# with an explicit cwd, build-relative paths would be wrong
			if task.cwd is None:
				return template % node.bldpath(task.env)
			else:
				return template % node.abspath()
		def output_path(node, template):
			fun = node.abspath
			if task.cwd is None: fun = node.bldpath
			return template % fun(task.env)
		if isinstance(task.command, Node.Node):
			argv = [input_path(task.command, '%s')]
		else:
			argv = [task.command]
		for arg in task.command_args:
			if isinstance(arg, str):
				argv.append(arg)
			else:
				assert isinstance(arg, cmd_arg)
				argv.append(arg.get_path(task.env, (task.cwd is not None)))
		# open the redirection files and remember them so they are always
		# closed (the original leaked these descriptors)
		opened = []
		try:
			if task.stdin:
				stdin = open(input_path(task.stdin, '%s'))
				opened.append(stdin)
			else:
				stdin = None
			if task.stdout:
				stdout = open(output_path(task.stdout, '%s'), "w")
				opened.append(stdout)
			else:
				stdout = None
			if task.stderr:
				stderr = open(output_path(task.stderr, '%s'), "w")
				opened.append(stderr)
			else:
				stderr = None
			if task.cwd is None:
				cwd = ('None (actually %r)' % os.getcwd())
			else:
				cwd = repr(task.cwd)
			debug("command-output: cwd=%s, stdin=%r, stdout=%r, argv=%r" %
			      (cwd, stdin, stdout, argv))
			if task.os_env is None:
				os_env = os.environ
			else:
				os_env = task.os_env
			command = Utils.pproc.Popen(argv, stdin=stdin, stdout=stdout, stderr=stderr, cwd=task.cwd, env=os_env)
			return command.wait()
		finally:
			for f in opened:
				f.close()
class cmd_output_taskgen(TaskGen.task_gen):
	# plain subclass registered below under the name 'command-output'
	def __init__(self, *k, **kw):
		TaskGen.task_gen.__init__(self, *k, **kw)
@feature('command-output')
def init_cmd_output(self):
	"""Declare the default attributes consumed by the command-output feature."""
	Utils.def_attrs(self,
		stdin = None,
		stdout = None,
		stderr = None,
		# the command to execute
		command = None,
		# whether it is an external command; otherwise it is assumed
		# to be an executable binary or script that lives in the
		# source or build tree.
		command_is_external = False,
		# extra parameters (argv) to pass to the command (excluding
		# the command itself)
		argv = [],
		# dependencies to other objects -> this is probably not what you want (ita)
		# values must be 'task_gen' instances (not names!)
		dependencies = [],
		# dependencies on env variable contents
		dep_vars = [],
		# input files that are implicit, i.e. they are not
		# stdin, nor are they mentioned explicitly in argv
		hidden_inputs = [],
		# output files that are implicit, i.e. they are not
		# stdout, nor are they mentioned explicitly in argv
		hidden_outputs = [],
		# change the subprocess to this cwd (must use obj.input_dir() or output_dir() here)
		cwd = None,
		# OS environment variables to pass to the subprocess
		# if None, use the default environment variables unchanged
		os_env = None)
@feature('command-output')
@after('init_cmd_output')
def apply_cmd_output(self):
	"""Build the command_output task from the attributes declared by
	init_cmd_output: resolve the command, cwd and argv entries to nodes,
	gather all implicit/explicit inputs and outputs, create the task."""
	if self.command is None:
		raise Utils.WafError("command-output missing command")
	if self.command_is_external:
		cmd = self.command
		cmd_node = None
	else:
		cmd_node = self.path.find_resource(self.command)
		assert cmd_node is not None, ('''Could not find command '%s' in source tree.
Hint: if this is an external command,
use command_is_external=True''') % (self.command,)
		cmd = cmd_node
	if self.cwd is None:
		cwd = None
	else:
		# bugfix: the original asserted on the undefined name 'CmdDirArg'
		# (NameError) and left the local 'cwd' unbound in this branch
		# (UnboundLocalError at task creation below)
		assert isinstance(self.cwd, cmd_dir_arg)
		self.cwd.find_node(self.path)
		cwd = self.cwd.get_path(self.env, True)
	args = []
	inputs = []
	outputs = []
	# resolve every structured argv entry to a node and classify it
	for arg in self.argv:
		if isinstance(arg, cmd_arg):
			arg.find_node(self.path)
			if isinstance(arg, input_file):
				inputs.append(arg.node)
			if isinstance(arg, output_file):
				outputs.append(arg.node)
	if self.stdout is None:
		stdout = None
	else:
		assert isinstance(self.stdout, str)
		stdout = self.path.find_or_declare(self.stdout)
		if stdout is None:
			raise Utils.WafError("File %s not found" % (self.stdout,))
		outputs.append(stdout)
	if self.stderr is None:
		stderr = None
	else:
		assert isinstance(self.stderr, str)
		stderr = self.path.find_or_declare(self.stderr)
		if stderr is None:
			raise Utils.WafError("File %s not found" % (self.stderr,))
		outputs.append(stderr)
	if self.stdin is None:
		stdin = None
	else:
		assert isinstance(self.stdin, str)
		stdin = self.path.find_resource(self.stdin)
		if stdin is None:
			raise Utils.WafError("File %s not found" % (self.stdin,))
		inputs.append(stdin)
	# implicit dependencies not visible on the command line
	for hidden_input in self.to_list(self.hidden_inputs):
		node = self.path.find_resource(hidden_input)
		if node is None:
			raise Utils.WafError("File %s not found in dir %s" % (hidden_input, self.path))
		inputs.append(node)
	for hidden_output in self.to_list(self.hidden_outputs):
		node = self.path.find_or_declare(hidden_output)
		if node is None:
			raise Utils.WafError("File %s not found in dir %s" % (hidden_output, self.path))
		outputs.append(node)
	if not (inputs or getattr(self, 'no_inputs', None)):
		raise Utils.WafError('command-output objects must have at least one input file or give self.no_inputs')
	if not (outputs or getattr(self, 'no_outputs', None)):
		raise Utils.WafError('command-output objects must have at least one output file or give self.no_outputs')
	task = command_output(self.env, cmd, cmd_node, self.argv, stdin, stdout, cwd, self.os_env, stderr)
	Utils.copy_attrs(self, task, 'before after ext_in ext_out', only_if_set=True)
	self.tasks.append(task)
	task.inputs = inputs
	task.outputs = outputs
	task.dep_vars = self.to_list(self.dep_vars)
	for dep in self.dependencies:
		assert dep is not self
		dep.post()
		for dep_task in dep.tasks:
			task.set_run_after(dep_task)
	if not task.inputs:
		# the case for svnversion, always run, and update the output nodes
		task.runnable_status = type(Task.TaskBase.run)(runnable_status, task, task.__class__) # always run
		task.post_run = type(Task.TaskBase.run)(post_run, task, task.__class__)
	# TODO the case with no outputs?
def post_run(self):
	# record the signature of each output file so the next build can detect
	# changes (bound onto always-run tasks created in apply_cmd_output)
	for x in self.outputs:
		h = Utils.h_file(x.abspath(self.env))
		self.generator.bld.node_sigs[self.env.variant()][x.id] = h
def runnable_status(self):
	# always execute (used for tasks without inputs, e.g. svnversion)
	return Constants.RUN_ME
# register the 'copy' task type and expose the command-output generator
Task.task_type_from_func('copy', vars=[], func=action_process_file_func)
TaskGen.task_gen.classes['command-output'] = cmd_output_taskgen

775
tools/wafadmin/Tools/msvc.py

@ -0,0 +1,775 @@
#!/usr/bin/env python
# encoding: utf-8
# Carlos Rafael Giani, 2006 (dv)
# Tamas Pal, 2007 (folti)
# Nicolas Mercier, 2009
# Microsoft Visual C++/Intel C++ compiler support - beta, needs more testing
# usage:
#
# conf.env['MSVC_VERSIONS'] = ['msvc 9.0', 'msvc 8.0', 'wsdk 7.0', 'intel 11', 'PocketPC 9.0', 'Smartphone 8.0']
# conf.env['MSVC_TARGETS'] = ['x64']
# conf.check_tool('msvc')
# OR conf.check_tool('msvc', funs='no_autodetect')
# conf.check_lib_msvc('gdi32')
# conf.check_libs_msvc('kernel32 user32', mandatory=true)
# ...
# obj.uselib = 'KERNEL32 USER32 GDI32'
#
# platforms and targets will be tested in the order they appear;
# the first good configuration will be used
# supported platforms :
# ia64, x64, x86, x86_amd64, x86_ia64
# compilers supported :
# msvc => Visual Studio, versions 7.1 (2003), 8,0 (2005), 9.0 (2008)
# wsdk => Windows SDK, versions 6.0, 6.1, 7.0
# icl => Intel compiler, versions 9,10,11
# Smartphone => Compiler/SDK for Smartphone devices (armv4/v4i)
# PocketPC => Compiler/SDK for PocketPC devices (armv4/v4i)
import os, sys, re, string, optparse
import Utils, TaskGen, Runner, Configure, Task, Options
from Logs import debug, info, warn, error
from TaskGen import after, before, feature
from Configure import conftest, conf
import ccroot, cc, cxx, ar, winres
from libtool import read_la_file
import _winreg
pproc = Utils.pproc
# importlibs provided by MSVC/Platform SDK. Do NOT search them....
g_msvc_systemlibs = """
aclui activeds ad1 adptif adsiid advapi32 asycfilt authz bhsupp bits bufferoverflowu cabinet
cap certadm certidl ciuuid clusapi comctl32 comdlg32 comsupp comsuppd comsuppw comsuppwd comsvcs
credui crypt32 cryptnet cryptui d3d8thk daouuid dbgeng dbghelp dciman32 ddao35 ddao35d
ddao35u ddao35ud delayimp dhcpcsvc dhcpsapi dlcapi dnsapi dsprop dsuiext dtchelp
faultrep fcachdll fci fdi framedyd framedyn gdi32 gdiplus glauxglu32 gpedit gpmuuid
gtrts32w gtrtst32hlink htmlhelp httpapi icm32 icmui imagehlp imm32 iphlpapi iprop
kernel32 ksguid ksproxy ksuser libcmt libcmtd libcpmt libcpmtd loadperf lz32 mapi
mapi32 mgmtapi minidump mmc mobsync mpr mprapi mqoa mqrt msacm32 mscms mscoree
msdasc msimg32 msrating mstask msvcmrt msvcurt msvcurtd mswsock msxml2 mtx mtxdm
netapi32 nmapinmsupp npptools ntdsapi ntdsbcli ntmsapi ntquery odbc32 odbcbcp
odbccp32 oldnames ole32 oleacc oleaut32 oledb oledlgolepro32 opends60 opengl32
osptk parser pdh penter pgobootrun pgort powrprof psapi ptrustm ptrustmd ptrustu
ptrustud qosname rasapi32 rasdlg rassapi resutils riched20 rpcndr rpcns4 rpcrt4 rtm
rtutils runtmchk scarddlg scrnsave scrnsavw secur32 sensapi setupapi sfc shell32
shfolder shlwapi sisbkup snmpapi sporder srclient sti strsafe svcguid tapi32 thunk32
traffic unicows url urlmon user32 userenv usp10 uuid uxtheme vcomp vcompd vdmdbg
version vfw32 wbemuuid webpost wiaguid wininet winmm winscard winspool winstrm
wintrust wldap32 wmiutils wow32 ws2_32 wsnmp32 wsock32 wst wtsapi32 xaswitch xolehlp
""".split()
all_msvc_platforms = [ ('x64', 'amd64'), ('x86', 'x86'), ('ia64', 'ia64'), ('x86_amd64', 'amd64'), ('x86_ia64', 'ia64') ]
all_wince_platforms = [ ('armv4', 'arm'), ('armv4i', 'arm'), ('mipsii', 'mips'), ('mipsii_fp', 'mips'), ('mipsiv', 'mips'), ('mipsiv_fp', 'mips'), ('sh4', 'sh'), ('x86', 'cex86') ]
all_icl_platforms = [ ('intel64', 'amd64'), ('em64t', 'amd64'), ('ia32', 'x86'), ('Itanium', 'ia64')]
def setup_msvc(conf, versions):
	"""Pick the first usable (version, target) pair honoring MSVC_VERSIONS/MSVC_TARGETS.

	versions: list of (version-name, [(target, (arch, (paths, incs, libs)))]).
	Returns (compiler, bin-paths, include-dirs, lib-dirs) or calls conf.fatal.
	"""
	platforms = Utils.to_list(conf.env['MSVC_TARGETS']) or [i for i,j in all_msvc_platforms+all_icl_platforms+all_wince_platforms]
	# without an explicit preference, try the detected versions newest first
	desired_versions = conf.env['MSVC_VERSIONS'] or [v for v,_ in versions][::-1]
	versiondict = dict(versions)
	for version in desired_versions:
		try:
			targets = dict(versiondict [version])
			for target in platforms:
				try:
					arch,(p1,p2,p3) = targets[target]
					compiler,version = version.split()
					return compiler,p1,p2,p3
				except KeyError: continue
		except KeyError: continue
	conf.fatal('msvc: Impossible to find a valid architecture for building (in setup_msvc)')
@conf
def get_msvc_version(conf, compiler, version, target, vcvars):
	"""Run the vcvars batch file for one target and capture PATH/INCLUDE/LIB.

	Also checks that the selected compiler binary actually runs (64-bit
	toolchains may be detected on 32-bit hosts where they cannot execute).
	Returns (bin-paths, include-dirs, lib-dirs) or fails via conf.fatal.
	"""
	debug('msvc: get_msvc_version: ' + compiler + ' ' + version + ' ' + target + ' ...')
	batfile = os.path.join(conf.blddir, 'waf-print-msvc.bat')
	f = open(batfile, 'w')
	f.write("""@echo off
set INCLUDE=
set LIB=
call "%s" %s
echo PATH=%%PATH%%
echo INCLUDE=%%INCLUDE%%
echo LIB=%%LIB%%
""" % (vcvars,target))
	f.close()
	sout = Utils.cmd_output(['cmd', '/E:on', '/V:on', '/C', batfile])
	lines = sout.splitlines()
	# the first output line identifies a working environment script
	for x in ('Setting environment', 'Setting SDK environment', 'Intel(R) C++ Compiler'):
		if lines[0].find(x) != -1:
			break
	else:
		debug('msvc: get_msvc_version: %r %r %r -> not found' % (compiler, version, target))
		conf.fatal('msvc: Impossible to find a valid architecture for building (in get_msvc_version)')
	for line in lines[1:]:
		if line.startswith('PATH='):
			path = line[5:]
			MSVC_PATH = path.split(';')
		elif line.startswith('INCLUDE='):
			MSVC_INCDIR = [i for i in line[8:].split(';') if i]
		elif line.startswith('LIB='):
			MSVC_LIBDIR = [i for i in line[4:].split(';') if i]
	# Check if the compiler is usable at all.
	# The detection may return 64-bit versions even on 32-bit systems, and these would fail to run.
	env = {}
	env.update(os.environ)
	env.update(PATH = path)
	compiler_name, linker_name, lib_name = _get_prog_names(conf, compiler)
	cxx = conf.find_program(compiler_name, path_list=MSVC_PATH)
	# delete CL if exists. because it could contain parameters wich can change cl's behaviour rather catastrophically.
	if env.has_key('CL'):
		del(env['CL'])
	try:
		p = pproc.Popen([cxx, '/help'], env=env, stdout=pproc.PIPE, stderr=pproc.PIPE)
		out, err = p.communicate()
		if p.returncode != 0:
			raise Exception('return code: %r: %r' % (p.returncode, err))
	except Exception, e:
		debug('msvc: get_msvc_version: %r %r %r -> failure' % (compiler, version, target))
		debug(str(e))
		conf.fatal('msvc: cannot run the compiler (in get_msvc_version)')
	else:
		debug('msvc: get_msvc_version: %r %r %r -> OK' % (compiler, version, target))
		return (MSVC_PATH, MSVC_INCDIR, MSVC_LIBDIR)
@conf
def gather_wsdk_versions(conf, versions):
    """Scan the registry for installed Windows SDKs and append
    ('wsdk <version>', targets) entries to the *versions* list."""
    version_pattern = re.compile('^v..?.?\...?.?')
    # Prefer the Wow6432node view (64-bit hosts), fall back to the plain key.
    try:
        all_versions = _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Wow6432node\\Microsoft\\Microsoft SDKs\\Windows')
    except WindowsError:
        try:
            all_versions = _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Microsoft\\Microsoft SDKs\\Windows')
        except WindowsError:
            return
    index = 0
    while 1:
        try:
            version = _winreg.EnumKey(all_versions, index)
        except WindowsError:
            # EnumKey raises when the subkeys are exhausted
            break
        index = index + 1
        if not version_pattern.match(version):
            continue
        try:
            msvc_version = _winreg.OpenKey(all_versions, version)
            path,type = _winreg.QueryValueEx(msvc_version,'InstallationFolder')
        except WindowsError:
            continue
        if os.path.isfile(os.path.join(path, 'bin', 'SetEnv.cmd')):
            targets = []
            # probe each platform; get_msvc_version raises for unusable ones
            for target,arch in all_msvc_platforms:
                try:
                    targets.append((target, (arch, conf.get_msvc_version('wsdk', version, '/'+target, os.path.join(path, 'bin', 'SetEnv.cmd')))))
                except Configure.ConfigurationError:
                    pass
            versions.append(('wsdk ' + version[1:], targets))
@conf
def gather_msvc_versions(conf, versions):
    """Scan the registry for Visual Studio / VC Express installations (plus
    Windows CE SDKs) and append ('msvc <version>', targets) and
    ('<device> <version>', cetargets) entries to *versions*."""
    # checks SmartPhones SDKs
    try:
        ce_sdk = _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Wow6432node\\Microsoft\\Windows CE Tools\\SDKs')
    except WindowsError:
        try:
            ce_sdk = _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Microsoft\\Windows CE Tools\\SDKs')
        except WindowsError:
            ce_sdk = ''
    if ce_sdk:
        supported_wince_platforms = []
        ce_index = 0
        while 1:
            try:
                sdk_device = _winreg.EnumKey(ce_sdk, ce_index)
            except WindowsError:
                break
            ce_index = ce_index + 1
            sdk = _winreg.OpenKey(ce_sdk, sdk_device)
            path,type = _winreg.QueryValueEx(sdk, 'SDKRootDir')
            path=str(path)
            # the device name is the last (non-empty) path component
            path,device = os.path.split(path)
            if not device:
                path,device = os.path.split(path)
            for arch,compiler in all_wince_platforms:
                platforms = []
                if os.path.isdir(os.path.join(path, device, 'Lib', arch)):
                    platforms.append((arch, compiler, os.path.join(path, device, 'Include', arch), os.path.join(path, device, 'Lib', arch)))
                if platforms:
                    supported_wince_platforms.append((device, platforms))
    # checks MSVC
    version_pattern = re.compile('^..?\...?')
    for vcver,vcvar in [('VCExpress','exp'), ('VisualStudio','')]:
        try:
            all_versions = _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Wow6432node\\Microsoft\\'+vcver)
        except WindowsError:
            try:
                all_versions = _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Microsoft\\'+vcver)
            except WindowsError:
                continue
        index = 0
        while 1:
            try:
                version = _winreg.EnumKey(all_versions, index)
            except WindowsError:
                break
            index = index + 1
            if not version_pattern.match(version):
                continue
            try:
                msvc_version = _winreg.OpenKey(all_versions, version + "\\Setup\\VS")
                path,type = _winreg.QueryValueEx(msvc_version, 'ProductDir')
                path=str(path)
                targets = []
                if ce_sdk:
                    # cross-compilers for the detected Windows CE devices
                    for device,platforms in supported_wince_platforms:
                        cetargets = []
                        for platform,compiler,include,lib in platforms:
                            winCEpath = os.path.join(path, 'VC', 'ce')
                            if os.path.isdir(winCEpath):
                                # host tool dirs come from the x86 environment
                                common_bindirs,_1,_2 = conf.get_msvc_version('msvc', version, 'x86', os.path.join(path, 'Common7', 'Tools', 'vsvars32.bat'))
                                if os.path.isdir(os.path.join(winCEpath, 'lib', platform)):
                                    bindirs = [os.path.join(winCEpath, 'bin', compiler), os.path.join(winCEpath, 'bin', 'x86_'+compiler)] + common_bindirs
                                    incdirs = [include, os.path.join(winCEpath, 'include'), os.path.join(winCEpath, 'atlmfc', 'include')]
                                    libdirs = [lib, os.path.join(winCEpath, 'lib', platform), os.path.join(winCEpath, 'atlmfc', 'lib', platform)]
                                    cetargets.append((platform, (platform, (bindirs,incdirs,libdirs))))
                        versions.append((device+' '+version, cetargets))
                if os.path.isfile(os.path.join(path, 'VC', 'vcvarsall.bat')):
                    # VS 2005+ layout: one vcvarsall handles every target
                    for target,realtarget in all_msvc_platforms[::-1]:
                        try:
                            targets.append((target, (realtarget, conf.get_msvc_version('msvc', version, target, os.path.join(path, 'VC', 'vcvarsall.bat')))))
                        except:
                            pass
                elif os.path.isfile(os.path.join(path, 'Common7', 'Tools', 'vsvars32.bat')):
                    # older layout: x86 only
                    try:
                        targets.append(('x86', ('x86', conf.get_msvc_version('msvc', version, 'x86', os.path.join(path, 'Common7', 'Tools', 'vsvars32.bat')))))
                    except Configure.ConfigurationError:
                        pass
                versions.append(('msvc '+version, targets))
            except WindowsError:
                continue
@conf
def gather_icl_versions(conf, versions):
    """Scan the registry for Intel C++ compiler installations and append
    ('intel <major>', targets) entries to *versions*."""
    version_pattern = re.compile('^...?.?\....?.?')
    try:
        all_versions = _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Wow6432node\\Intel\\Compilers\\C++')
    except WindowsError:
        try:
            all_versions = _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Intel\\Compilers\\C++')
        except WindowsError:
            return
    index = 0
    while 1:
        try:
            version = _winreg.EnumKey(all_versions, index)
        except WindowsError:
            break
        index = index + 1
        if not version_pattern.match(version):
            continue
        targets = []
        for target,arch in all_icl_platforms:
            try:
                icl_version = _winreg.OpenKey(all_versions, version+'\\'+target)
                path,type = _winreg.QueryValueEx(icl_version,'ProductDir')
                if os.path.isfile(os.path.join(path, 'bin', 'iclvars.bat')):
                    try:
                        targets.append((target, (arch, conf.get_msvc_version('intel', version, target, os.path.join(path, 'bin', 'iclvars.bat')))))
                    except Configure.ConfigurationError:
                        pass
            except WindowsError:
                continue
        # group by the major version, e.g. '11.0' -> 'intel 11'
        major = version[0:2]
        versions.append(('intel ' + major, targets))
@conf
def get_msvc_versions(conf):
    """Return the list of detected MSVC-family installations, running the
    registry scanners only once and caching the result in the env."""
    if not conf.env['MSVC_INSTALLED_VERSIONS']:
        found = []
        conf.env['MSVC_INSTALLED_VERSIONS'] = found
        for gather in (conf.gather_msvc_versions, conf.gather_wsdk_versions, conf.gather_icl_versions):
            gather(found)
    return conf.env['MSVC_INSTALLED_VERSIONS']
@conf
def print_all_msvc_detected(conf):
    """Log every detected compiler version together with its usable targets."""
    for version, targets in conf.env['MSVC_INSTALLED_VERSIONS']:
        info(version)
        for entry in targets:
            info("\t" + entry[0])
def detect_msvc(conf):
    """Detect the available MSVC installations and select one for use."""
    return setup_msvc(conf, get_msvc_versions(conf))
@conf
def find_lt_names_msvc(self, libname, is_static=False):
    """
    Win32/MSVC specific code to glean out information from libtool la files.
    this function is not attached to the task_gen class

    Returns (libdir, libname, is_static) where every element may be None when
    no .la file describing *libname* was found on self.env['LIBPATH'].
    Raises Utils.WafError for a .la file with neither library_names nor
    old_library entries.
    """
    lt_names=[
        'lib%s.la' % libname,
        '%s.la' % libname,
    ]
    for path in self.env['LIBPATH']:
        for la in lt_names:
            laf=os.path.join(path,la)
            dll=None
            if os.path.exists(laf):
                ltdict=read_la_file(laf)
                lt_libdir=None
                if ltdict.get('libdir', ''):
                    lt_libdir = ltdict['libdir']
                if not is_static and ltdict.get('library_names', ''):
                    # shared build: first entry of library_names is the dll
                    dllnames=ltdict['library_names'].split()
                    dll=dllnames[0].lower()
                    dll=re.sub('\.dll$', '', dll)
                    return (lt_libdir, dll, False)
                elif ltdict.get('old_library', ''):
                    olib=ltdict['old_library']
                    if os.path.exists(os.path.join(path,olib)):
                        return (path, olib, True)
                    # bugfix: lt_libdir may still be None here; the previous
                    # test (lt_libdir != '') let None through into
                    # os.path.join and crashed
                    elif lt_libdir and os.path.exists(os.path.join(lt_libdir,olib)):
                        return (lt_libdir, olib, True)
                    else:
                        return (None, olib, True)
                else:
                    raise Utils.WafError('invalid libtool object file: %s' % laf)
    return (None, None, None)
@conf
def libname_msvc(self, libname, is_static=False, mandatory=False):
    """Translate a unix-ish library name into the name MSVC should link with.

    Checks system libraries, libtool .la files and a set of common file name
    patterns on self.env['LIBPATH'].  Returns the name without a '.lib'
    suffix, or None for the math library 'm' (built into the MSVC CRT).
    Calls self.fatal() when *mandatory* and nothing was found.
    """
    lib = libname.lower()
    lib = re.sub('\.lib$','',lib)
    if lib in g_msvc_systemlibs:
        return lib
    lib=re.sub('^lib','',lib)
    if lib == 'm':
        # no separate libm with MSVC
        return None
    (lt_path, lt_libname, lt_static) = self.find_lt_names_msvc(lib, is_static)
    if lt_path != None and lt_libname != None:
        if lt_static == True:
            # file existance check has been made by find_lt_names
            return os.path.join(lt_path,lt_libname)
    if lt_path != None:
        _libpaths=[lt_path] + self.env['LIBPATH']
    else:
        _libpaths=self.env['LIBPATH']
    # candidate file names, most specific first
    static_libs=[
        'lib%ss.lib' % lib,
        'lib%s.lib' % lib,
        '%ss.lib' % lib,
        '%s.lib' %lib,
        ]
    dynamic_libs=[
        'lib%s.dll.lib' % lib,
        'lib%s.dll.a' % lib,
        '%s.dll.lib' % lib,
        '%s.dll.a' % lib,
        'lib%s_d.lib' % lib,
        '%s_d.lib' % lib,
        '%s.lib' %lib,
        ]
    libnames=static_libs
    if not is_static:
        libnames=dynamic_libs + static_libs
    for path in _libpaths:
        for libn in libnames:
            if os.path.exists(os.path.join(path, libn)):
                debug('msvc: lib found: %s' % os.path.join(path,libn))
                return re.sub('\.lib$', '',libn)
    #if no lib can be found, just return the libname as msvc expects it
    if mandatory:
        self.fatal("The library %r could not be found" % libname)
    return re.sub('\.lib$', '', libname)
@conf
def check_lib_msvc(self, libname, is_static=False, uselib_store=None, mandatory=False):
    "This is the api to use"
    resolved = self.libname_msvc(libname, is_static, mandatory)
    store = uselib_store
    if not store:
        store = libname.upper()
    # Note: ideally we should be able to place the lib in the right env var,
    # either STATICLIB or LIB, but we don't distinguish static libs from
    # shared libs.  This is ok since msvc doesn't have any special linker
    # flag to select static libs (no env['STATICLIB_MARKER']), so everything
    # goes into LIB_<store>.
    self.env['LIB_' + store] = [resolved]
@conf
def check_libs_msvc(self, libnames, is_static=False, mandatory=False):
    """Run check_lib_msvc for every name in *libnames* (string or list)."""
    for name in Utils.to_list(libnames):
        self.check_lib_msvc(name, is_static, mandatory=mandatory)
@conftest
def no_autodetect(conf):
    """Run the standard msvc configuration steps, skipping autodetection."""
    rules = detect.replace('autodetect', '')
    conf.eval_rules(rules)
# Ordered list of configuration steps evaluated by conf.check_tool('msvc');
# no_autodetect() strips the 'autodetect' entry before evaluation.
detect = '''
autodetect
find_msvc
msvc_common_flags
cc_load_tools
cxx_load_tools
cc_add_flags
cxx_add_flags
'''
@conftest
def autodetect(conf):
    """Detect an MSVC installation and record its paths in the env."""
    compiler, path, includes, libdirs = detect_msvc(conf)
    env = conf.env
    env['PATH'] = path
    env['CPPPATH'] = includes
    env['LIBPATH'] = libdirs
    env['MSVC_COMPILER'] = compiler
def _get_prog_names(conf, compiler):
if compiler=='intel':
compiler_name = 'ICL'
linker_name = 'XILINK'
lib_name = 'XILIB'
else:
# assumes CL.exe
compiler_name = 'CL'
linker_name = 'LINK'
lib_name = 'LIB'
return compiler_name, linker_name, lib_name
@conftest
def find_msvc(conf):
    """Locate the MSVC compiler, linker, archiver, manifest tool and resource
    compiler, and record them in conf.env.  Aborts off native Win32."""
    # due to path format limitations, limit operation only to native Win32. Yeah it sucks.
    if sys.platform != 'win32':
        conf.fatal('MSVC module only works under native Win32 Python! cygwin is not supported yet')
    v = conf.env
    compiler, path, includes, libdirs = detect_msvc(conf)
    v['PATH'] = path
    v['CPPPATH'] = includes
    v['LIBPATH'] = libdirs
    compiler_name, linker_name, lib_name = _get_prog_names(conf, compiler)
    # compiler
    cxx = None
    if v['CXX']: cxx = v['CXX']
    elif 'CXX' in conf.environ: cxx = conf.environ['CXX']
    if not cxx: cxx = conf.find_program(compiler_name, var='CXX', path_list=path)
    if not cxx: conf.fatal('%s was not found (compiler)' % compiler_name)
    cxx = conf.cmd_to_list(cxx)
    # before setting anything, check if the compiler is really msvc
    env = dict(conf.environ)
    env.update(PATH = ';'.join(path))
    # NOTE(review): cxx is a list here (cmd_to_list above), so [cxx, ...]
    # nests a list inside the argument list — verify Utils.cmd_output
    # flattens/accepts this shape
    if not Utils.cmd_output([cxx, '/nologo', '/?'], silent=True, env=env):
        conf.fatal('the msvc compiler could not be identified')
    # c/c++ compiler
    v['CC'] = v['CXX'] = cxx
    v['CC_NAME'] = v['CXX_NAME'] = 'msvc'
    # environment flags
    try: v.prepend_value('CPPPATH', conf.environ['INCLUDE'])
    except KeyError: pass
    try: v.prepend_value('LIBPATH', conf.environ['LIB'])
    except KeyError: pass
    # linker
    if not v['LINK_CXX']:
        link = conf.find_program(linker_name, path_list=path)
        if link: v['LINK_CXX'] = link
        else: conf.fatal('%s was not found (linker)' % linker_name)
        v['LINK'] = link
    if not v['LINK_CC']: v['LINK_CC'] = v['LINK_CXX']
    # staticlib linker
    if not v['AR']:
        stliblink = conf.find_program(lib_name, path_list=path)
        if not stliblink: return
        v['AR'] = stliblink
        v['ARFLAGS'] = ['/NOLOGO']
    # manifest tool. Not required for VS 2003 and below. Must have for VS 2005 and later
    manifesttool = conf.find_program('MT', path_list=path)
    if manifesttool:
        v['MT'] = manifesttool
        v['MTFLAGS'] = ['/NOLOGO']
    conf.check_tool('winres')
    if not conf.env['WINRC']:
        warn('Resource compiler not found. Compiling resource file is disabled')
@conftest
def msvc_common_flags(conf):
    """Populate conf.env with the flag templates and default flags used by
    the MSVC toolchain (compile, link, shared/static library patterns)."""
    v = conf.env
    v['CPPFLAGS'] = ['/W3', '/nologo', '/EHsc']
    # templates used to expand preprocessor defines on the command line
    v['CCDEFINES_ST'] = '/D%s'
    v['CXXDEFINES_ST'] = '/D%s'
    # TODO just use _WIN32, which defined by the compiler itself!
    v['CCDEFINES'] = ['WIN32'] # avoid using this, any compiler predefines the _WIN32 marcro anyway
    v['CXXDEFINES'] = ['WIN32'] # avoid using this, any compiler predefines the _WIN32 marcro anyway
    v['_CCINCFLAGS'] = []
    v['_CCDEFFLAGS'] = []
    v['_CXXINCFLAGS'] = []
    v['_CXXDEFFLAGS'] = []
    v['CC_SRC_F'] = ''
    v['CC_TGT_F'] = ['/c', '/Fo']
    v['CXX_SRC_F'] = ''
    v['CXX_TGT_F'] = ['/c', '/Fo']
    v['CPPPATH_ST'] = '/I%s' # template for adding include paths
    v['AR_TGT_F'] = v['CCLNK_TGT_F'] = v['CXXLNK_TGT_F'] = '/OUT:'
    # Subsystem specific flags
    v['CPPFLAGS_CONSOLE'] = ['/SUBSYSTEM:CONSOLE']
    v['CPPFLAGS_NATIVE'] = ['/SUBSYSTEM:NATIVE']
    v['CPPFLAGS_POSIX'] = ['/SUBSYSTEM:POSIX']
    v['CPPFLAGS_WINDOWS'] = ['/SUBSYSTEM:WINDOWS']
    v['CPPFLAGS_WINDOWSCE'] = ['/SUBSYSTEM:WINDOWSCE']
    # CRT specific flags
    v['CPPFLAGS_CRT_MULTITHREADED'] = ['/MT']
    v['CPPFLAGS_CRT_MULTITHREADED_DLL'] = ['/MD']
    # TODO these are defined by the compiler itself!
    v['CPPDEFINES_CRT_MULTITHREADED'] = ['_MT'] # this is defined by the compiler itself!
    v['CPPDEFINES_CRT_MULTITHREADED_DLL'] = ['_MT', '_DLL'] # these are defined by the compiler itself!
    v['CPPFLAGS_CRT_MULTITHREADED_DBG'] = ['/MTd']
    v['CPPFLAGS_CRT_MULTITHREADED_DLL_DBG'] = ['/MDd']
    # TODO these are defined by the compiler itself!
    v['CPPDEFINES_CRT_MULTITHREADED_DBG'] = ['_DEBUG', '_MT'] # these are defined by the compiler itself!
    v['CPPDEFINES_CRT_MULTITHREADED_DLL_DBG'] = ['_DEBUG', '_MT', '_DLL'] # these are defined by the compiler itself!
    # compiler debug levels
    v['CCFLAGS'] = ['/TC']
    v['CCFLAGS_OPTIMIZED'] = ['/O2', '/DNDEBUG']
    v['CCFLAGS_RELEASE'] = ['/O2', '/DNDEBUG']
    # TODO _DEBUG is defined by the compiler itself!
    v['CCFLAGS_DEBUG'] = ['/Od', '/RTC1', '/D_DEBUG', '/ZI']
    v['CCFLAGS_ULTRADEBUG'] = ['/Od', '/RTC1', '/D_DEBUG', '/ZI']
    v['CXXFLAGS'] = ['/TP']
    v['CXXFLAGS_OPTIMIZED'] = ['/O2', '/DNDEBUG']
    v['CXXFLAGS_RELEASE'] = ['/O2', '/DNDEBUG']
    # TODO _DEBUG is defined by the compiler itself!
    v['CXXFLAGS_DEBUG'] = ['/Od', '/RTC1', '/D_DEBUG', '/ZI']
    v['CXXFLAGS_ULTRADEBUG'] = ['/Od', '/RTC1', '/D_DEBUG', '/ZI']
    # linker
    v['LIB'] = []
    v['LIB_ST'] = '%s.lib' # template for adding libs
    v['LIBPATH_ST'] = '/LIBPATH:%s' # template for adding libpaths
    v['STATICLIB_ST'] = 'lib%s.lib' # Note: to be able to distinguish between a static lib and a dll import lib, it's a good pratice to name the static lib 'lib%s.lib' and the dll import lib '%s.lib'
    v['STATICLIBPATH_ST'] = '/LIBPATH:%s'
    v['LINKFLAGS'] = ['/NOLOGO', '/MANIFEST']
    # shared library
    v['shlib_CCFLAGS'] = ['']
    v['shlib_CXXFLAGS'] = ['']
    v['shlib_LINKFLAGS']= ['/DLL']
    v['shlib_PATTERN'] = '%s.dll'
    v['implib_PATTERN'] = '%s.lib'
    v['IMPLIB_ST'] = '/IMPLIB:%s'
    # static library
    v['staticlib_LINKFLAGS'] = ['']
    v['staticlib_PATTERN'] = 'lib%s.lib' # Note: to be able to distinguish between a static lib and a dll import lib, it's a good pratice to name the static lib 'lib%s.lib' and the dll import lib '%s.lib'
    # program
    v['program_PATTERN'] = '%s.exe'
#######################################################################################################
##### conf above, build below
@after('apply_link')
@feature('cc', 'cxx')
def apply_flags_msvc(self):
    """Add the /subsystem flag when requested, and register the .pdb file as
    an extra link output (and install target) for debug link flags."""
    if self.env.CC_NAME != 'msvc':
        return
    subsystem = getattr(self, 'subsystem', '')
    if subsystem:
        subsystem = '/subsystem:%s' % subsystem
        # static libs get it on the archiver, everything else on the linker
        flags = 'cstaticlib' in self.features and 'ARFLAGS' or 'LINKFLAGS'
        self.env.append_value(flags, subsystem)
    if 'cstaticlib' not in self.features:
        for d in (f.lower() for f in self.env.LINKFLAGS):
            # matches '/debug' or '-debug'
            if d[1:] == 'debug':
                pdbnode = self.link_task.outputs[0].change_ext('.pdb')
                pdbfile = pdbnode.bldpath(self.env)
                self.link_task.outputs.append(pdbnode)
                self.bld.install_files(self.install_path, [pdbnode], env=self.env)
                break
@feature('cprogram', 'cshlib', 'cstaticlib')
@after('apply_lib_vars')
@before('apply_obj_vars')
def apply_obj_vars_msvc(self):
    """MSVC replacement for the generic apply_obj_vars: expand LIBPATH,
    STATICLIB and LIB env values into MSVC-style LINKFLAGS."""
    if self.env['CC_NAME'] != 'msvc':
        return
    try:
        # suppress the generic method; this one takes over
        self.meths.remove('apply_obj_vars')
    except ValueError:
        pass
    libpaths = getattr(self, 'libpaths', [])
    if not libpaths: self.libpaths = libpaths
    env = self.env
    app = env.append_unique
    cpppath_st = env['CPPPATH_ST']
    lib_st = env['LIB_ST']
    staticlib_st = env['STATICLIB_ST']
    libpath_st = env['LIBPATH_ST']
    staticlibpath_st = env['STATICLIBPATH_ST']
    for i in env['LIBPATH']:
        app('LINKFLAGS', libpath_st % i)
        if not libpaths.count(i):
            libpaths.append(i)
    # second pass with the static template (same /LIBPATH: value by default)
    for i in env['LIBPATH']:
        app('LINKFLAGS', staticlibpath_st % i)
        if not libpaths.count(i):
            libpaths.append(i)
    # i doubt that anyone will make a fully static binary anyway
    if not env['FULLSTATIC']:
        if env['STATICLIB'] or env['LIB']:
            app('LINKFLAGS', env['SHLIB_MARKER']) # TODO does SHLIB_MARKER work?
    for i in env['STATICLIB']:
        app('LINKFLAGS', staticlib_st % i)
    for i in env['LIB']:
        app('LINKFLAGS', lib_st % i)
# split the manifest file processing from the link task, like for the rc processing
# split the manifest file processing from the link task, like for the rc processing
@feature('cprogram', 'cshlib')
@after('apply_link')
def apply_manifest(self):
    """Special linker for MSVC with support for embedding manifests into DLL's
    and executables compiled by Visual Studio 2005 or probably later. Without
    the manifest file, the binaries are unusable.
    See: http://msdn2.microsoft.com/en-us/library/ms235542(VS.80).aspx
    Problems with this tool: it is always called whether MSVC creates manifests or not."""
    if self.env.CC_NAME != 'msvc':
        return
    # the msvc_manifest task (exec_mf below) runs after the link task
    tsk = self.create_task('msvc_manifest')
    tsk.set_inputs(self.link_task.outputs[0])
def exec_mf(self):
    """Task body for 'msvc_manifest': embed <binary>.manifest into the linked
    binary with mt.exe when the manifest file exists.  Returns the exit code
    of mt.exe, 0 when no manifest tool is configured, or None when there is
    no manifest file to embed."""
    env = self.env
    outfile = self.inputs[0].bldpath(env)
    manifest = outfile + '.manifest'
    if os.path.exists(manifest):
        debug('msvc: manifesttool')
        mtool = env['MT']
        if not mtool:
            return 0
        mode = ''
        # embedding mode. Different for EXE's and DLL's.
        # see: http://msdn2.microsoft.com/en-us/library/ms235591(VS.80).aspx
        if 'cprogram' in self.generator.features:
            mode = '1'
        elif 'cshlib' in self.generator.features:
            mode = '2'
        debug('msvc: embedding manifest')
        #flags = ' '.join(env['MTFLAGS'] or [])
        lst = []
        lst.extend(Utils.to_list(env['MT']))
        lst.extend(Utils.to_list(env['MTFLAGS']))
        lst.extend(Utils.to_list("-manifest"))
        lst.extend(Utils.to_list(manifest))
        lst.extend(Utils.to_list("-outputresource:%s;%s" % (outfile, mode)))
        #cmd='%s %s -manifest "%s" -outputresource:"%s";#%s' % (mtool, flags,
        #	manifest, outfile, mode)
        lst = [lst]
        ret = self.exec_command(*lst)
        return ret
# register the task class; quiet=1 suppresses the default console output
cls = Task.task_type_from_func('msvc_manifest', vars=['MT', 'MTFLAGS'], color='BLUE', func=exec_mf, ext_in='.bin')
cls.quiet = 1
########## stupid evil command modification: concatenate the tokens /Fx, /doc, and /x: with the next token
########## stupid evil command modification: concatenate the tokens /Fx, /doc, and /x: with the next token
def exec_command_msvc(self, *k, **kw):
    "instead of quoting all the paths and keep using the shell, we can just join the options msvc is interested in"
    if self.env['CC_NAME'] == 'msvc':
        if isinstance(k[0], list):
            lst = []
            carry = ''
            for a in k[0]:
                # hold back /F? (3 chars), /doc, or any token ending in ':'
                # and glue it onto the following token
                if len(a) == 3 and a.startswith('/F') or a == '/doc' or a[-1] == ':':
                    carry = a
                else:
                    lst.append(carry + a)
                    carry = ''
            k = [lst]
        # run the command with the detected MSVC PATH
        env = dict(os.environ)
        env.update(PATH = ';'.join(self.env['PATH']))
        kw['env'] = env
    return self.generator.bld.exec_command(*k, **kw)
# patch the relevant task classes so their commands go through the wrapper
for k in 'cc cxx winrc cc_link cxx_link static_link'.split():
    cls = Task.TaskBase.classes.get(k, None)
    if cls:
        cls.exec_command = exec_command_msvc

52
tools/wafadmin/Tools/nasm.py

@ -0,0 +1,52 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2008
"""
Nasm processing
"""
import os
import TaskGen, Task, Utils
from TaskGen import taskgen, before, extension
# Command template for the 'nasm' task and the source extensions it handles.
nasm_str = '${NASM} ${NASM_FLAGS} ${NASM_INCLUDES} ${SRC} -o ${TGT}'
EXT_NASM = ['.s', '.S', '.asm', '.ASM', '.spp', '.SPP']
@before('apply_link')
def apply_nasm_vars(self):
    """Turn the task generator's nasm_flags/includes attributes into
    NASM_FLAGS and NASM_INCLUDES env values."""
    # flags
    if hasattr(self, 'nasm_flags'):
        for flag in self.to_list(self.nasm_flags):
            self.env.append_value('NASM_FLAGS', flag)
    # includes - well, if we suppose it works with c processing
    if hasattr(self, 'includes'):
        for inc in self.to_list(self.includes):
            node = self.path.find_dir(inc)
            if not node:
                raise Utils.WafError('cannot find the dir' + inc)
            # add both the source and the build variants of the directory
            self.env.append_value('NASM_INCLUDES', '-I%s' % node.srcpath(self.env))
            self.env.append_value('NASM_INCLUDES', '-I%s' % node.bldpath(self.env))
@extension(EXT_NASM)
def nasm_file(self, node):
    """Create a 'nasm' task compiling *node* into an object file and schedule
    apply_nasm_vars to run for this generator."""
    try: obj_ext = self.obj_ext
    except AttributeError: obj_ext = '_%d.o' % self.idx
    task = self.create_task('nasm')
    task.inputs = [node]
    task.outputs = [node.change_ext(obj_ext)]
    self.compiled_tasks.append(task)
    self.meths.append('apply_nasm_vars')
# create our action here
Task.simple_task_type('nasm', nasm_str, color='BLUE', ext_out='.o', shell=False)
def detect(conf):
    """Configure nasm: locate a 'nasm' (or 'yasm') binary and store it in
    conf.env['NASM'].  Aborts the configuration when neither assembler is
    found (mandatory=True)."""
    # find_program stores the result via var='NASM'; the previous unused
    # local binding of the return value was dropped
    conf.find_program(['nasm', 'yasm'], var='NASM', mandatory=True)

313
tools/wafadmin/Tools/ocaml.py

@ -0,0 +1,313 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2006 (ita)
"ocaml support"
import os, re
import TaskGen, Utils, Task, Build
from Logs import error
from TaskGen import taskgen, feature, before, after, extension
# Source extensions dispatched to the hooks below.
EXT_MLL = ['.mll']   # ocamllex lexer definitions
EXT_MLY = ['.mly']   # ocamlyacc parser definitions
EXT_MLI = ['.mli']   # interface files
EXT_MLC = ['.c']     # C stubs compiled through ocamlopt
EXT_ML = ['.ml']     # regular OCaml sources
# 'open Foo' statements (dependency scanning) and an OCaml lexer regex
# matching comment delimiters, string/char literals and plain text runs
open_re = re.compile('^\s*open\s+([a-zA-Z]+)(;;){0,1}$', re.M)
foo = re.compile(r"""(\(\*)|(\*\))|("(\\.|[^"\\])*"|'(\\.|[^'\\])*'|.[^()*"'\\]*)""", re.M)
def filter_comments(txt):
    """Return *txt* with all OCaml (* ... *) comments removed, honouring
    nesting; string and character literals are preserved verbatim."""
    depth = [0]
    def _sub(match):
        if match.group(1):
            depth[0] += 1
        elif match.group(2):
            depth[0] -= 1
        elif depth[0] == 0:
            # plain text outside any comment: keep it
            return match.group(0)
        return ''
    return foo.sub(_sub, txt)
def scan(self):
    """Dependency scanner for OCaml compile tasks: parse 'open X' statements
    from the (comment-stripped) source and resolve them on self.incpaths.

    Returns (found_nodes, unresolved_names) as waf scanners expect.
    """
    node = self.inputs[0]
    code = filter_comments(node.read(self.env))
    global open_re
    names = []
    import_iterator = open_re.finditer(code)
    if import_iterator:
        for import_match in import_iterator:
            names.append(import_match.group(1))
    found_lst = []
    raw_lst = []
    for name in names:
        nd = None
        for x in self.incpaths:
            # module Foo usually lives in foo.ml, occasionally Foo.ml
            nd = x.find_resource(name.lower()+'.ml')
            if not nd: nd = x.find_resource(name+'.ml')
            if nd:
                found_lst.append(nd)
                break
        else:
            raw_lst.append(name)
    return (found_lst, raw_lst)
# task-generator 'type' values that trigger native / bytecode compilation
native_lst=['native', 'all', 'c_object']
bytecode_lst=['bytecode', 'all']
class ocaml_taskgen(TaskGen.task_gen):
    """Task generator for the 'ocaml' feature; behaviour is provided by the
    feature methods below, this subclass only fixes the name."""
    def __init__(self, *k, **kw):
        TaskGen.task_gen.__init__(self, *k, **kw)
@feature('ocaml')
def init_ml(self):
    """Give the task generator default values for all attributes the other
    'ocaml' feature methods rely on."""
    Utils.def_attrs(self,
        type = 'all',
        incpaths_lst = [],
        bld_incpaths_lst = [],
        mlltasks = [],
        mlytasks = [],
        mlitasks = [],
        native_tasks = [],
        bytecode_tasks = [],
        linktasks = [],
        bytecode_env = None,
        native_env = None,
        compiled_tasks = [],
        includes = '',
        uselib = '',
        are_deps_set = 0)
@feature('ocaml')
@after('init_ml')
def init_envs_ml(self):
    """Create per-variant environment copies (native and/or bytecode)
    according to self.type, adding '-a' for libraries."""
    self.islibrary = getattr(self, 'islibrary', False)
    global native_lst, bytecode_lst
    self.native_env = None
    if self.type in native_lst:
        self.native_env = self.env.copy()
        if self.islibrary: self.native_env['OCALINKFLAGS'] = '-a'
    self.bytecode_env = None
    if self.type in bytecode_lst:
        self.bytecode_env = self.env.copy()
        if self.islibrary: self.bytecode_env['OCALINKFLAGS'] = '-a'
    if self.type == 'c_object':
        # produce an object file linkable from C
        self.native_env.append_unique('OCALINKFLAGS_OPT', '-output-obj')
@feature('ocaml')
@before('apply_vars_ml')
@after('init_envs_ml')
def apply_incpaths_ml(self):
    """Resolve self.includes into directory nodes, filling incpaths_lst
    (for -I flags) and bld_incpaths_lst (for dependency scanning)."""
    inc_lst = self.includes.split()
    lst = self.incpaths_lst
    for dir in inc_lst:
        node = self.path.find_dir(dir)
        if not node:
            error("node not found: " + str(dir))
            continue
        self.bld.rescan(node)
        if not node in lst: lst.append(node)
        self.bld_incpaths_lst.append(node)
    # now the nodes are added to self.incpaths_lst
@feature('ocaml')
@before('apply_core')
def apply_vars_ml(self):
    """Translate include nodes into '-I dir' OCAMLPATH pairs and merge the
    uselib variables into the variant environments."""
    for i in self.incpaths_lst:
        if self.bytecode_env:
            app = self.bytecode_env.append_value
            app('OCAMLPATH', '-I')
            app('OCAMLPATH', i.srcpath(self.env))
            app('OCAMLPATH', '-I')
            app('OCAMLPATH', i.bldpath(self.env))
        if self.native_env:
            app = self.native_env.append_value
            app('OCAMLPATH', '-I')
            app('OCAMLPATH', i.bldpath(self.env))
            app('OCAMLPATH', '-I')
            app('OCAMLPATH', i.srcpath(self.env))
    # propagate e.g. INCLUDES_FOO for each name in self.uselib
    varnames = ['INCLUDES', 'OCAMLFLAGS', 'OCALINKFLAGS', 'OCALINKFLAGS_OPT']
    for name in self.uselib.split():
        for vname in varnames:
            cnt = self.env[vname+'_'+name]
            if cnt:
                if self.bytecode_env: self.bytecode_env.append_value(vname, cnt)
                if self.native_env: self.native_env.append_value(vname, cnt)
@feature('ocaml')
@after('apply_core')
def apply_link_ml(self):
    """Create the link task(s): 'ocalink' for bytecode output and 'ocalinkx'
    for native output, choosing the extension from the target type."""
    if self.bytecode_env:
        ext = self.islibrary and '.cma' or '.run'
        linktask = self.create_task('ocalink')
        linktask.bytecode = 1
        linktask.set_outputs(self.path.find_or_declare(self.target + ext))
        linktask.obj = self
        linktask.env = self.bytecode_env
        self.linktasks.append(linktask)
    if self.native_env:
        if self.type == 'c_object': ext = '.o'
        elif self.islibrary: ext = '.cmxa'
        else: ext = ''
        linktask = self.create_task('ocalinkx')
        linktask.set_outputs(self.path.find_or_declare(self.target + ext))
        linktask.obj = self
        linktask.env = self.native_env
        self.linktasks.append(linktask)
        # we produce a .o file to be used by gcc
        self.compiled_tasks.append(linktask)
@extension(EXT_MLL)
def mll_hook(self, node):
    """Run ocamllex on a .mll file; the generated .ml is fed back for
    further processing via self.allnodes."""
    mll_task = self.create_task('ocamllex', self.native_env)
    mll_task.set_inputs(node)
    mll_task.set_outputs(node.change_ext('.ml'))
    self.mlltasks.append(mll_task)
    self.allnodes.append(mll_task.outputs[0])
@extension(EXT_MLY)
def mly_hook(self, node):
    """Run ocamlyacc on a .mly file, producing .ml (re-processed) and .mli
    (compiled to .cmi right away)."""
    mly_task = self.create_task('ocamlyacc', self.native_env)
    mly_task.set_inputs(node)
    mly_task.set_outputs([node.change_ext('.ml'), node.change_ext('.mli')])
    self.mlytasks.append(mly_task)
    self.allnodes.append(mly_task.outputs[0])
    task = self.create_task('ocamlcmi', self.native_env)
    task.set_inputs(mly_task.outputs[1])
    task.set_outputs(mly_task.outputs[1].change_ext('.cmi'))
@extension(EXT_MLI)
def mli_hook(self, node):
    """Compile an interface (.mli) file into a .cmi."""
    task = self.create_task('ocamlcmi', self.native_env)
    task.set_inputs(node)
    task.set_outputs(node.change_ext('.cmi'))
    self.mlitasks.append(task)
@extension(EXT_MLC)
def mlc_hook(self, node):
    """Compile a C stub file through ocamlopt into an object file."""
    task = self.create_task('ocamlcc', self.native_env)
    task.set_inputs(node)
    task.set_outputs(node.change_ext('.o'))
    self.compiled_tasks.append(task)
@extension(EXT_ML)
def ml_hook(self, node):
    """Compile a .ml source for each enabled variant: 'ocamlx' -> .cmx
    (native) and 'ocaml' -> .cmo (bytecode)."""
    if self.native_env:
        task = self.create_task('ocamlx', self.native_env)
        task.set_inputs(node)
        task.set_outputs(node.change_ext('.cmx'))
        task.obj = self
        task.incpaths = self.bld_incpaths_lst
        self.native_tasks.append(task)
    if self.bytecode_env:
        task = self.create_task('ocaml', self.bytecode_env)
        task.set_inputs(node)
        task.obj = self
        task.bytecode = 1
        task.incpaths = self.bld_incpaths_lst
        task.set_outputs(node.change_ext('.cmo'))
        self.bytecode_tasks.append(task)
def compile_may_start(self):
    """runnable_status override for OCaml compile tasks: on the first call,
    derive run-order constraints from the scanned module dependencies so
    that modules compile after the modules they 'open'."""
    if not getattr(self, 'flag_deps', ''):
        self.flag_deps = 1
        # the evil part is that we can only compute the dependencies after the
        # source files can be read (this means actually producing the source files)
        if getattr(self, 'bytecode', ''): alltasks = self.obj.bytecode_tasks
        else: alltasks = self.obj.native_tasks
        self.signature() # ensure that files are scanned - unfortunately
        tree = self.generator.bld
        env = self.env
        for node in self.inputs:
            lst = tree.node_deps[self.unique_id()]
            for depnode in lst:
                for t in alltasks:
                    if t == self: continue
                    if depnode in t.inputs:
                        self.set_run_after(t)
        # TODO necessary to get the signature right - for now
        delattr(self, 'cache_sig')
        self.signature()
    return Task.Task.runnable_status(self)
# Register the OCaml task classes; the compile tasks share the dependency
# scanner and the ordering logic from compile_may_start above.
b = Task.simple_task_type
cls = b('ocamlx', '${OCAMLOPT} ${OCAMLPATH} ${OCAMLFLAGS} ${INCLUDES} -c -o ${TGT} ${SRC}', color='GREEN', shell=False)
cls.runnable_status = compile_may_start
cls.scan = scan
b = Task.simple_task_type
cls = b('ocaml', '${OCAMLC} ${OCAMLPATH} ${OCAMLFLAGS} ${INCLUDES} -c -o ${TGT} ${SRC}', color='GREEN', shell=False)
cls.runnable_status = compile_may_start
cls.scan = scan
# interface compilation, C stubs, and the lexer/parser generators
b('ocamlcmi', '${OCAMLC} ${OCAMLPATH} ${INCLUDES} -o ${TGT} -c ${SRC}', color='BLUE', before="ocaml ocamlcc ocamlx")
b('ocamlcc', 'cd ${TGT[0].bld_dir(env)} && ${OCAMLOPT} ${OCAMLFLAGS} ${OCAMLPATH} ${INCLUDES} -c ${SRC[0].abspath(env)}', color='GREEN')
b('ocamllex', '${OCAMLLEX} ${SRC} -o ${TGT}', color='BLUE', before="ocamlcmi ocaml ocamlcc")
b('ocamlyacc', '${OCAMLYACC} -b ${TGT[0].bld_base(env)} ${SRC}', color='BLUE', before="ocamlcmi ocaml ocamlcc")
def link_may_start(self):
    """runnable_status override for OCaml link tasks: on the first call,
    topologically sort the compile outputs so they are passed to the linker
    in dependency order (ocaml requires this)."""
    if not getattr(self, 'order', ''):
        # now reorder the inputs given the task dependencies
        if getattr(self, 'bytecode', 0): alltasks = self.obj.bytecode_tasks
        else: alltasks = self.obj.native_tasks
        # this part is difficult, we do not have a total order on the tasks
        # if the dependencies are wrong, this may not stop
        seen = []
        pendant = []+alltasks
        while pendant:
            task = pendant.pop(0)
            if task in seen: continue
            for x in task.run_after:
                if not x in seen:
                    # a prerequisite is still pending: retry this task later
                    pendant.append(task)
                    break
            else:
                seen.append(task)
        self.inputs = [x.outputs[0] for x in seen]
        self.order = 1
    return Task.Task.runnable_status(self)
# Link task classes; both reorder their inputs through link_may_start above.
act = b('ocalink', '${OCAMLC} -o ${TGT} ${INCLUDES} ${OCALINKFLAGS} ${SRC}', color='YELLOW', after="ocaml ocamlcc")
act.runnable_status = link_may_start
act = b('ocalinkx', '${OCAMLOPT} -o ${TGT} ${INCLUDES} ${OCALINKFLAGS_OPT} ${SRC}', color='YELLOW', after="ocamlx ocamlcc")
act.runnable_status = link_may_start
def detect(conf):
    """Configure the OCaml toolchain: locate the byte-code and native
    compilers (both required), the lexer/parser generators, and derive the
    standard library path from 'ocamlc -where'."""
    opt = conf.find_program('ocamlopt', var='OCAMLOPT')
    occ = conf.find_program('ocamlc', var='OCAMLC')
    if (not opt) or (not occ):
        conf.fatal('The objective caml compiler was not found:\ninstall it or make it available in your PATH')
    v = conf.env
    v['OCAMLC'] = occ
    v['OCAMLOPT'] = opt
    v['OCAMLLEX'] = conf.find_program('ocamllex', var='OCAMLLEX')
    v['OCAMLYACC'] = conf.find_program('ocamlyacc', var='OCAMLYACC')
    v['OCAMLFLAGS'] = ''
    # 'ocamlc -where' prints the standard library directory; run the
    # subprocess once instead of three times as before
    where = Utils.cmd_output(conf.env['OCAMLC'] + ' -where').strip() + os.sep
    v['OCAMLLIB'] = where
    v['LIBPATH_OCAML'] = where
    v['CPPPATH_OCAML'] = where
    v['LIB_OCAML'] = 'camlrun'

185
tools/wafadmin/Tools/osx.py

@ -0,0 +1,185 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy 2008
"""MacOSX related tools
To compile an executable into a Mac application bundle (a .app), set its 'mac_app' attribute
obj.mac_app = True
To make a bundled shared library (a .bundle), set the 'mac_bundle' attribute:
obj.mac_bundle = True
"""
import os, shutil, sys, platform
import TaskGen, Task, Build, Options, Utils
from TaskGen import taskgen, feature, after, before
from Logs import error, debug
# plist template
app_info = '''
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist SYSTEM "file://localhost/System/Library/DTDs/PropertyList.dtd">
<plist version="0.9">
<dict>
<key>CFBundlePackageType</key>
<string>APPL</string>
<key>CFBundleGetInfoString</key>
<string>Created by Waf</string>
<key>CFBundleSignature</key>
<string>????</string>
<key>NOTE</key>
<string>THIS IS A GENERATED FILE, DO NOT MODIFY</string>
<key>CFBundleExecutable</key>
<string>%s</string>
</dict>
</plist>
'''
# see WAF issue 285
# and also http://trac.macports.org/ticket/17059
# see WAF issue 285
# and also http://trac.macports.org/ticket/17059
@feature('cc', 'cxx')
@before('apply_lib_vars')
def set_macosx_deployment_target(self):
    """Export MACOSX_DEPLOYMENT_TARGET from the env; when unset anywhere,
    default it to the running system's major.minor version on darwin."""
    wanted = self.env['MACOSX_DEPLOYMENT_TARGET']
    if wanted:
        os.environ['MACOSX_DEPLOYMENT_TARGET'] = wanted
    elif 'MACOSX_DEPLOYMENT_TARGET' not in os.environ and sys.platform == 'darwin':
        os.environ['MACOSX_DEPLOYMENT_TARGET'] = '.'.join(platform.mac_ver()[0].split('.')[:2])
@feature('cc', 'cxx')
@after('apply_lib_vars')
def apply_framework(self):
    """Expand FRAMEWORKPATH into -F flags (compile and link) and FRAMEWORK
    into '-framework <name>' link flags."""
    env = self.env
    for fwkpath in self.to_list(env['FRAMEWORKPATH']):
        flag = '-F%s' % fwkpath
        env.append_unique('CXXFLAGS', flag)
        env.append_unique('CCFLAGS', flag)
        env.append_unique('LINKFLAGS', flag)
    for fwk in self.to_list(env['FRAMEWORK']):
        env.append_value('LINKFLAGS', ['-framework', fwk])
@taskgen
def create_bundle_dirs(self, name, out):
    """Return the node for the *name*.app bundle directory next to *out*,
    creating the Contents/MacOS skeleton on first use."""
    bld = self.bld
    dir = out.parent.get_dir(name)
    if not dir:
        # build the <name>/Contents/MacOS directory chain as build nodes
        dir = out.__class__(name, out.parent, 1)
        bld.rescan(dir)
        contents = out.__class__('Contents', dir, 1)
        bld.rescan(contents)
        macos = out.__class__('MacOS', contents, 1)
        bld.rescan(macos)
    return dir
def bundle_name_for_output(out):
    """Derive the .app bundle name from an output node: replace the file
    extension with '.app', or append '.app' when there is none."""
    base = out.name
    dot = base.rfind('.')
    if dot < 0:
        return base + '.app'
    return base[:dot] + '.app'
@taskgen
@after('apply_link')
@feature('cprogram')
def create_task_macapp(self):
    """Use env['MACAPP'] to force *all* executables to be transformed into Mac applications
    or use obj.mac_app = True to build specific targets as Mac apps"""
    if self.env['MACAPP'] or getattr(self, 'mac_app', False):
        # copy the linked binary into <name>.app/Contents/MacOS/
        apptask = self.create_task('macapp', self.env)
        apptask.set_inputs(self.link_task.outputs)
        out = self.link_task.outputs[0]
        name = bundle_name_for_output(out)
        dir = self.create_bundle_dirs(name, out)
        n1 = dir.find_or_declare(['Contents', 'MacOS', out.name])
        apptask.set_outputs([n1])
        apptask.chmod = 0755
        apptask.install_path = os.path.join(self.install_path, name, 'Contents', 'MacOS')
        self.apptask = apptask
@after('apply_link')
@feature('cprogram')
def create_task_macplist(self):
	"""Use env['MACAPP'] to force *all* executables to be transformed into Mac applications
	or use obj.mac_app = True to build specific targets as Mac apps
	(generates the Contents/Info.plist file of the bundle)"""
	if self.env['MACAPP'] or getattr(self, 'mac_app', False):
		# check if the user specified a plist before using our template
		if not getattr(self, 'mac_plist', False):
			self.mac_plist = app_info
		plisttask = self.create_task('macplist', self.env)
		plisttask.set_inputs(self.link_task.outputs)
		out = self.link_task.outputs[0]
		# the template contains one %s placeholder for the executable name
		self.mac_plist = self.mac_plist % (out.name)
		name = bundle_name_for_output(out)
		dir = self.create_bundle_dirs(name, out)
		n1 = dir.find_or_declare(['Contents', 'Info.plist'])
		plisttask.set_outputs([n1])
		plisttask.mac_plist = self.mac_plist
		plisttask.install_path = os.path.join(self.install_path, name, 'Contents')
		self.plisttask = plisttask
@after('apply_link')
@feature('cshlib')
def apply_link_osx(self):
	"""Set an -install_name on dylibs so they record their installation path
	(the version number is inserted into the name when 'vnum' is given)."""
	name = self.link_task.outputs[0].name
	vnum = getattr(self, 'vnum', None)
	if vnum:
		name = name.replace('.dylib', '.%s.dylib' % vnum)
	path = os.path.join(Utils.subst_vars(self.install_path, self.env), name)
	if '-dynamiclib' in self.env['LINKFLAGS']:
		for flag in ('-install_name', path):
			self.env.append_value('LINKFLAGS', flag)
@before('apply_link', 'apply_lib_vars')
@feature('cc', 'cxx')
def apply_bundle(self):
	"""use env['MACBUNDLE'] to force all shlibs into mac bundles
	or use obj.mac_bundle = True for specific targets only"""
	# only shared libraries can become bundles
	if 'cshlib' not in self.features and 'shlib' not in self.features:
		return
	if self.env['MACBUNDLE'] or getattr(self, 'mac_bundle', False):
		# switch the output file naming to the bundle pattern
		self.env['shlib_PATTERN'] = self.env['macbundle_PATTERN']
		uselib = self.uselib = self.to_list(self.uselib)
		if 'MACBUNDLE' not in uselib:
			uselib.append('MACBUNDLE')
@after('apply_link')
@feature('cshlib')
def apply_bundle_remove_dynamiclib(self):
	"""Bundles are not linked with -dynamiclib: drop the flag when present
	(only for unversioned bundle targets)."""
	if not (self.env['MACBUNDLE'] or getattr(self, 'mac_bundle', False)):
		return
	if getattr(self, 'vnum', None):
		return
	try:
		self.env['LINKFLAGS'].remove('-dynamiclib')
	except ValueError:
		# the flag was not there in the first place
		pass
# TODO REMOVE IN 1.6 (global variable)
# standard directory layout inside a .app bundle
app_dirs = ['Contents', 'Contents/MacOS', 'Contents/Resources']
def app_build(task):
	"""Task body: copy the linked executable into the bundle, preserving file metadata."""
	src = task.inputs[0].srcpath(task.env)
	dst = task.outputs[0].abspath(task.env)
	shutil.copy2(src, dst)
	return 0
def plist_build(task):
	"""Task body: write the prepared Info.plist text to the output node."""
	dest = open(task.outputs[0].abspath(task.env), "w")
	dest.write(task.mac_plist)
	dest.close()
	return 0
# register the bundle tasks; both must run after the link step
Task.task_type_from_func('macapp', vars=[], func=app_build, after="cxx_link cc_link static_link")
Task.task_type_from_func('macplist', vars=[], func=plist_build, after="cxx_link cc_link static_link")

120
tools/wafadmin/Tools/perl.py

@ -0,0 +1,120 @@
#!/usr/bin/env python
# encoding: utf-8
# andersg at 0x63.nu 2007
import os
import Task, Options, Utils
from Configure import conf
from TaskGen import extension, taskgen, feature, before
# command template for translating XS sources, and the extension it consumes
xsubpp_str = '${PERL} ${XSUBPP} -noprototypes -typemap ${EXTUTILS_TYPEMAP} ${SRC} > ${TGT}'
EXT_XS = ['.xs']
@before('apply_incpaths', 'apply_type_vars', 'apply_lib_vars')
@feature('perlext')
def init_perlext(self):
	"""Prepare a task generator building a perl extension: add the PERL/PERLEXT
	uselib variables and switch to the perl shared-object naming pattern."""
	uselib = self.to_list(getattr(self, 'uselib', ''))
	for var in ('PERL', 'PERLEXT'):
		if var not in uselib:
			uselib.append(var)
	self.uselib = uselib
	self.env['shlib_PATTERN'] = self.env['perlext_PATTERN']
@extension(EXT_XS)
def xsubpp_file(self, node):
	"""Create an xsubpp task turning a .xs file into C source, and feed the
	generated .c node back into the build."""
	cnode = node.change_ext('.c')
	task = self.create_task('xsubpp')
	task.set_inputs(node)
	task.set_outputs(cnode)
	self.allnodes.append(cnode)
# the xsubpp task must produce its C file before the compilers run
Task.simple_task_type('xsubpp', xsubpp_str, color='BLUE', before="cc cxx", shell=False)
@conf
def check_perl_version(conf, minver=None):
	"""
	Checks if perl is installed.
	If installed the variable PERL will be set in environment.
	Perl binary can be overridden by --with-perl-binary config variable
	"""
	override = getattr(Options.options, 'perlbinary', None)
	if override:
		perl = override
	else:
		perl = conf.find_program("perl", var="PERL")
		if not perl:
			return False
	conf.env['PERL'] = perl
	# ask perl itself for its version string ("5.10.0" style)
	version = Utils.cmd_output(perl + " -e'printf \"%vd\", $^V'")
	res = True
	if not version:
		res = False
		version = "Unknown"
	elif minver is not None:
		if tuple(map(int, version.split("."))) < minver:
			res = False
	if minver is None:
		cver = ""
	else:
		cver = ".".join(map(str, minver))
	conf.check_message("perl", cver, res, version)
	return res
@conf
def check_perl_module(conf, module):
	"""
	Check if specified perlmodule is installed.
	Minimum version can be specified by specifying it after modulename
	like this:
	conf.check_perl_module("Some::Module 2.92")
	"""
	cmd = [conf.env['PERL'], '-e', 'use %s' % module]
	# perl exits non-zero when the module (or required version) is missing
	status = Utils.pproc.call(cmd, stdout=Utils.pproc.PIPE, stderr=Utils.pproc.PIPE)
	found = (status == 0)
	conf.check_message("perl module %s" % module, "", found)
	return found
@conf
def check_perl_ext_devel(conf):
	"""
	Check for configuration needed to build perl extensions.
	Sets different xxx_PERLEXT variables in the environment.
	Also sets the ARCHDIR_PERL variable useful as installation path,
	which can be overridden by --with-perl-archdir option.
	"""
	if not conf.env['PERL']:
		return False
	perl = conf.env['PERL']
	def read_out(cmd):
		# run perl with the given one-liner and split its output into a list
		return Utils.to_list(Utils.cmd_output(perl + cmd))
	# query perl's own build configuration (%Config) for the flags and paths
	conf.env["LINKFLAGS_PERLEXT"] = read_out(" -MConfig -e'print $Config{lddlflags}'")
	conf.env["CPPPATH_PERLEXT"] = read_out(" -MConfig -e'print \"$Config{archlib}/CORE\"'")
	conf.env["CCFLAGS_PERLEXT"] = read_out(" -MConfig -e'print \"$Config{ccflags} $Config{cccdlflags}\"'")
	conf.env["XSUBPP"] = read_out(" -MConfig -e'print \"$Config{privlib}/ExtUtils/xsubpp$Config{exe_ext}\"'")
	conf.env["EXTUTILS_TYPEMAP"] = read_out(" -MConfig -e'print \"$Config{privlib}/ExtUtils/typemap\"'")
	if not getattr(Options.options, 'perlarchdir', None):
		conf.env["ARCHDIR_PERL"] = Utils.cmd_output(perl + " -MConfig -e'print $Config{sitearch}'")
	else:
		conf.env["ARCHDIR_PERL"] = getattr(Options.options, 'perlarchdir')
	# shared-object naming pattern for perl extensions (e.g. '%s.so')
	conf.env['perlext_PATTERN'] = '%s.' + Utils.cmd_output(perl + " -MConfig -e'print $Config{dlext}'")
	return True
def set_options(opt):
	"""Register the perl-related command line options."""
	for flag, dest, msg in (
			("--with-perl-binary", "perlbinary", 'Specify alternate perl binary'),
			("--with-perl-archdir", "perlarchdir", 'Specify directory where to install arch specific files')):
		opt.add_option(flag, type="string", dest=dest, help=msg, default=None)

809
tools/wafadmin/Tools/preproc.py

@ -0,0 +1,809 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2006-2009 (ita)
"""
C/C++ preprocessor for finding dependencies
Reasons for using the Waf preprocessor by default
1. Some c/c++ extensions (Qt) require a custom preprocessor for obtaining the dependencies (.moc files)
2. Not all compilers provide .d files for obtaining the dependencies (portability)
3. A naive file scanner will not catch the constructs such as "#include foo()"
4. A naive file scanner will catch unnecessary dependencies (change an unused header -> recompile everything)
Regarding the speed concerns:
a. the preprocessing is performed only when files must be compiled
b. the macros are evaluated only for #if/#elif/#include
c. the time penalty is about 10%
d. system headers are not scanned
Now if you do not want the Waf preprocessor, the tool "gccdeps" uses the .d files produced
during the compilation to track the dependencies (useful when used with the boost libraries).
It only works with gcc though, and it cannot be used with Qt builds. A dumb
file scanner will be added in the future, so we will have most bahaviours.
"""
# TODO: more varargs, pragma once
# TODO: dumb file scanner tracking all includes
import re, sys, os, string
import Logs, Build, Utils
from Logs import debug, error
import traceback
class PreprocError(Utils.WafError):
	"""Raised when the preprocessor cannot parse or evaluate its input."""
	pass
# sentinel pushed on the line list to signal "leaving the current file"
POPFILE = '-'

go_absolute = 0
"set to 1 to track headers on files in /usr/include - else absolute paths are ignored"

standard_includes = ['/usr/include']
if sys.platform == "win32":
	standard_includes = []

use_trigraphs = 0
'apply the trigraph rules first'

strict_quotes = 0
"Keep <> for system includes (do not search for those includes)"

g_optrans = {
	'not':'!',
	'and':'&&',
	'bitand':'&',
	'and_eq':'&=',
	'or':'||',
	'bitor':'|',
	'or_eq':'|=',
	'xor':'^',
	'xor_eq':'^=',
	'compl':'~',
}
"these ops are for c++, to reset, set an empty dict"

# ignore #warning and #error
# matches one preprocessor directive per line: (marker, keyword, rest-of-line)
re_lines = re.compile(\
	'^[ \t]*(#|%:)[ \t]*(ifdef|ifndef|if|else|elif|endif|include|import|define|undef|pragma)[ \t]*(.*)\r*$',
	re.IGNORECASE | re.MULTILINE)
# a macro name at the start of a string
re_mac = re.compile("^[a-zA-Z_]\w*")
# a function-like macro definition (name immediately followed by '(')
re_fun = re.compile('^[a-zA-Z_][a-zA-Z0-9_]*[(]')
re_pragma_once = re.compile('^\s*once\s*', re.IGNORECASE)
# backslash-newline continuations
re_nl = re.compile('\\\\\r*\n', re.MULTILINE)
# comments, string/char literals and plain code (used with repl below)
re_cpp = re.compile(\
	r"""(/\*[^*]*\*+([^/*][^*]*\*+)*/)|//[^\n]*|("(\\.|[^"\\])*"|'(\\.|[^'\\])*'|.[^/"'\\]*)""",
	re.MULTILINE)
# trigraph table: '??=' -> '#', '??-' -> '~', ...
trig_def = [('??'+a, b) for a, b in zip("=-/!'()<>", r'#~\|^[]{}')]
# escape sequences for character literals
chr_esc = {'0':0, 'a':7, 'b':8, 't':9, 'n':10, 'f':11, 'v':12, 'r':13, '\\':92, "'":39}

# token kind tags used throughout the tokenizer and evaluator
NUM = 'i'
OP = 'O'
IDENT = 'T'
STR = 's'
CHAR = 'c'

tok_types = [NUM, STR, IDENT, OP]
# one regular expression alternative per token kind, in the same order
exp_types = [
	r"""0[xX](?P<hex>[a-fA-F0-9]+)(?P<qual1>[uUlL]*)|L*?'(?P<char>(\\.|[^\\'])+)'|(?P<n1>\d+)[Ee](?P<exp0>[+-]*?\d+)(?P<float0>[fFlL]*)|(?P<n2>\d*\.\d+)([Ee](?P<exp1>[+-]*?\d+))?(?P<float1>[fFlL]*)|(?P<n4>\d+\.\d*)([Ee](?P<exp2>[+-]*?\d+))?(?P<float2>[fFlL]*)|(?P<oct>0*)(?P<n0>\d+)(?P<qual2>[uUlL]*)""",
	r'L?"([^"\\]|\\.)*"',
	r'[a-zA-Z_]\w*',
	r'%:%:|<<=|>>=|\.\.\.|<<|<%|<:|<=|>>|>=|\+\+|\+=|--|->|-=|\*=|/=|%:|%=|%>|==|&&|&=|\|\||\|=|\^=|:>|!=|##|[\(\)\{\}\[\]<>\?\|\^\*\+&=:!#;,%/\-\?\~\.]',
]
re_clexer = re.compile('|'.join(["(?P<%s>%s)" % (name, part) for name, part in zip(tok_types, exp_types)]), re.M)

# states of a conditional-compilation block
accepted = 'a'
ignored = 'i'
undefined = 'u'
skipped = 's'
def repl(m):
	"""re_cpp substitution callback: comments collapse to one space,
	string/char literals and plain code are kept as-is."""
	if m.group(1) is not None:
		return ' '
	kept = m.group(3)
	if kept is None:
		return ''
	return kept
def filter_comments(filename):
	"""Read *filename*, strip comments/continuations and return the
	preprocessor directives as a list of (keyword, rest-of-line) tuples."""
	code = Utils.readf(filename)
	if use_trigraphs:
		# bug fix: the old code did "code.split(a).join(b)", which raises
		# AttributeError (lists have no join method); a plain textual
		# replacement of each trigraph is what was intended
		for (a, b) in trig_def: code = code.replace(a, b)
	code = re_nl.sub('', code)
	code = re_cpp.sub(repl, code)
	return [(m.group(2), m.group(3)) for m in re.finditer(re_lines, code)]
prec = {}
# op -> number, needed for such expressions: #if 1 && 2 != 0
# lower index = higher binding precedence
ops = ['* / %', '+ -', '<< >>', '< <= >= >', '== !=', '& | ^', '&& ||', ',']
for x in range(len(ops)):
	syms = ops[x]
	for u in syms.split():
		prec[u] = x
def reduce_nums(val_1, val_2, val_op):
	"""apply arithmetic rules and try to return an integer result

	val_1/val_2 may still be strings coming from macro text: coerce them
	to integers first, then apply the binary operator val_op. Unknown
	operators evaluate to 0.
	"""
	# make certain a and b are numeric
	try: a = 0 + val_1
	except TypeError: a = int(val_1)
	try: b = 0 + val_2
	except TypeError: b = int(val_2)
	d = val_op
	if d == '%': c = a % b
	elif d == '+': c = a + b
	elif d == '-': c = a - b
	elif d == '*': c = a * b
	elif d == '/': c = a / b
	elif d == '^': c = a ^ b
	elif d == '|': c = a | b
	elif d == '||': c = int(a or b)
	elif d == '&': c = a & b
	elif d == '&&': c = int(a and b)
	elif d == '==': c = int(a == b)
	elif d == '!=': c = int(a != b)
	elif d == '<=': c = int(a <= b)
	elif d == '<': c = int(a < b)
	elif d == '>': c = int(a > b)
	elif d == '>=': c = int(a >= b)
	# note: a duplicate "elif d == '^'" branch used to follow here; it was
	# unreachable dead code (the earlier '^' branch always matched first)
	elif d == '<<': c = a << b
	elif d == '>>': c = a >> b
	else: c = 0
	return c
def get_num(lst):
	"""Parse one operand from the token list: a parenthesized sub-expression,
	a unary op applied to an operand, a number, or an identifier (which, after
	macro expansion, evaluates to 0). Returns (value, remaining_tokens)."""
	if not lst: raise PreprocError("empty list for get_num")
	(p, v) = lst[0]
	if p == OP:
		if v == '(':
			# find the matching closing parenthesis
			count_par = 1
			i = 1
			while i < len(lst):
				(p, v) = lst[i]
				if p == OP:
					if v == ')':
						count_par -= 1
						if count_par == 0:
							break
					elif v == '(':
						count_par += 1
				i += 1
			else:
				raise PreprocError("rparen expected %r" % lst)
			(num, _) = get_term(lst[1:i])
			return (num, lst[i+1:])
		elif v == '+':
			return get_num(lst[1:])
		elif v == '-':
			num, lst = get_num(lst[1:])
			return (reduce_nums('-1', num, '*'), lst)
		elif v == '!':
			num, lst = get_num(lst[1:])
			return (int(not int(num)), lst)
		elif v == '~':
			# bug fix: recurse to obtain the operand first; previously this
			# branch referenced 'num' while it was still unbound (NameError)
			num, lst = get_num(lst[1:])
			return (~ int(num), lst)
		else:
			raise PreprocError("invalid op token %r for get_num" % lst)
	elif p == NUM:
		return v, lst[1:]
	elif p == IDENT:
		# all macros should have been replaced, remaining identifiers eval to 0
		return 0, lst[1:]
	else:
		raise PreprocError("invalid token %r for get_num" % lst)
def get_term(lst):
	"""Evaluate a token list into a number, honoring operator precedence,
	short-circuit &&/||, the comma operator and ?: conditionals.
	Returns (value, remaining_tokens) - remaining is [] when fully reduced."""
	if not lst: raise PreprocError("empty list for get_term")
	num, lst = get_num(lst)
	if not lst:
		return (num, [])
	(p, v) = lst[0]
	if p == OP:
		if v == '&&' and not num:
			# short-circuit: 0 && ... is 0
			return (num, [])
		elif v == '||' and num:
			# short-circuit: nonzero || ... is that value
			return (num, [])
		elif v == ',':
			# skip
			return get_term(lst[1:])
		elif v == '?':
			# find the ':' matching this '?' (skip over nested parentheses)
			count_par = 0
			i = 1
			while i < len(lst):
				(p, v) = lst[i]
				if p == OP:
					if v == ')':
						count_par -= 1
					elif v == '(':
						count_par += 1
					elif v == ':':
						if count_par == 0:
							break
				i += 1
			else:
				raise PreprocError("rparen expected %r" % lst)
			# evaluate only the selected branch of the conditional
			if int(num):
				return get_term(lst[1:i])
			else:
				return get_term(lst[i+1:])
		else:
			num2, lst = get_num(lst[1:])
			if not lst:
				# no more tokens to process
				num2 = reduce_nums(num, num2, v)
				return get_term([(NUM, num2)] + lst)
			# operator precedence
			p2, v2 = lst[0]
			if p2 != OP:
				raise PreprocError("op expected %r" % lst)
			if prec[v2] >= prec[v]:
				# the current operator binds at least as tightly: reduce now
				num2 = reduce_nums(num, num2, v)
				return get_term([(NUM, num2)] + lst)
			else:
				# the next operator binds tighter: reduce the right side first
				num3, lst = get_num(lst[1:])
				num3 = reduce_nums(num2, num3, v2)
				return get_term([(NUM, num), (p, v), (NUM, num3)] + lst)
	raise PreprocError("cannot reduce %r" % lst)
def reduce_eval(lst):
	"""take a list of tokens and output true or false (#if/#elif conditions)"""
	return (NUM, get_term(lst)[0])
def stringize(lst):
	"""Concatenate the textual values of a (kind, value) token list into one string."""
	return "".join([str(val) for (_, val) in lst])
def paste_tokens(t1, t2):
	"""
	here is what we can paste:
	a ## b -> ab
	> ## = -> >=
	a ## 2 -> a2
	"""
	kind = None
	if t1[0] == OP and t2[0] == OP:
		kind = OP
	elif t1[0] == IDENT and t2[0] in (IDENT, NUM):
		kind = IDENT
	elif t1[0] == NUM and t2[0] == NUM:
		kind = NUM
	if not kind:
		raise PreprocError('tokens do not make a valid paste %r and %r' % (t1, t2))
	return (kind, t1[1] + t2[1])
def reduce_tokens(lst, defs, ban=[]):
	"""replace the tokens in lst, using the macros provided in defs, and a list of macros that cannot be re-applied

	The list *lst* is modified in place. *ban* prevents infinite recursion on
	self-referential macros (it is never mutated, only extended by copy).
	"""
	i = 0
	while i < len(lst):
		(p, v) = lst[i]
		if p == IDENT and v == "defined":
			# handle defined(X) / defined X without expanding X
			del lst[i]
			if i < len(lst):
				(p2, v2) = lst[i]
				if p2 == IDENT:
					if v2 in defs:
						lst[i] = (NUM, 1)
					else:
						lst[i] = (NUM, 0)
				elif p2 == OP and v2 == '(':
					del lst[i]
					(p2, v2) = lst[i]
					del lst[i] # remove the ident, and change the ) for the value
					if v2 in defs:
						lst[i] = (NUM, 1)
					else:
						lst[i] = (NUM, 0)
				else:
					raise PreprocError("invalid define expression %r" % lst)
		elif p == IDENT and v in defs:
			# macro definitions are parsed lazily: a raw string is tokenized
			# into ([params], [tokens]) on first use
			if isinstance(defs[v], str):
				a, b = extract_macro(defs[v])
				defs[v] = b
			macro_def = defs[v]
			to_add = macro_def[1]
			if isinstance(macro_def[0], list):
				# macro without arguments
				del lst[i]
				for x in xrange(len(to_add)):
					lst.insert(i, to_add[x])
					i += 1
			else:
				# collect the arguments for the funcall
				args = []
				del lst[i]
				if i >= len(lst):
					raise PreprocError("expected '(' after %r (got nothing)" % v)
				(p2, v2) = lst[i]
				if p2 != OP or v2 != '(':
					raise PreprocError("expected '(' after %r" % v)
				del lst[i]
				one_param = []
				count_paren = 0
				while i < len(lst):
					p2, v2 = lst[i]
					# the argument tokens are consumed from lst as they are read
					del lst[i]
					if p2 == OP and count_paren == 0:
						if v2 == '(':
							one_param.append((p2, v2))
							count_paren += 1
						elif v2 == ')':
							if one_param: args.append(one_param)
							break
						elif v2 == ',':
							if not one_param: raise PreprocError("empty param in funcall %s" % p)
							args.append(one_param)
							one_param = []
						else:
							one_param.append((p2, v2))
					else:
						one_param.append((p2, v2))
						if v2 == '(': count_paren += 1
						elif v2 == ')': count_paren -= 1
				else:
					# while loop ran out of tokens without hitting the final ')'
					raise PreprocError('malformed macro')
				# substitute the arguments within the define expression
				accu = []
				arg_table = macro_def[0]
				j = 0
				while j < len(to_add):
					(p2, v2) = to_add[j]
					if p2 == OP and v2 == '#':
						# stringize is for arguments only
						if j+1 < len(to_add) and to_add[j+1][0] == IDENT and to_add[j+1][1] in arg_table:
							toks = args[arg_table[to_add[j+1][1]]]
							accu.append((STR, stringize(toks)))
							j += 1
						else:
							accu.append((p2, v2))
					elif p2 == OP and v2 == '##':
						# token pasting, how can man invent such a complicated system?
						if accu and j+1 < len(to_add):
							# we have at least two tokens
							t1 = accu[-1]
							if to_add[j+1][0] == IDENT and to_add[j+1][1] in arg_table:
								toks = args[arg_table[to_add[j+1][1]]]
								if toks:
									accu[-1] = paste_tokens(t1, toks[0]) #(IDENT, accu[-1][1] + toks[0][1])
									accu.extend(toks[1:])
								else:
									# error, case "a##"
									accu.append((p2, v2))
									accu.extend(toks)
							elif to_add[j+1][0] == IDENT and to_add[j+1][1] == '__VA_ARGS__':
								# TODO not sure
								# first collect the tokens
								va_toks = []
								st = len(macro_def[0])
								pt = len(args)
								for x in args[pt-st+1:]:
									va_toks.extend(x)
									va_toks.append((OP, ','))
								if va_toks: va_toks.pop() # extra comma
								if len(accu)>1:
									(p3, v3) = accu[-1]
									(p4, v4) = accu[-2]
									if v3 == '##':
										# remove the token paste
										accu.pop()
										if v4 == ',' and pt < st:
											# remove the comma
											accu.pop()
								accu += va_toks
							else:
								accu[-1] = paste_tokens(t1, to_add[j+1])
							j += 1
						else:
							# invalid paste, case "##a" or "b##"
							accu.append((p2, v2))
					elif p2 == IDENT and v2 in arg_table:
						# plain argument reference: expand it recursively first
						toks = args[arg_table[v2]]
						reduce_tokens(toks, defs, ban+[v])
						accu.extend(toks)
					else:
						accu.append((p2, v2))
					j += 1
				# rescan the fully substituted body, banning this macro name
				reduce_tokens(accu, defs, ban+[v])
				for x in xrange(len(accu)-1, -1, -1):
					lst.insert(i, accu[x])
		i += 1
def eval_macro(lst, adefs):
	"""reduce the tokens from the list lst, and try to return a 0/1 result"""
	reduce_tokens(lst, adefs, [])
	if not lst:
		raise PreprocError("missing tokens to evaluate")
	value = reduce_eval(lst)[1]
	return int(value) != 0
def extract_macro(txt):
	"""process a macro definition from "#define f(x, y) x * y" into a function or a simple macro without arguments

	Returns (name, [params, body_tokens]) where params is a dict mapping
	parameter names to their positions for function-like macros, or a plain
	list [] for object-like macros.
	"""
	t = tokenize(txt)
	if re_fun.search(txt):
		# function-like macro: parse the parameter list with a tiny state
		# machine keyed on the previous significant token
		p, name = t[0]
		p, v = t[1]
		if p != OP: raise PreprocError("expected open parenthesis")
		i = 1
		pindex = 0
		params = {}
		prev = '('
		while 1:
			i += 1
			p, v = t[i]
			if prev == '(':
				if p == IDENT:
					params[v] = pindex
					pindex += 1
					prev = p
				elif p == OP and v == ')':
					break
				else:
					raise PreprocError("unexpected token (3)")
			elif prev == IDENT:
				if p == OP and v == ',':
					prev = v
				elif p == OP and v == ')':
					break
				else:
					raise PreprocError("comma or ... expected")
			elif prev == ',':
				if p == IDENT:
					params[v] = pindex
					pindex += 1
					prev = p
				elif p == OP and v == '...':
					raise PreprocError("not implemented (1)")
				else:
					raise PreprocError("comma or ... expected (2)")
			elif prev == '...':
				raise PreprocError("not implemented (2)")
			else:
				raise PreprocError("unexpected else")
		#~ print (name, [params, t[i+1:]])
		return (name, [params, t[i+1:]])
	else:
		# object-like macro: everything after the name is the body
		(p, v) = t[0]
		return (v, [[], t[1:]])
# matches the simple include forms: <system/header.h> or "local/header.h"
re_include = re.compile('^\s*(<(?P<a>.*)>|"(?P<b>.*)")')
def extract_include(txt, defs):
	"""process a line in the form "#include foo" to return a string representing the file

	Returns a (kind, filename) tuple where kind is '<' or '"'.
	Raises PreprocError when the include cannot be parsed.
	"""
	m = re_include.search(txt)
	if m:
		if m.group('a'): return '<', m.group('a')
		if m.group('b'): return '"', m.group('b')
	# perform preprocessing and look at the result, it must match an include
	toks = tokenize(txt)
	reduce_tokens(toks, defs, ['waf_include'])
	if not toks:
		raise PreprocError("could not parse include %s" % txt)
	if len(toks) == 1:
		if toks[0][0] == STR:
			return '"', toks[0][1]
	else:
		if toks[0][1] == '<' and toks[-1][1] == '>':
			# bug fix: return a (kind, filename) tuple like every other path;
			# callers unpack two values, so the bare string broke them
			return '<', stringize(toks).lstrip('<').rstrip('>')
	raise PreprocError("could not parse include %s." % txt)
def parse_char(txt):
	"""Return the numeric value of a C character literal body (quotes removed).

	Handles plain characters, hex escapes (\\xNN), octal escapes and the
	named escapes from chr_esc. Raises PreprocError on unknown escapes.
	"""
	if not txt: raise PreprocError("attempted to parse a null char")
	if txt[0] != '\\':
		return ord(txt)
	c = txt[1]
	if c == 'x':
		# hex escape; a redundant length test used to guard an identical
		# duplicated return here - both branches computed the same value
		return int(txt[2:], 16)
	elif c.isdigit():
		if c == '0' and len(txt) == 2: return 0
		for i in 3, 2, 1:
			if len(txt) > i and txt[1:1+i].isdigit():
				# NOTE(review): octal escapes return a (length, value) tuple,
				# unlike the int returned by the other branches - kept as-is
				return (1+i, int(txt[1:1+i], 8))
		# NOTE(review): malformed octal escapes fall through returning None
	else:
		try: return chr_esc[c]
		except KeyError: raise PreprocError("could not parse char literal '%s'" % txt)
def tokenize(s):
	"""convert a string into a list of tokens (shlex.split does not apply to c/c++/d)

	Returns a list of (kind, value) pairs where kind is one of NUM, STR,
	IDENT or OP. C++ word operators (and/or/not...) are rewritten to their
	symbolic form, true/false become numbers, digraphs become # / ##.
	"""
	ret = []
	for match in re_clexer.finditer(s):
		m = match.group
		# exactly one of the named groups matched; find which kind it was
		for name in tok_types:
			v = m(name)
			if v:
				if name == IDENT:
					try: v = g_optrans[v]; name = OP
					except KeyError:
						# c++ specific
						if v.lower() == "true":
							v = 1
							name = NUM
						elif v.lower() == "false":
							v = 0
							name = NUM
				elif name == NUM:
					# decode the numeric literal according to the sub-group hit
					if m('oct'): v = int(v, 8)
					elif m('hex'): v = int(m('hex'), 16)
					elif m('n0'): v = m('n0')
					else:
						v = m('char')
						if v: v = parse_char(v)
						else: v = m('n2') or m('n4')
				elif name == OP:
					# translate the digraphs
					if v == '%:': v = '#'
					elif v == '%:%:': v = '##'
				elif name == STR:
					# remove the quotes around the string
					v = v[1:-1]
				ret.append((name, v))
				break
	return ret
class c_parser(object):
	"""Stateful c/c++ preprocessor used to compute the dependencies
	(#include'd nodes and unresolved names) of a source file."""

	def __init__(self, nodepaths=None, defines=None):
		"""nodepaths: include-path nodes to search; defines: initial macro table."""
		#self.lines = txt.split('\n')
		self.lines = []
		if defines is None:
			self.defs = {}
		else:
			self.defs = dict(defines) # make a copy
		self.state = []
		self.env = None # needed for the variant when searching for files
		self.count_files = 0
		self.currentnode_stack = []
		self.nodepaths = nodepaths or []
		self.nodes = []
		self.names = []
		# file added
		self.curfile = ''
		self.ban_includes = []

	def tryfind(self, filename):
		"""Resolve *filename* against the current directory and the include
		paths; record it in self.nodes (found) or self.names (not found)."""
		self.curfile = filename
		# for msvc it should be a for loop on the whole stack
		found = self.currentnode_stack[-1].find_resource(filename)
		for n in self.nodepaths:
			if found:
				break
			found = n.find_resource(filename)
		if not found:
			if not filename in self.names:
				self.names.append(filename)
		else:
			self.nodes.append(found)
			# .moc files are generated later, do not scan them
			if filename[-4:] != '.moc':
				self.addlines(found)
		return found

	def addlines(self, node):
		"""Prepend the filtered directive lines of *node* to the work list,
		using (and filling) the per-build parse cache."""
		self.currentnode_stack.append(node.parent)
		filepath = node.abspath(self.env)
		self.count_files += 1
		if self.count_files > 30000: raise PreprocError("recursion limit exceeded")
		pc = self.parse_cache
		debug('preproc: reading file %r' % filepath)
		try:
			lns = pc[filepath]
		except KeyError:
			pass
		else:
			# cache hit: reuse the already-filtered lines
			self.lines = lns + self.lines
			return
		try:
			lines = filter_comments(filepath)
			lines.append((POPFILE, ''))
			pc[filepath] = lines # cache the lines filtered
			self.lines = lines + self.lines
		except IOError:
			raise PreprocError("could not read the file %s" % filepath)
		except Exception:
			# best-effort: log parsing failures instead of aborting the scan
			if Logs.verbose > 0:
				error("parsing %s failed" % filepath)
				traceback.print_exc()

	def start(self, node, env):
		"""Scan *node* (with environment *env*): process every directive line,
		following includes, until the work list is exhausted."""
		debug('preproc: scanning %s (in %s)' % (node.name, node.parent.name))
		self.env = env
		variant = node.variant(env)
		# the parse cache is shared through the build context class
		bld = node.__class__.bld
		try:
			self.parse_cache = bld.parse_cache
		except AttributeError:
			bld.parse_cache = {}
			self.parse_cache = bld.parse_cache
		self.addlines(node)
		if env['DEFLINES']:
			self.lines = [('define', x) for x in env['DEFLINES']] + self.lines
		while self.lines:
			(kind, line) = self.lines.pop(0)
			if kind == POPFILE:
				self.currentnode_stack.pop()
				continue
			try:
				self.process_line(kind, line)
			except Exception, e:
				if Logs.verbose:
					debug('preproc: line parsing failed (%s): %s %s' % (e, line, Utils.ex_stack()))

	def process_line(self, token, line):
		"""Interpret one directive: *token* is the keyword (if/define/include/...),
		*line* is the rest of the directive text."""
		ve = Logs.verbose
		if ve: debug('preproc: line is %s - %s state is %s' % (token, line, self.state))
		state = self.state
		# make certain we define the state if we are about to enter in an if block
		if token in ['ifdef', 'ifndef', 'if']:
			state.append(undefined)
		elif token == 'endif':
			state.pop()
		# skip lines when in a dead 'if' branch, wait for the endif
		if not token in ['else', 'elif', 'endif']:
			if skipped in self.state or ignored in self.state:
				return
		if token == 'if':
			ret = eval_macro(tokenize(line), self.defs)
			if ret: state[-1] = accepted
			else: state[-1] = ignored
		elif token == 'ifdef':
			m = re_mac.search(line)
			if m and m.group(0) in self.defs: state[-1] = accepted
			else: state[-1] = ignored
		elif token == 'ifndef':
			m = re_mac.search(line)
			if m and m.group(0) in self.defs: state[-1] = ignored
			else: state[-1] = accepted
		elif token == 'include' or token == 'import':
			(kind, inc) = extract_include(line, self.defs)
			if inc in self.ban_includes: return
			# #import implies include-once semantics
			if token == 'import': self.ban_includes.append(inc)
			if ve: debug('preproc: include found %s (%s) ' % (inc, kind))
			if kind == '"' or not strict_quotes:
				self.tryfind(inc)
		elif token == 'elif':
			if state[-1] == accepted:
				state[-1] = skipped
			elif state[-1] == ignored:
				if eval_macro(tokenize(line), self.defs):
					state[-1] = accepted
		elif token == 'else':
			if state[-1] == accepted: state[-1] = skipped
			elif state[-1] == ignored: state[-1] = accepted
		elif token == 'define':
			m = re_mac.search(line)
			if m:
				name = m.group(0)
				if ve: debug('preproc: define %s %s' % (name, line))
				# store the raw line; it is tokenized lazily on first use
				self.defs[name] = line
			else:
				raise PreprocError("invalid define line %s" % line)
		elif token == 'undef':
			m = re_mac.search(line)
			if m and m.group(0) in self.defs:
				self.defs.__delitem__(m.group(0))
				#print "undef %s" % name
		elif token == 'pragma':
			if re_pragma_once.search(line.lower()):
				self.ban_includes.append(self.curfile)
def get_deps(node, env, nodepaths=[]):
	"""
	Get the dependencies using a c/c++ preprocessor, this is required for finding dependencies of the kind
	#include some_macro()
	"""
	parser = c_parser(nodepaths)
	parser.start(node, env)
	return (parser.nodes, parser.names)
#################### dumb dependency scanner

# like re_lines, but restricted to #include directives only
re_inc = re.compile(\
	'^[ \t]*(#|%:)[ \t]*(include)[ \t]*(.*)\r*$',
	re.IGNORECASE | re.MULTILINE)
def lines_includes(filename):
	"""Return the (keyword, argument) pairs of all #include lines of *filename*."""
	code = Utils.readf(filename)
	if use_trigraphs:
		# bug fix: was "code.split(a).join(b)" which raises AttributeError
		# (lists have no join); a plain textual replacement is intended
		for (a, b) in trig_def: code = code.replace(a, b)
	code = re_nl.sub('', code)
	code = re_cpp.sub(repl, code)
	return [(m.group(2), m.group(3)) for m in re.finditer(re_inc, code)]
def get_deps_simple(node, env, nodepaths=[], defines={}):
	"""
	Get the dependencies by just looking recursively at the #include statements

	Returns (nodes, names): the resolved include nodes and the names that
	could not be resolved against *nodepaths*.
	"""
	nodes = []
	names = []
	def find_deps(node):
		# scan one file and recurse into each newly resolved include
		lst = lines_includes(node.abspath(env))
		for (_, line) in lst:
			(t, filename) = extract_include(line, defines)
			if filename in names:
				continue
			if filename.endswith('.moc'):
				# generated later; record the name but do not try to read it
				names.append(filename)
			found = None
			for n in nodepaths:
				if found:
					break
				found = n.find_resource(filename)
			if not found:
				if not filename in names:
					names.append(filename)
			elif not found in nodes:
				nodes.append(found)
				# bug fix: recurse into the file that was just found; the old
				# code called find_deps(node) on the same file again, so
				# transitive includes were never scanned
				find_deps(found)
	find_deps(node)
	return (nodes, names)

401
tools/wafadmin/Tools/python.py

@ -0,0 +1,401 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2007 (ita)
# Gustavo Carneiro (gjc), 2007
"Python support"
import os, sys
import TaskGen, Utils, Utils, Runner, Options, Build
from Logs import debug, warn, info
from TaskGen import extension, taskgen, before, after, feature
from Configure import conf
EXT_PY = ['.py']
FRAG_2 = '''
#ifdef __cplusplus
extern "C" {
#endif
void Py_Initialize(void);
void Py_Finalize(void);
#ifdef __cplusplus
}
#endif
int main()
{
Py_Initialize();
Py_Finalize();
return 0;
}
'''
@before('apply_incpaths', 'apply_lib_vars', 'apply_type_vars')
@feature('pyext')
@before('apply_bundle')
def init_pyext(self):
	"""Prepare a task generator producing a python extension module."""
	self.default_install_path = '${PYTHONDIR}'
	uselib = self.to_list(getattr(self, 'uselib', ''))
	if 'PYEXT' not in uselib:
		uselib.append('PYEXT')
	self.uselib = uselib
	# on darwin, python extensions are built as mac bundles
	self.env['MACBUNDLE'] = True
@before('apply_link', 'apply_lib_vars', 'apply_type_vars')
@after('apply_bundle')
@feature('pyext')
def pyext_shlib_ext(self):
	"""Name extension modules with the python-specific pattern."""
	# override shlib_PATTERN set by the osx module
	env = self.env
	env['shlib_PATTERN'] = env['pyext_PATTERN']
@before('apply_incpaths', 'apply_lib_vars', 'apply_type_vars')
@feature('pyembed')
def init_pyembed(self):
	"""Add the PYEMBED uselib variables for embedding a python interpreter."""
	uselib = self.to_list(getattr(self, 'uselib', ''))
	if 'PYEMBED' not in uselib:
		uselib.append('PYEMBED')
	self.uselib = uselib
@extension(EXT_PY)
def process_py(self, node):
	"""Schedule the installation (and byte-compilation) of one .py file
	after the build, when installing."""
	if not (self.bld.is_install or self.install_path):
		return
	def do_install(ctx):
		install_pyfile(self, node)
	self.bld.add_post_fun(do_install)
def install_pyfile(self, node):
	"""Install one python source file, then byte-compile it (install) or
	remove the compiled .pyc/.pyo files (uninstall)."""
	path = self.bld.get_install_path(self.install_path + os.sep + node.name, self.env)
	self.bld.install_files(self.install_path, [node], self.env, self.chmod, postpone=False)
	if self.bld.is_install < 0:
		# uninstall: drop the byte-compiled companions as well
		info("* removing byte compiled python files")
		for x in 'co':
			try:
				os.remove(path + x)
			except OSError:
				pass
	if self.bld.is_install > 0:
		if self.env['PYC'] or self.env['PYO']:
			info("* byte compiling %r" % path)
		if self.env['PYC']:
			# compile with the *target* interpreter, not the one running waf
			program = ("""
import sys, py_compile
for pyfile in sys.argv[1:]:
	py_compile.compile(pyfile, pyfile + 'c')
""")
			argv = [self.env['PYTHON'], '-c', program, path]
			ret = Utils.pproc.Popen(argv).wait()
			if ret:
				raise Utils.WafError('bytecode compilation failed %r' % path)
		if self.env['PYO']:
			# same, but with the optimize flag producing .pyo files
			program = ("""
import sys, py_compile
for pyfile in sys.argv[1:]:
	py_compile.compile(pyfile, pyfile + 'o')
""")
			argv = [self.env['PYTHON'], self.env['PYFLAGS_OPT'], '-c', program, path]
			ret = Utils.pproc.Popen(argv).wait()
			if ret:
				raise Utils.WafError('bytecode compilation failed %r' % path)
# COMPAT
class py_taskgen(TaskGen.task_gen):
	"""Backward-compatibility alias kept for old wscripts using 'py_taskgen'."""
	def __init__(self, *k, **kw):
		TaskGen.task_gen.__init__(self, *k, **kw)
@before('apply_core')
@after('vars_target_cprogram', 'vars_target_cstaticlib')
@feature('py')
def init_py(self):
	# plain python files install into ${PYTHONDIR} by default
	self.default_install_path = '${PYTHONDIR}'
def _get_python_variables(python_exe, variables, imports=['import sys']):
	"""Run a python interpreter and print some variables

	A small program printing repr() of each expression in *variables* is fed
	to *python_exe*; the printed values are parsed back into None, str or int.
	Parsing stops at the first unrecognized line.
	"""
	program = list(imports)
	program.append('')
	program.extend(["print(repr(%s))" % v for v in variables])
	os_env = dict(os.environ)
	# see comments in the OSX tool
	os_env.pop('MACOSX_DEPLOYMENT_TARGET', None)
	proc = Utils.pproc.Popen([python_exe, "-c", '\n'.join(program)], stdout=Utils.pproc.PIPE, env=os_env)
	output = proc.communicate()[0].split("\n")
	if proc.returncode:
		if Options.options.verbose:
			warn("Python program to extract python configuration variables failed:\n%s"
			     % '\n'.join(["line %03i: %s" % (lineno+1, line) for lineno, line in enumerate(program)]))
		raise RuntimeError
	parsed = []
	for raw in output:
		raw = raw.strip()
		if not raw:
			continue
		if raw == 'None':
			parsed.append(None)
		elif raw[0] == "'" and raw[-1] == "'":
			parsed.append(raw[1:-1])
		elif raw[0].isdigit():
			parsed.append(int(raw))
		else:
			break
	return parsed
@conf
def check_python_headers(conf):
	"""Check for headers and libraries necessary to extend or embed python.

	On success the environment variables xxx_PYEXT and xxx_PYEMBED are added for uselib
	PYEXT: for compiling python extensions
	PYEMBED: for embedding a python interpreter"""
	# A C or C++ compiler must be configured first, and the interpreter version known.
	if not conf.env['CC_NAME'] and not conf.env['CXX_NAME']:
		conf.fatal('load a compiler first (gcc, g++, ..)')
	if not conf.env['PYTHON_VERSION']:
		conf.check_python_version()
	env = conf.env
	python = env['PYTHON']
	assert python, ("python is %r !" % (python,))
	## On Mac OSX we need to use mac bundles for python plugins
	if Options.platform == 'darwin':
		conf.check_tool('osx')
	try:
		# Get some python configuration variables using distutils
		v = 'prefix SO SYSLIBS LDFLAGS SHLIBS LIBDIR LIBPL INCLUDEPY Py_ENABLE_SHARED MACOSX_DEPLOYMENT_TARGET'.split()
		(python_prefix, python_SO, python_SYSLIBS, python_LDFLAGS, python_SHLIBS,
		 python_LIBDIR, python_LIBPL, INCLUDEPY, Py_ENABLE_SHARED,
		 python_MACOSX_DEPLOYMENT_TARGET) = \
			_get_python_variables(python, ["get_config_var('%s')" % x for x in v],
					      ['from distutils.sysconfig import get_config_var'])
	except RuntimeError:
		conf.fatal("Python development headers not found (-v for details).")
	# Record everything distutils reported, for post-mortem debugging.
	conf.log.write("""Configuration returned from %r:
python_prefix = %r
python_SO = %r
python_SYSLIBS = %r
python_LDFLAGS = %r
python_SHLIBS = %r
python_LIBDIR = %r
python_LIBPL = %r
INCLUDEPY = %r
Py_ENABLE_SHARED = %r
MACOSX_DEPLOYMENT_TARGET = %r
""" % (python, python_prefix, python_SO, python_SYSLIBS, python_LDFLAGS, python_SHLIBS,
	python_LIBDIR, python_LIBPL, INCLUDEPY, Py_ENABLE_SHARED, python_MACOSX_DEPLOYMENT_TARGET))
	# Propagate the deployment target the interpreter was built with.
	if python_MACOSX_DEPLOYMENT_TARGET:
		conf.env['MACOSX_DEPLOYMENT_TARGET'] = python_MACOSX_DEPLOYMENT_TARGET
		conf.environ['MACOSX_DEPLOYMENT_TARGET'] = python_MACOSX_DEPLOYMENT_TARGET
	# Extension modules take the interpreter's suffix (e.g. '.so').
	env['pyext_PATTERN'] = '%s'+python_SO
	# Check for python libraries for embedding
	if python_SYSLIBS is not None:
		for lib in python_SYSLIBS.split():
			if lib.startswith('-l'):
				lib = lib[2:] # strip '-l'
			env.append_value('LIB_PYEMBED', lib)
	if python_SHLIBS is not None:
		for lib in python_SHLIBS.split():
			if lib.startswith('-l'):
				lib = lib[2:] # strip '-l'
			env.append_value('LIB_PYEMBED', lib)
	if Options.platform != 'darwin' and python_LDFLAGS:
		env.append_value('LINKFLAGS_PYEMBED', python_LDFLAGS.split())
	# Try to locate libpythonX.Y in LIBDIR, then LIBPL, then $prefix/libs (win32 naming).
	result = False
	name = 'python' + env['PYTHON_VERSION']
	if python_LIBDIR is not None:
		path = [python_LIBDIR]
		conf.log.write("\n\n# Trying LIBDIR: %r\n" % path)
		result = conf.check(lib=name, uselib='PYEMBED', libpath=path)
	if not result and python_LIBPL is not None:
		conf.log.write("\n\n# try again with -L$python_LIBPL (some systems don't install the python library in $prefix/lib)\n")
		path = [python_LIBPL]
		result = conf.check(lib=name, uselib='PYEMBED', libpath=path)
	if not result:
		conf.log.write("\n\n# try again with -L$prefix/libs, and pythonXY name rather than pythonX.Y (win32)\n")
		path = [os.path.join(python_prefix, "libs")]
		name = 'python' + env['PYTHON_VERSION'].replace('.', '')
		result = conf.check(lib=name, uselib='PYEMBED', libpath=path)
	if result:
		env['LIBPATH_PYEMBED'] = path
		env.append_value('LIB_PYEMBED', name)
	else:
		conf.log.write("\n\n### LIB NOT FOUND\n")
	# under certain conditions, python extensions must link to
	# python libraries, not just python embedding programs.
	if (sys.platform == 'win32' or sys.platform.startswith('os2')
	    or sys.platform == 'darwin' or Py_ENABLE_SHARED):
		env['LIBPATH_PYEXT'] = env['LIBPATH_PYEMBED']
		env['LIB_PYEXT'] = env['LIB_PYEMBED']
	# We check that pythonX.Y-config exists, and if it exists we
	# use it to get only the includes, else fall back to distutils.
	python_config = conf.find_program(
		'python%s-config' % ('.'.join(env['PYTHON_VERSION'].split('.')[:2])),
		var='PYTHON_CONFIG')
	if not python_config:
		python_config = conf.find_program(
			'python-config-%s' % ('.'.join(env['PYTHON_VERSION'].split('.')[:2])),
			var='PYTHON_CONFIG')
	includes = []
	if python_config:
		# NOTE(review): python-config is run through the interpreter here;
		# this assumes the config helper is itself a python script — verify.
		for incstr in Utils.cmd_output("%s %s --includes" % (python, python_config)).strip().split():
			# strip the -I or /I
			if (incstr.startswith('-I')
			    or incstr.startswith('/I')):
				incstr = incstr[2:]
			# append include path, unless already given
			if incstr not in includes:
				includes.append(incstr)
		conf.log.write("Include path for Python extensions "
			       "(found via python-config --includes): %r\n" % (includes,))
		env['CPPPATH_PYEXT'] = includes
		env['CPPPATH_PYEMBED'] = includes
	else:
		conf.log.write("Include path for Python extensions "
			       "(found via distutils module): %r\n" % (INCLUDEPY,))
		env['CPPPATH_PYEXT'] = [INCLUDEPY]
		env['CPPPATH_PYEMBED'] = [INCLUDEPY]
	# Code using the Python API needs to be compiled with -fno-strict-aliasing
	if env['CC_NAME'] == 'gcc':
		env.append_value('CCFLAGS_PYEMBED', '-fno-strict-aliasing')
		env.append_value('CCFLAGS_PYEXT', '-fno-strict-aliasing')
	if env['CXX_NAME'] == 'gcc':
		env.append_value('CXXFLAGS_PYEMBED', '-fno-strict-aliasing')
		env.append_value('CXXFLAGS_PYEXT', '-fno-strict-aliasing')
	# See if it compiles
	test_env = env.copy()
	a = test_env.append_value
	a('CPPPATH', env['CPPPATH_PYEMBED'])
	a('LIBPATH', env['LIBPATH_PYEMBED'])
	a('LIB', env['LIB_PYEMBED'])
	a('LINKFLAGS', env['LINKFLAGS_PYEMBED'])
	a('CXXFLAGS', env['CXXFLAGS_PYEMBED'])
	a('CCFLAGS', env['CCFLAGS_PYEMBED'])
	conf.check(header_name='Python.h', define_name='HAVE_PYTHON_H',
		   env=test_env, fragment=FRAG_2,
		   errmsg='Could not find the python development headers', mandatory=1)
@conf
def check_python_version(conf, minver=None):
	"""
	Check if the python interpreter is found matching a given minimum version.
	minver should be a tuple, eg. to check for python >= 2.4.2 pass (2,4,2) as minver.

	If successful, PYTHON_VERSION is defined as 'MAJOR.MINOR'
	(eg. '2.4') of the actual python version found, and PYTHONDIR is
	defined, pointing to the site-packages directory appropriate for
	this python version, where modules/packages/extensions should be
	installed.
	"""
	assert minver is None or isinstance(minver, tuple)
	python = conf.env['PYTHON']
	assert python, ("python is %r !" % (python,))
	# Get python version string: one line per sys.version_info element
	cmd = [python, "-c", "import sys\nfor x in sys.version_info: print(str(x))"]
	debug('python: Running python command %r' % cmd)
	proc = Utils.pproc.Popen(cmd, stdout=Utils.pproc.PIPE)
	lines = proc.communicate()[0].split()
	assert len(lines) == 5, "found %i lines, expected 5: %r" % (len(lines), lines)
	# (major, minor, micro, releaselevel, serial) — releaselevel stays a string
	pyver_tuple = (int(lines[0]), int(lines[1]), int(lines[2]), lines[3], int(lines[4]))
	# compare python version with the minimum required
	result = (minver is None) or (pyver_tuple >= minver)
	if result:
		# define useful environment variables
		pyver = '.'.join([str(x) for x in pyver_tuple[:2]])
		conf.env['PYTHON_VERSION'] = pyver
		# PYTHONDIR may be forced through the environment; otherwise ask distutils
		if 'PYTHONDIR' in conf.environ:
			pydir = conf.environ['PYTHONDIR']
		else:
			if sys.platform == 'win32':
				(python_LIBDEST, pydir) = \
					_get_python_variables(python,
							      ["get_config_var('LIBDEST')",
							       "get_python_lib(standard_lib=0, prefix=%r)" % conf.env['PREFIX']],
							      ['from distutils.sysconfig import get_config_var, get_python_lib'])
			else:
				python_LIBDEST = None
				(pydir,) = \
					_get_python_variables(python,
							      ["get_python_lib(standard_lib=0, prefix=%r)" % conf.env['PREFIX']],
							      ['from distutils.sysconfig import get_config_var, get_python_lib'])
			# fall back to a conventional lib/pythonX.Y location
			if python_LIBDEST is None:
				if conf.env['LIBDIR']:
					python_LIBDEST = os.path.join(conf.env['LIBDIR'], "python" + pyver)
				else:
					python_LIBDEST = os.path.join(conf.env['PREFIX'], "lib", "python" + pyver)
		if hasattr(conf, 'define'): # conf.define is added by the C tool, so may not exist
			conf.define('PYTHONDIR', pydir)
		conf.env['PYTHONDIR'] = pydir
	# Feedback
	pyver_full = '.'.join(map(str, pyver_tuple[:3]))
	if minver is None:
		conf.check_message_custom('Python version', '', pyver_full)
	else:
		minver_str = '.'.join(map(str, minver))
		conf.check_message('Python version', ">= %s" % (minver_str,), result, option=pyver_full)
	if not result:
		conf.fatal("Python too old.")
@conf
def check_python_module(conf, module_name):
	"""
	Check if the selected python interpreter can import the given python module.
	"""
	cmd = [conf.env['PYTHON'], "-c", "import %s" % module_name]
	proc = Utils.pproc.Popen(cmd, stderr=Utils.pproc.PIPE, stdout=Utils.pproc.PIPE)
	found = not proc.wait()
	conf.check_message('Python module', module_name, found)
	if not found:
		conf.fatal("Python module not found.")
def detect(conf):
	"""Find the python interpreter and set the byte-compilation defaults."""
	interpreter = conf.find_program('python', var='PYTHON')
	if not interpreter:
		return
	env = conf.env
	env['PYCMD'] = '"import sys, py_compile;py_compile.compile(sys.argv[1], sys.argv[2])"'
	env['PYFLAGS'] = ''
	env['PYFLAGS_OPT'] = '-O'
	# honour --nopyc/--nopyo when the options module provided them
	env['PYC'] = getattr(Options.options, 'pyc', 1)
	env['PYO'] = getattr(Options.options, 'pyo', 1)
def set_options(opt):
	"""Register the --nopyc/--nopyo switches (bytecode is installed by default)."""
	for flag, dest, helptext in (
			('--nopyc', 'pyc', 'Do not install bytecode compiled .pyc files (configuration) [Default:install]'),
			('--nopyo', 'pyo', 'Do not install optimised compiled .pyo files (configuration) [Default:install]')):
		opt.add_option(flag,
			action='store_false',
			default=1,
			help=helptext,
			dest=dest)

531
tools/wafadmin/Tools/qt4.py

@ -0,0 +1,531 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2006 (ita)
"""
Qt4 support
If QT4_ROOT is given (absolute path), the configuration will look in it first
This module also demonstrates how to add tasks dynamically (when the build has started)
"""
try:
from xml.sax import make_parser
from xml.sax.handler import ContentHandler
except ImportError:
has_xml = False
ContentHandler = object
else:
has_xml = True
import os, sys
import ccroot, cxx
import TaskGen, Task, Utils, Runner, Options, Node
from TaskGen import taskgen, feature, after, extension
from Logs import error
from Constants import *
# header extensions tried when resolving a "foo.moc" include back to its header
MOC_H = ['.h', '.hpp', '.hxx', '.hh']
# file extensions hooked by this tool
EXT_RCC = ['.qrc']
EXT_UI = ['.ui']
EXT_QT4 = ['.cpp', '.cc', '.cxx', '.C']
class qxx_task(Task.Task):
	"A cpp task that may create a moc task dynamically"

	before = ['cxx_link', 'static_link']

	def __init__(self, *k, **kw):
		Task.Task.__init__(self, *k, **kw)
		# set once the moc tasks (if any) have been created for this file
		self.moc_done = 0

	def scan(self):
		"""Delegate to the C/C++ scanner, but move .moc files from the node
		dependencies to the name dependencies (they may not exist yet)."""
		(nodes, names) = ccroot.scan(self)
		# for some reasons (variants) the moc node may end in the list of node deps
		for x in nodes:
			if x.name.endswith('.moc'):
				nodes.remove(x)
				names.append(x.relpath_gen(self.inputs[0].parent))
		return (nodes, names)

	def runnable_status(self):
		if self.moc_done:
			# if there is a moc task, delay the computation of the file signature
			for t in self.run_after:
				if not t.hasrun:
					return ASK_LATER
			# the moc file enters in the dependency calculation
			# so we need to recompute the signature when the moc file is present
			self.signature()
			return Task.Task.runnable_status(self)
		else:
			# yes, really, there are people who generate cxx files
			for t in self.run_after:
				if not t.hasrun:
					return ASK_LATER
			# first call: create the moc tasks, then ask to be rescheduled
			self.add_moc_tasks()
			return ASK_LATER

	def add_moc_tasks(self):
		"""Create a moc task for each '#include "foo.moc"' found by the scanner
		and schedule them ahead of this compilation."""
		node = self.inputs[0]
		tree = node.__class__.bld
		try:
			# compute the signature once to know if there is a moc file to create
			self.signature()
		except KeyError:
			# the moc file may be referenced somewhere else
			pass
		else:
			# remove the signature, it must be recomputed with the moc task
			delattr(self, 'cache_sig')
		moctasks=[]
		mocfiles=[]
		variant = node.variant(self.env)
		try:
			tmp_lst = tree.raw_deps[self.unique_id()]
			tree.raw_deps[self.unique_id()] = []
		except KeyError:
			tmp_lst = []
		for d in tmp_lst:
			if not d.endswith('.moc'): continue
			# paranoid check
			if d in mocfiles:
				error("paranoia owns")
				continue
			# process that base.moc only once
			mocfiles.append(d)
			# find the extension - this search is done only once
			ext = ''
			try: ext = Options.options.qt_header_ext
			except AttributeError: pass
			if not ext:
				base2 = d[:-4]
				paths = [node.parent.srcpath(self.env), node.parent.bldpath(self.env)]
				poss = [(x, y) for x in MOC_H for y in paths]
				for (i, path) in poss:
					try:
						# TODO we could use find_resource
						os.stat(os.path.join(path, base2+i))
					except OSError:
						pass
					else:
						ext = i
						break
				if not ext: raise Utils.WafError("no header found for %s which is a moc file" % str(d))
			# next time we will not search for the extension (look at the 'for' loop below)
			# NOTE(review): 'base2' and 'i' leak out of the search above; if ext came
			# from qt_header_ext this line would hit undefined names — confirm upstream.
			h_node = node.parent.find_resource(base2+i)
			m_node = h_node.change_ext('.moc')
			tree.node_deps[(self.inputs[0].parent.id, self.env.variant(), m_node.name)] = h_node
			# create the task
			task = Task.TaskBase.classes['moc'](self.env, normal=0)
			task.set_inputs(h_node)
			task.set_outputs(m_node)
			# inject it at the front of the scheduler queue
			generator = tree.generator
			generator.outstanding.insert(0, task)
			generator.total += 1
			moctasks.append(task)
		# remove raw deps except the moc files to save space (optimization)
		tmp_lst = tree.raw_deps[self.unique_id()] = mocfiles
		# look at the file inputs, it is set right above
		lst = tree.node_deps.get(self.unique_id(), ())
		for d in lst:
			name = d.name
			if name.endswith('.moc'):
				task = Task.TaskBase.classes['moc'](self.env, normal=0)
				task.set_inputs(tree.node_deps[(self.inputs[0].parent.id, self.env.variant(), name)]) # 1st element in a tuple
				task.set_outputs(d)
				generator = tree.generator
				generator.outstanding.insert(0, task)
				generator.total += 1
				moctasks.append(task)
		# simple scheduler dependency: run the moc task before others
		self.run_after = moctasks
		self.moc_done = 1

	# reuse the plain c++ compilation for the actual run
	run = Task.TaskBase.classes['cxx'].__dict__['run']
def translation_update(task):
	"""Run lupdate over every input source, collecting strings into all output .ts files."""
	targets = " ".join([a.abspath(task.env) for a in task.outputs])
	lupdate = task.env['QT_LUPDATE']
	for node in task.inputs:
		src = node.abspath(task.env)
		cmd = "%s %s -ts %s" % (lupdate, src, targets)
		Utils.pprint('BLUE', cmd)
		task.generator.bld.exec_command(cmd)
class XMLHandler(ContentHandler):
	"""SAX handler accumulating the text of every <file> element of a qrc document."""
	def __init__(self):
		self.buf = []
		self.files = []
	def startElement(self, name, attrs):
		if name == 'file':
			self.buf = []
	def characters(self, chunk):
		self.buf.append(chunk)
	def endElement(self, name):
		if name == 'file':
			self.files.append(''.join(self.buf))
def scan(self):
	"add the dependency on the files referenced in the qrc"
	parser = make_parser()
	handler = XMLHandler()
	parser.setContentHandler(handler)
	qrc = self.inputs[0]
	stream = open(qrc.abspath(self.env))
	parser.parse(stream)
	stream.close()
	nodes = []
	names = []
	root = qrc.parent
	for entry in handler.files:
		entry = entry.encode('utf8')
		found = root.find_resource(entry)
		if found:
			nodes.append(found)
		else:
			names.append(entry)
	return (nodes, names)
@extension(EXT_RCC)
def create_rcc_task(self, node):
	"hook for rcc files"
	# rcc turns the .qrc into a generated _rc.cpp, then a cxx task compiles it
	generated = node.change_ext('_rc.cpp')
	rcc = self.create_task('rcc')
	rcc.inputs = [node]
	rcc.outputs = [generated]
	compile = self.create_task('cxx')
	compile.inputs = [generated]
	compile.outputs = [generated.change_ext('.o')]
	self.compiled_tasks.append(compile)
	return compile
@extension(EXT_UI)
def create_uic_task(self, node):
	"hook for uic tasks"
	# foo.ui -> ui_foo.h (per env['ui_PATTERN'])
	task = self.create_task('ui4')
	task.inputs = [node]
	task.outputs = [self.path.find_or_declare(self.env['ui_PATTERN'] % node.name[:-3])]
class qt4_taskgen(cxx.cxx_taskgen):
	"""A c++ task generator with the 'qt4' feature enabled by default."""
	def __init__(self, *k, **kw):
		cxx.cxx_taskgen.__init__(self, *k, **kw)
		self.features.append('qt4')
@extension('.ts')
def add_lang(self, node):
	"""add all the .ts file into self.lang"""
	existing = self.to_list(getattr(self, 'lang', []))
	self.lang = existing + [node]
@feature('qt4')
@after('apply_link')
def apply_qt4(self):
	"""Create the translation tasks (.ts -> .qm, optional lupdate and qm2rcc)
	and compute the MOC_FLAGS from the current CXXFLAGS."""
	if getattr(self, 'lang', None):
		update = getattr(self, 'update', None)
		lst=[]
		trans=[]
		for l in self.to_list(self.lang):
			# entries may be plain names or Node objects
			if not isinstance(l, Node.Node):
				l = self.path.find_resource(l+'.ts')
			t = self.create_task('ts2qm')
			t.set_inputs(l)
			t.set_outputs(l.change_ext('.qm'))
			lst.append(t.outputs[0])
			if update:
				trans.append(t.inputs[0])
		if update and Options.options.trans_qt4:
			# we need the cpp files given, except the rcc task we create after
			# FIXME may be broken
			u = Task.TaskCmd(translation_update, self.env, 2)
			u.inputs = [a.inputs[0] for a in self.compiled_tasks]
			u.outputs = trans
		if getattr(self, 'langname', None):
			# bundle all .qm files into <langname>.qrc, compile it and link it in
			t = Task.TaskBase.classes['qm2rcc'](self.env)
			t.set_inputs(lst)
			t.set_outputs(self.path.find_or_declare(self.langname+'.qrc'))
			t.path = self.path
			k = create_rcc_task(self, t.outputs[0])
			self.link_task.inputs.append(k.outputs[0])
	# moc only needs the -D and -I flags out of the full CXXFLAGS
	lst = []
	for flag in self.to_list(self.env['CXXFLAGS']):
		if len(flag) < 2: continue
		if flag[0:2] == '-D' or flag[0:2] == '-I':
			lst.append(flag)
	self.env['MOC_FLAGS'] = lst
@extension(EXT_QT4)
def cxx_hook(self, node):
	"""Compile a c++ source with the qxx task, which may spawn moc tasks later."""
	task = self.create_task('qxx')
	self.compiled_tasks.append(task)
	try:
		ext = self.obj_ext
	except AttributeError:
		ext = '_%d.o' % self.idx
	task.inputs = [node]
	task.outputs = [node.change_ext(ext)]
def process_qm2rcc(task):
	"""Write a .qrc resource file listing every input node.

	task.inputs are the .qm nodes, task.outputs[0] is the .qrc file to
	generate; entries are written relative to task.path.
	"""
	outfile = task.outputs[0].abspath(task.env)
	f = open(outfile, 'w')
	try:
		# fixed header/footer expected by Qt's rcc compiler
		f.write('<!DOCTYPE RCC><RCC version="1.0">\n<qresource>\n')
		for k in task.inputs:
			f.write(' <file>')
			f.write(k.path_to_parent(task.path))
			f.write('</file>\n')
		f.write('</qresource>\n</RCC>')
	finally:
		# close the handle even if a write fails (was leaked on error before)
		f.close()
# Register the qt4 task classes with the scheduler.
b = Task.simple_task_type
b('moc', '${QT_MOC} ${MOC_FLAGS} ${SRC} ${MOC_ST} ${TGT}', color='BLUE', vars=['QT_MOC', 'MOC_FLAGS'], shell=False)
cls = b('rcc', '${QT_RCC} -name ${SRC[0].name} ${SRC[0].abspath(env)} ${RCC_ST} -o ${TGT}', color='BLUE', before='cxx moc qxx_task', after="qm2rcc", shell=False)
# rcc tasks depend on the files listed inside the .qrc (see scan above)
cls.scan = scan
b('ui4', '${QT_UIC} ${SRC} -o ${TGT}', color='BLUE', before='cxx moc qxx_task', shell=False)
b('ts2qm', '${QT_LRELEASE} ${QT_LRELEASE_FLAGS} ${SRC} -qm ${TGT}', color='BLUE', before='qm2rcc', shell=False)
# qm2rcc is a python-function task (see process_qm2rcc above)
Task.task_type_from_func('qm2rcc', vars=[], func=process_qm2rcc, color='BLUE', before='rcc', after='ts2qm')
def detect_qt4(conf):
	"""Locate qmake and the qt4 tools, then configure the QT* uselib variables
	(via OSX frameworks when applicable, otherwise via pkg-config)."""
	env = conf.env
	opt = Options.options
	qtdir = getattr(opt, 'qtdir', '')
	qtbin = getattr(opt, 'qtbin', '')
	qtlibs = getattr(opt, 'qtlibs', '')
	useframework = getattr(opt, 'use_qt4_osxframework', True)
	paths = []
	# the path to qmake has been given explicitely
	if qtbin:
		paths = [qtbin]
	# the qt directory has been given - we deduce the qt binary path
	if not qtdir:
		qtdir = conf.environ.get('QT4_ROOT', '')
		qtbin = os.path.join(qtdir, 'bin')
		paths = [qtbin]
	# no qtdir, look in the path and in /usr/local/Trolltech
	if not qtdir:
		paths = os.environ.get('PATH', '').split(os.pathsep)
		paths.append('/usr/share/qt4/bin/')
		try:
			lst = os.listdir('/usr/local/Trolltech/')
		except OSError:
			pass
		else:
			if lst:
				lst.sort()
				lst.reverse()
				# keep the highest version
				qtdir = '/usr/local/Trolltech/%s/' % lst[0]
				qtbin = os.path.join(qtdir, 'bin')
				paths.append(qtbin)
	# at the end, try to find qmake in the paths given
	# keep the one with the highest version
	cand = None
	prev_ver = ['4', '0', '0']
	for qmk in ['qmake-qt4', 'qmake4', 'qmake']:
		qmake = conf.find_program(qmk, path_list=paths)
		if qmake:
			try:
				version = Utils.cmd_output([qmake, '-query', 'QT_VERSION']).strip()
			except ValueError:
				pass
			else:
				if version:
					# NOTE(review): list-of-strings comparison, so '10' < '9' — confirm acceptable for qt4
					new_ver = version.split('.')
					if new_ver > prev_ver:
						cand = qmake
						prev_ver = new_ver
	if cand:
		qmake = cand
	else:
		conf.fatal('could not find qmake for qt4')
	conf.env.QMAKE = qmake
	# ask qmake for the actual install layout
	qtincludes = Utils.cmd_output([qmake, '-query', 'QT_INSTALL_HEADERS']).strip()
	qtdir = Utils.cmd_output([qmake, '-query', 'QT_INSTALL_PREFIX']).strip() + os.sep
	qtbin = Utils.cmd_output([qmake, '-query', 'QT_INSTALL_BINS']).strip() + os.sep
	if not qtlibs:
		try:
			qtlibs = Utils.cmd_output([qmake, '-query', 'QT_LIBRARIES']).strip() + os.sep
		except ValueError:
			qtlibs = os.path.join(qtdir, 'lib')
	def find_bin(lst, var):
		# store the first of the candidate programs found into env[var]
		for f in lst:
			ret = conf.find_program(f, path_list=paths)
			if ret:
				env[var]=ret
				break
	vars = "QtCore QtGui QtUiTools QtNetwork QtOpenGL QtSql QtSvg QtTest QtXml QtWebKit Qt3Support".split()
	framework_ok = False
	if sys.platform == "darwin" and useframework:
		for i in vars:
			e = conf.create_framework_configurator()
			e.path = [qtlibs, '/Library/Frameworks']
			e.name = i
			e.remove_dot_h = True
			e.run()
			if not i == 'QtCore':
				# strip -F flag so it don't get reduant
				for r in env['CCFLAGS_' + i.upper()]:
					if r.startswith('-F'):
						env['CCFLAGS_' + i.upper()].remove(r)
						break
			# incflag = '-I%s' % os.path.join(qtincludes, i)
			# if not incflag in env["CCFLAGS_" + i.upper ()]:
			# 	env['CCFLAGS_' + i.upper ()] += [incflag]
			# if not incflag in env["CXXFLAGS_" + i.upper ()]:
			# 	env['CXXFLAGS_' + i.upper ()] += [incflag]
		# now we add some static depends.
		if conf.is_defined('HAVE_QTOPENGL'):
			env.append_unique('FRAMEWORK_QTOPENGL', 'OpenGL')
		if conf.is_defined('HAVE_QTGUI'):
			env.append_unique('FRAMEWORK_QTGUI', ['AppKit', 'ApplicationServices'])
		framework_ok = True
	# check for the qt includes first
	if not conf.is_defined("HAVE_QTGUI"):
		if not qtincludes: qtincludes = os.path.join(qtdir, 'include')
		env.QTINCLUDEPATH = qtincludes
		lst = [qtincludes, '/usr/share/qt4/include/', '/opt/qt4/include']
		conf.check(header_name='QtGui/QFont', define_name='HAVE_QTGUI', mandatory=1, includes=lst)
	# locate the qt4 tools; uic must be version 4, not 3
	find_bin(['uic-qt3', 'uic3'], 'QT_UIC3')
	find_bin(['uic-qt4', 'uic'], 'QT_UIC')
	if not env['QT_UIC']:
		conf.fatal('cannot find the uic compiler for qt4')
	try:
		version = Utils.cmd_output(env['QT_UIC'] + " -version 2>&1").strip()
	except ValueError:
		conf.fatal('your uic compiler is for qt3, add uic for qt4 to your path')
	version = version.replace('Qt User Interface Compiler ','')
	version = version.replace('User Interface Compiler for Qt', '')
	if version.find(" 3.") != -1:
		conf.check_message('uic version', '(too old)', 0, option='(%s)'%version)
		sys.exit(1)
	conf.check_message('uic version', '', 1, option='(%s)'%version)
	find_bin(['moc-qt4', 'moc'], 'QT_MOC')
	find_bin(['rcc'], 'QT_RCC')
	find_bin(['lrelease-qt4', 'lrelease'], 'QT_LRELEASE')
	find_bin(['lupdate-qt4', 'lupdate'], 'QT_LUPDATE')
	# command-line templates used by the task rules
	env['UIC3_ST']= '%s -o %s'
	env['UIC_ST'] = '%s -o %s'
	env['MOC_ST'] = '-o'
	env['ui_PATTERN'] = 'ui_%s.h'
	env['QT_LRELEASE_FLAGS'] = ['-silent']
	if not framework_ok: # framework_ok is false either when the platform isn't OSX, Qt4 shall not be used as framework, or Qt4 could not be found as framework
		vars_debug = [a+'_debug' for a in vars]
		pkgconfig = env['pkg-config'] or 'PKG_CONFIG_PATH=%s:%s/pkgconfig:/usr/lib/qt4/lib/pkgconfig:/opt/qt4/lib/pkgconfig:/usr/lib/qt4/lib:/opt/qt4/lib pkg-config --silence-errors' % (qtlibs, qtlibs)
		for i in vars_debug+vars:
			try:
				conf.check_cfg(package=i, args='--cflags --libs', path=pkgconfig)
			except ValueError:
				pass
		# the libpaths are set nicely, unfortunately they make really long command-lines
		# remove the qtcore ones from qtgui, etc
		def process_lib(vars_, coreval):
			for d in vars_:
				var = d.upper()
				if var == 'QTCORE': continue
				value = env['LIBPATH_'+var]
				if value:
					core = env[coreval]
					accu = []
					for lib in value:
						if lib in core: continue
						accu.append(lib)
					env['LIBPATH_'+var] = accu
		process_lib(vars, 'LIBPATH_QTCORE')
		process_lib(vars_debug, 'LIBPATH_QTCORE_DEBUG')
		# rpath if wanted
		if Options.options.want_rpath:
			def process_rpath(vars_, coreval):
				for d in vars_:
					var = d.upper()
					value = env['LIBPATH_'+var]
					if value:
						core = env[coreval]
						accu = []
						for lib in value:
							if var != 'QTCORE':
								if lib in core:
									continue
							accu.append('-Wl,--rpath='+lib)
						env['RPATH_'+var] = accu
			process_rpath(vars, 'LIBPATH_QTCORE')
			process_rpath(vars_debug, 'LIBPATH_QTCORE_DEBUG')
	env['QTLOCALE'] = str(env['PREFIX'])+'/share/locale'
def detect(conf):
	"""Tool entry point: delegate to detect_qt4."""
	detect_qt4(conf)
def set_options(opt):
	"""Register the qt4 command-line options."""
	opt.add_option('--want-rpath', type='int', default=1, dest='want_rpath', help='set rpath to 1 or 0 [Default 1]')
	opt.add_option('--header-ext',
		type='string',
		default='',
		help='header extension for moc files',
		dest='qt_header_ext')
	# plain string options for the qt installation layout
	for name in 'qtdir qtbin qtlibs'.split():
		opt.add_option('--' + name, type='string', default='', dest=name)
	if sys.platform == "darwin":
		opt.add_option('--no-qt4-framework', action="store_false", help='do not use the framework version of Qt4 in OS X', dest='use_qt4_osxframework',default=True)
	opt.add_option('--translate', action="store_true", help="collect translation strings", dest="trans_qt4", default=False)

75
tools/wafadmin/Tools/suncc.py

@ -0,0 +1,75 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2006 (ita)
# Ralf Habacker, 2006 (rh)
import os, optparse
import Utils, Options, Configure
import ccroot, ar
from Configure import conftest
@conftest
def find_scc(conf):
	"""Locate the Sun C compiler and verify it answers to '-flags'."""
	v = conf.env
	cc = None
	if v['CC']:
		cc = v['CC']
	elif 'CC' in conf.environ:
		cc = conf.environ['CC']
	#if not cc: cc = conf.find_program('gcc', var='CC')
	if not cc:
		cc = conf.find_program('cc', var='CC')
	if not cc:
		conf.fatal('suncc was not found')
	try:
		if not Utils.cmd_output('%s -flags' % cc):
			conf.fatal('suncc %r was not found' % cc)
	except ValueError:
		conf.fatal('suncc -flags could not be executed')
	v['CC'] = cc
	v['CC_NAME'] = 'sun'
@conftest
def scc_common_flags(conf):
	"""Fill in the compiler/linker flag templates for the Sun C compiler."""
	env = conf.env

	# CPPFLAGS CCDEFINES _CCINCFLAGS _CCDEFFLAGS
	env['CC_SRC_F'] = ''
	env['CC_TGT_F'] = ['-c', '-o', '']
	env['CPPPATH_ST'] = '-I%s' # template for adding include paths

	# linker
	if not env['LINK_CC']:
		env['LINK_CC'] = env['CC']
	env['CCLNK_SRC_F'] = ''
	env['CCLNK_TGT_F'] = ['-o', ''] # solaris hack, separate the -o from the target
	env['LIB_ST'] = '-l%s' # template for adding libs
	env['LIBPATH_ST'] = '-L%s' # template for adding libpaths
	env['STATICLIB_ST'] = '-l%s'
	env['STATICLIBPATH_ST'] = '-L%s'
	env['CCDEFINES_ST'] = '-D%s'
	env['SONAME_ST'] = '-Wl,-h -Wl,%s'
	env['SHLIB_MARKER'] = '-Bdynamic'
	env['STATICLIB_MARKER'] = '-Bstatic'

	# program
	env['program_PATTERN'] = '%s'

	# shared library
	env['shlib_CCFLAGS'] = ['-Kpic', '-DPIC']
	env['shlib_LINKFLAGS'] = ['-G']
	env['shlib_PATTERN'] = 'lib%s.so'

	# static lib
	env['staticlib_LINKFLAGS'] = ['-Bstatic']
	env['staticlib_PATTERN'] = 'lib%s.a'
# conftest functions executed (in order) when this tool is loaded by Configure
detect = '''
find_scc
find_cpp
find_ar
scc_common_flags
cc_load_tools
cc_add_flags
'''

69
tools/wafadmin/Tools/suncxx.py

@ -0,0 +1,69 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2006 (ita)
# Ralf Habacker, 2006 (rh)
import os, optparse
import Utils, Options, Configure
import ccroot, ar
from Configure import conftest
@conftest
def find_sxx(conf):
	"""Locate the Sun C++ compiler (c++, or the studio 'CC' binary)."""
	v = conf.env
	cc = None
	if v['CXX']:
		cc = v['CXX']
	elif 'CXX' in conf.environ:
		cc = conf.environ['CXX']
	#if not cc: cc = conf.find_program('g++', var='CXX')
	if not cc:
		cc = conf.find_program('c++', var='CXX')
	if not cc:
		cc = conf.find_program('CC', var='CXX') #studio
	if not cc:
		conf.fatal('sunc++ was not found')
	v['CXX'] = cc
	v['CXX_NAME'] = 'sun'
@conftest
def sxx_common_flags(conf):
	"""Fill in the compiler/linker flag templates for the Sun C++ compiler."""
	env = conf.env

	# CPPFLAGS CXXDEFINES _CXXINCFLAGS _CXXDEFFLAGS
	env['CXX_SRC_F'] = ''
	env['CXX_TGT_F'] = ['-c', '-o', '']
	env['CPPPATH_ST'] = '-I%s' # template for adding include paths

	# linker
	if not env['LINK_CXX']:
		env['LINK_CXX'] = env['CXX']
	env['CXXLNK_SRC_F'] = ''
	env['CXXLNK_TGT_F'] = ['-o', ''] # solaris hack, separate the -o from the target
	env['LIB_ST'] = '-l%s' # template for adding libs
	env['LIBPATH_ST'] = '-L%s' # template for adding libpaths
	env['STATICLIB_ST'] = '-l%s'
	env['STATICLIBPATH_ST'] = '-L%s'
	env['CXXDEFINES_ST'] = '-D%s'
	env['SONAME_ST'] = '-Wl,-h -Wl,%s'
	env['SHLIB_MARKER'] = '-Bdynamic'
	env['STATICLIB_MARKER'] = '-Bstatic'

	# program
	env['program_PATTERN'] = '%s'

	# shared library
	env['shlib_CXXFLAGS'] = ['-Kpic', '-DPIC']
	env['shlib_LINKFLAGS'] = ['-G']
	env['shlib_PATTERN'] = 'lib%s.so'

	# static lib
	env['staticlib_LINKFLAGS'] = ['-Bstatic']
	env['staticlib_PATTERN'] = 'lib%s.a'
# conftest functions executed (in order) when this tool is loaded by Configure
detect = '''
find_sxx
find_cpp
find_ar
sxx_common_flags
cxx_load_tools
cxx_add_flags
'''

235
tools/wafadmin/Tools/tex.py

@ -0,0 +1,235 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2006 (ita)
"TeX/LaTeX/PDFLaTeX support"
import os, re
import Utils, TaskGen, Task, Runner, Build
from TaskGen import feature, before
from Logs import error, warn, debug
re_tex = re.compile(r'\\(?P<type>include|input|import|bringin){(?P<file>[^{}]*)}', re.M)
def scan(self):
	"""Scan a (la)tex source for \\include/\\input/\\import/\\bringin references,
	returning (nodes, names) of resolved and unresolved dependencies."""
	node = self.inputs[0]
	env = self.env
	nodes = []
	names = []
	if not node: return (nodes, names)
	code = Utils.readf(node.abspath(env))
	curdirnode = self.curdirnode
	abs = curdirnode.abspath()
	for match in re_tex.finditer(code):
		path = match.group('file')
		if path:
			# try the path as-is, then with the usual tex extensions
			for k in ['', '.tex', '.ltx']:
				# add another loop for the tex include paths?
				debug('tex: trying %s%s' % (path, k))
				try:
					os.stat(abs+os.sep+path+k)
				except OSError:
					continue
				found = path+k
				node = curdirnode.find_resource(found)
				if node:
					nodes.append(node)
			# NOTE(review): for/else with no 'break' in the loop — this else
			# branch runs on every iteration, so 'path' is always appended to
			# names even when resolved above; confirm whether intended.
			else:
				debug('tex: could not find %s' % path)
				names.append(path)
	debug("tex: found the following : %s and names %s" % (nodes, names))
	return (nodes, names)
# 'bibdata' in the generated .aux file means bibtex must be run
g_bibtex_re = re.compile('bibdata', re.M)
def tex_build(task, command='LATEX'):
	"""Run latex/pdflatex on the task input, then bibtex/makeindex as needed,
	and re-run the compiler until the .aux file stops changing (max 10 passes).
	Returns 0 on success, or the failing command's return code."""
	env = task.env
	bld = task.generator.bld
	com = '%s %s' % (env[command], env.get_flat(command+'FLAGS'))
	# run non-interactively unless the user asked for prompts
	if not env['PROMPT_LATEX']: com = "%s %s" % (com, '-interaction=batchmode')
	node = task.inputs[0]
	reldir = node.bld_dir(env)
	srcfile = node.srcpath(env)
	# build '../..'-style prefixes so the source is reachable after 'cd reldir'
	lst = []
	for c in Utils.split_path(reldir):
		if c: lst.append('..')
	sr = os.path.join(*(lst + [srcfile]))
	sr2 = os.path.join(*(lst + [node.parent.srcpath(env)]))
	aux_node = node.change_ext('.aux')
	idx_node = node.change_ext('.idx')
	hash = ''
	old_hash = ''
	nm = aux_node.name
	docuname = nm[ : len(nm) - 4 ] # 4 is the size of ".aux"
	latex_compile_cmd = 'cd %s && TEXINPUTS=%s:$TEXINPUTS %s %s' % (reldir, sr2, com, sr)
	warn('first pass on %s' % command)
	ret = bld.exec_command(latex_compile_cmd)
	if ret: return ret
	# look in the .aux file if there is a bibfile to process
	try:
		ct = Utils.readf(aux_node.abspath(env))
	except (OSError, IOError):
		error('error bibtex scan')
	else:
		fo = g_bibtex_re.findall(ct)
		# yes, there is a .aux file to process
		if fo:
			bibtex_compile_cmd = 'cd %s && BIBINPUTS=%s:$BIBINPUTS %s %s' % (reldir, sr2, env['BIBTEX'], docuname)
			warn('calling bibtex')
			ret = bld.exec_command(bibtex_compile_cmd)
			if ret:
				error('error when calling bibtex %s' % bibtex_compile_cmd)
				return ret
	# look on the filesystem if there is a .idx file to process
	try:
		idx_path = idx_node.abspath(env)
		os.stat(idx_path)
	except OSError:
		error('error file.idx scan')
	else:
		makeindex_compile_cmd = 'cd %s && %s %s' % (reldir, env['MAKEINDEX'], idx_path)
		warn('calling makeindex')
		ret = bld.exec_command(makeindex_compile_cmd)
		if ret:
			error('error when calling makeindex %s' % makeindex_compile_cmd)
			return ret
	i = 0
	while i < 10:
		# prevent against infinite loops - one never knows
		i += 1
		# watch the contents of file.aux
		old_hash = hash
		try:
			hash = Utils.h_file(aux_node.abspath(env))
		except KeyError:
			# NOTE(review): only KeyError is caught here; whether h_file maps a
			# missing file to KeyError is not visible from this chunk — confirm.
			error('could not read aux.h -> %s' % aux_node.abspath(env))
			pass
		# debug
		#print "hash is, ", hash, " ", old_hash
		# stop if file.aux does not change anymore
		if hash and hash == old_hash: break
		# run the command
		warn('calling %s' % command)
		ret = bld.exec_command(latex_compile_cmd)
		if ret:
			error('error when calling %s %s' % (command, latex_compile_cmd))
			return ret
	# 0 means no error
	return 0
# environment variables whose change should trigger a rebuild
latex_vardeps = ['LATEX', 'LATEXFLAGS']
pdflatex_vardeps = ['PDFLATEX', 'PDFLATEXFLAGS']

def latex_build(task):
	"""Task function: compile with latex."""
	return tex_build(task, 'LATEX')

def pdflatex_build(task):
	"""Task function: compile with pdflatex."""
	return tex_build(task, 'PDFLATEX')
class tex_taskgen(TaskGen.task_gen):
	"""Task generator for the 'tex' feature."""
	def __init__(self, *k, **kw):
		TaskGen.task_gen.__init__(self, *k, **kw)
@feature('tex')
@before('apply_core')
def apply_tex(self):
	"""Create latex/pdflatex tasks for each source document, plus the optional
	ps/pdf conversion tasks requested through self.outs."""
	# default to pdflatex when no (valid) type is given
	if not getattr(self, 'type', None) in ['latex', 'pdflatex']:
		self.type = 'pdflatex'
	tree = self.bld
	outs = Utils.to_list(getattr(self, 'outs', []))
	# prompt for incomplete files (else the batchmode is used)
	self.env['PROMPT_LATEX'] = getattr(self, 'prompt', 1)
	deps_lst = []
	if getattr(self, 'deps', None):
		deps = self.to_list(self.deps)
		for filename in deps:
			# NOTE(review): a missing dependency yields n == None and is
			# appended as-is — confirm that is intended.
			n = self.path.find_resource(filename)
			if not n in deps_lst: deps_lst.append(n)
	self.source = self.to_list(self.source)
	for filename in self.source:
		base, ext = os.path.splitext(filename)
		node = self.path.find_resource(filename)
		if not node: raise Utils.WafError('cannot find %s' % filename)
		# the main compilation task
		if self.type == 'latex':
			task = self.create_task('latex')
			task.set_inputs(node)
			task.set_outputs(node.change_ext('.dvi'))
		elif self.type == 'pdflatex':
			task = self.create_task('pdflatex')
			task.set_inputs(node)
			task.set_outputs(node.change_ext('.pdf'))
		task.env = self.env
		task.curdirnode = self.path
		# add the manual dependencies
		if deps_lst:
			variant = node.variant(self.env)
			try:
				lst = tree.node_deps[task.unique_id()]
				for n in deps_lst:
					if not n in lst:
						lst.append(n)
			except KeyError:
				tree.node_deps[task.unique_id()] = deps_lst
		# optional conversions: dvi->ps/pdf for latex, pdf->ps for pdflatex
		if self.type == 'latex':
			if 'ps' in outs:
				pstask = self.create_task('dvips')
				pstask.set_inputs(task.outputs)
				pstask.set_outputs(node.change_ext('.ps'))
			if 'pdf' in outs:
				pdftask = self.create_task('dvipdf')
				pdftask.set_inputs(task.outputs)
				pdftask.set_outputs(node.change_ext('.pdf'))
		elif self.type == 'pdflatex':
			if 'ps' in outs:
				pstask = self.create_task('pdf2ps')
				pstask.set_inputs(task.outputs)
				pstask.set_outputs(node.change_ext('.ps'))
	# all sources were consumed by the tasks above
	self.source = []
def detect(conf):
	"""Configuration: locate the TeX toolchain programs and initialise an
	empty FLAGS variable for each of them."""
	env = conf.env
	programs = 'tex latex pdflatex bibtex dvips dvipdf ps2pdf makeindex pdf2ps'.split()
	for name in programs:
		var = name.upper()
		conf.find_program(name, var=var)
		env[var + 'FLAGS'] = ''
	# dvips must generate pdf-compatible postscript for the ps2pdf route
	env['DVIPSFLAGS'] = '-Ppdf'
# Register the simple command-line tasks of the TeX toolchain.
b = Task.simple_task_type
b('tex', '${TEX} ${TEXFLAGS} ${SRC}', color='BLUE', shell=False)
b('bibtex', '${BIBTEX} ${BIBTEXFLAGS} ${SRC}', color='BLUE', shell=False)
b('dvips', '${DVIPS} ${DVIPSFLAGS} ${SRC} -o ${TGT}', color='BLUE', after="latex pdflatex tex bibtex", shell=False)
b('dvipdf', '${DVIPDF} ${DVIPDFFLAGS} ${SRC} ${TGT}', color='BLUE', after="latex pdflatex tex bibtex", shell=False)
b('pdf2ps', '${PDF2PS} ${PDF2PSFLAGS} ${SRC} ${TGT}', color='BLUE', after="dvipdf pdflatex", shell=False)

# latex/pdflatex are python-function tasks (latex_build/pdflatex_build) and
# both reuse the same dependency scanner.
b = Task.task_type_from_func
cls = b('latex', latex_build, vars=latex_vardeps)
cls.scan = scan
cls = b('pdflatex', pdflatex_build, vars=pdflatex_vardeps)
cls.scan = scan

273
tools/wafadmin/Tools/vala.py

@@ -0,0 +1,273 @@
#!/usr/bin/env python
# encoding: utf-8
# Ali Sabil, 2007
import os.path, shutil
import Task, Runner, Utils, Logs, Build, Node
from TaskGen import extension, after, before
EXT_VALA = ['.vala', '.gs']
class valac_task(Task.Task):
	# Task that compiles all .vala/.gs sources of a task generator to C in one
	# valac invocation; the generated .c files are then handed to the cc chain.
	vars = ("VALAC", "VALAC_VERSION", "VALAFLAGS")
	before = ("cc", "cxx")

	def run(self):
		# Build the valac command line, execute it, then move the outputs
		# that valac writes to unexpected places (version-dependent quirks).
		env = self.env
		inputs = [a.srcpath(env) for a in self.inputs]
		valac = env['VALAC']
		vala_flags = env.get_flat('VALAFLAGS')
		top_src = self.generator.bld.srcnode.abspath()
		top_bld = self.generator.bld.srcnode.abspath(env)

		if env['VALAC_VERSION'] > (0, 1, 6):
			cmd = [valac, '-C', '--quiet', vala_flags]
		else:
			cmd = [valac, '-C', vala_flags]

		if self.threading:
			cmd.append('--thread')

		if self.target_glib:
			cmd.append('--target-glib=%s' % self.target_glib)

		features = self.generator.features
		if 'cshlib' in features or 'cstaticlib' in features:
			# libraries also produce a .vapi (and from 0.7.0 a header)
			output_dir = self.outputs[0].bld_dir(env)
			cmd.append('--library ' + self.target)
			if env['VALAC_VERSION'] >= (0, 7, 0):
				cmd.append('--header ' + os.path.join(output_dir, self.target + '.h'))
				self.outputs.append(self.generator.path.find_or_declare(self.target + '.h'))
			cmd.append('--basedir ' + top_src)
			cmd.append('-d ' + top_bld)
			if env['VALAC_VERSION'] > (0, 7, 2) and hasattr(self, 'gir'):
				cmd.append('--gir=%s.gir' % self.gir)
		else:
			output_dir = self.outputs[0].bld_dir(env)
			cmd.append('-d %s' % output_dir)

		for vapi_dir in self.vapi_dirs:
			cmd.append('--vapidir=%s' % vapi_dir)

		for package in self.packages:
			cmd.append('--pkg %s' % package)

		for package in self.packages_private:
			cmd.append('--pkg %s' % package)

		cmd.append(" ".join(inputs))
		result = self.generator.bld.exec_command(" ".join(cmd))

		if not 'cprogram' in features:
			# generate the .deps file
			if self.packages:
				filename = os.path.join(self.generator.path.abspath(env), "%s.deps" % self.target)
				deps = open(filename, 'w')
				for package in self.packages:
					deps.write(package + '\n')
				deps.close()

			# handle vala 0.1.6 who doesn't honor --directory for the generated .vapi
			self._fix_output("../%s.vapi" % self.target)
			# handle vala >= 0.1.7 who has a weird definition for --directory
			self._fix_output("%s.vapi" % self.target)
			# handle vala >= 0.2.0 who doesn't honor --directory for the generated .gidl
			self._fix_output("%s.gidl" % self.target)
			# handle vala >= 0.3.6 who doesn't honor --directory for the generated .gir
			self._fix_output("%s.gir" % self.target)
			if hasattr(self, 'gir'):
				self._fix_output("%s.gir" % self.gir)

		first = None
		for node in self.outputs:
			if not first:
				first = node
			else:
				if first.parent.id != node.parent.id:
					# issue #483
					if env['VALAC_VERSION'] < (0, 7, 0):
						shutil.move(first.parent.abspath(self.env) + os.sep + node.name, node.abspath(self.env))
		return result

	def install(self):
		# Install headers, .vapi/.deps and .gir files when the generator
		# builds a library and has an install_path.
		bld = self.generator.bld
		features = self.generator.features

		if self.attr("install_path") and ("cshlib" in features or "cstaticlib" in features):
			headers_list = [o for o in self.outputs if o.suffix() == ".h"]
			vapi_list = [o for o in self.outputs if (o.suffix() in (".vapi", ".deps"))]
			gir_list = [o for o in self.outputs if o.suffix() == ".gir"]

			for header in headers_list:
				top_src = self.generator.bld.srcnode
				package = self.env['PACKAGE']
				try:
					# prefer an explicit API_VERSION from the project wscript
					api_version = Utils.g_module.API_VERSION
				except AttributeError:
					# derive it from VERSION: 0.x -> "0.x", y.* -> "y.0"
					version = Utils.g_module.VERSION.split(".")
					if version[0] == "0":
						api_version = "0." + version[1]
					else:
						api_version = version[0] + ".0"
				install_path = '${INCLUDEDIR}/%s-%s/%s' % (package, api_version, header.relpath_gen(top_src))
				bld.install_as(install_path, header, self.env)
			for vapi in vapi_list:
				bld.install_files('${DATAROOTDIR}/vala/vapi', vapi, self.env)
			for gir in gir_list:
				bld.install_files('${DATAROOTDIR}/gir-1.0', gir, self.env)

	def _fix_output(self, output):
		# Move a file that valac dropped in the top build dir to the task's
		# own directory; best-effort, only one of the candidates exists.
		top_bld = self.generator.bld.srcnode.abspath(self.env)
		try:
			src = os.path.join(top_bld, output)
			dst = self.generator.path.abspath (self.env)
			shutil.move(src, dst)
		except:
			pass
@extension(EXT_VALA)
def vala_file(self, node):
	# Extension hook for .vala/.gs files: all sources of one task generator
	# share a single valac task; the first call creates and configures it,
	# later calls only append inputs/outputs.
	valatask = getattr(self, "valatask", None)
	# there is only one vala task and it compiles all vala files .. :-/
	if not valatask:
		valatask = self.create_task('valac')
		self.valatask = valatask
		self.includes = Utils.to_list(getattr(self, 'includes', []))
		valatask.packages = []
		valatask.packages_private = Utils.to_list(getattr(self, 'packages_private', []))
		valatask.vapi_dirs = []
		valatask.target = self.target
		valatask.threading = False
		valatask.install_path = self.install_path
		valatask.target_glib = None

		packages = Utils.to_list(getattr(self, 'packages', []))
		vapi_dirs = Utils.to_list(getattr(self, 'vapi_dirs', []))
		includes = []

		if hasattr(self, 'uselib_local'):
			# walk the transitive closure of local vala libraries so that
			# their .vapi files and include dirs are picked up
			local_packages = Utils.to_list(self.uselib_local)
			seen = []
			while len(local_packages) > 0:
				package = local_packages.pop()
				if package in seen:
					continue
				seen.append(package)

				# check if the package exists
				package_obj = self.name_to_obj(package)
				if not package_obj:
					raise Utils.WafError("object '%s' was not found in uselib_local (required by '%s')" % (package, self.name))

				package_name = package_obj.target
				package_node = package_obj.path
				package_dir = package_node.relpath_gen(self.path)

				for task in package_obj.tasks:
					for output in task.outputs:
						if output.name == package_name + ".vapi":
							# make sure the .vapi exists before we compile
							valatask.set_run_after(task)
							if package_name not in packages:
								packages.append(package_name)
							if package_dir not in vapi_dirs:
								vapi_dirs.append(package_dir)
							if package_dir not in includes:
								includes.append(package_dir)

				if hasattr(package_obj, 'uselib_local'):
					lst = self.to_list(package_obj.uselib_local)
					lst.reverse()
					local_packages = [pkg for pkg in lst if pkg not in seen] + local_packages

		valatask.packages = packages
		for vapi_dir in vapi_dirs:
			try:
				# record both the source and the build variant of each vapi dir
				valatask.vapi_dirs.append(self.path.find_dir(vapi_dir).abspath())
				valatask.vapi_dirs.append(self.path.find_dir(vapi_dir).abspath(self.env))
			except AttributeError:
				Logs.warn("Unable to locate Vala API directory: '%s'" % vapi_dir)

		self.includes.append(node.bld.srcnode.abspath())
		self.includes.append(node.bld.srcnode.abspath(self.env))
		for include in includes:
			try:
				self.includes.append(self.path.find_dir(include).abspath())
				self.includes.append(self.path.find_dir(include).abspath(self.env))
			except AttributeError:
				Logs.warn("Unable to locate include directory: '%s'" % include)

		if hasattr(self, 'threading'):
			valatask.threading = self.threading
			self.uselib = self.to_list(self.uselib)
			if not 'GTHREAD' in self.uselib:
				self.uselib.append('GTHREAD')

		if hasattr(self, 'target_glib'):
			valatask.target_glib = self.target_glib

		if hasattr(self, 'gir'):
			valatask.gir = self.gir

	env = valatask.env

	output_nodes = []

	# each .vala produces a .c that is fed back into the cc chain
	c_node = node.change_ext('.c')
	output_nodes.append(c_node)
	self.allnodes.append(c_node)

	# header/vapi/gir/gidl outputs depend on the valac version in use
	if env['VALAC_VERSION'] < (0, 7, 0):
		output_nodes.append(node.change_ext('.h'))
	else:
		if not 'cprogram' in self.features:
			output_nodes.append(self.path.find_or_declare('%s.h' % self.target))
	if not 'cprogram' in self.features:
		output_nodes.append(self.path.find_or_declare('%s.vapi' % self.target))
		if env['VALAC_VERSION'] > (0, 7, 2):
			if hasattr(self, 'gir'):
				output_nodes.append(self.path.find_or_declare('%s.gir' % self.gir))
		elif env['VALAC_VERSION'] > (0, 3, 5):
			output_nodes.append(self.path.find_or_declare('%s.gir' % self.target))
		elif env['VALAC_VERSION'] > (0, 1, 7):
			output_nodes.append(self.path.find_or_declare('%s.gidl' % self.target))
		if valatask.packages:
			output_nodes.append(self.path.find_or_declare('%s.deps' % self.target))

	valatask.inputs.append(node)
	valatask.outputs.extend(output_nodes)
def detect(conf):
	# Configuration: find valac, check its version against the minimum,
	# ensure gthread flags are available, and record VALAC_VERSION/VALAFLAGS.
	min_version = (0, 1, 6)
	min_version_str = "%d.%d.%d" % min_version

	valac = conf.find_program('valac', var='VALAC', mandatory=True)

	if not conf.env["HAVE_GTHREAD"]:
		conf.check_cfg(package='gthread-2.0', uselib_store='GTHREAD', args='--cflags --libs')

	try:
		# "Vala x.y.z" -> (x, y, z)
		output = Utils.cmd_output(valac + " --version", silent=True)
		version = output.split(' ', 1)[-1].strip().split(".")
		version = [int(x) for x in version]
		valac_version = tuple(version)
	except Exception:
		# unparsable output: treat as too old
		valac_version = (0, 0, 0)

	conf.check_message('program version',
			'valac >= ' + min_version_str,
			valac_version >= min_version,
			"%d.%d.%d" % valac_version)

	conf.check_tool('gnu_dirs')

	if valac_version < min_version:
		conf.fatal("valac version too old to be used with this tool")
		return

	conf.env['VALAC_VERSION'] = valac_version
	conf.env['VALAFLAGS'] = ''

49
tools/wafadmin/Tools/winres.py

@@ -0,0 +1,49 @@
#!/usr/bin/env python
# encoding: utf-8
# Brant Young, 2007
"This hook is called when the class cpp/cc task generator encounters a '.rc' file: X{.rc -> [.res|.rc.o]}"
import os, sys, re
import TaskGen, Task
from Utils import quote_whitespace
from TaskGen import extension
EXT_WINRC = ['.rc']
winrc_str = '${WINRC} ${_CPPDEFFLAGS} ${_CCDEFFLAGS} ${WINRCFLAGS} ${_CPPINCFLAGS} ${_CCINCFLAGS} ${WINRC_TGT_F} ${TGT} ${WINRC_SRC_F} ${SRC}'
@extension(EXT_WINRC)
def rc_file(self, node):
	# Extension hook: compile a .rc resource file and hand the result to the linker.
	obj_ext = '.rc.o'
	# msvc's rc.exe uses '/fo' and emits .res files instead of .rc.o
	if self.env['WINRC_TGT_F'] == '/fo': obj_ext = '.res'

	rctask = self.create_task('winrc')
	rctask.set_inputs(node)
	rctask.set_outputs(node.change_ext(obj_ext))

	# make linker can find compiled resource files
	self.compiled_tasks.append(rctask)

# create our action, for use with rc file
Task.simple_task_type('winrc', winrc_str, color='BLUE', before='cc cxx', shell=False)
def detect(conf):
	"""Configuration: find a Windows resource compiler (windres with gcc-like
	compilers, rc.exe with msvc) and set the matching flag conventions."""
	env = conf.env
	winrc = env['WINRC']
	# gcc/windres conventions by default
	env['WINRC_TGT_F'] = '-o'
	env['WINRC_SRC_F'] = '-i'
	# find rc.exe
	if not winrc:
		compiler = env['CC_NAME']
		if compiler in ['gcc', 'cc', 'g++', 'c++']:
			winrc = conf.find_program('windres', var='WINRC', path_list=env['PATH'])
		elif compiler == 'msvc':
			winrc = conf.find_program('RC', var='WINRC', path_list=env['PATH'])
			env['WINRC_TGT_F'] = '/fo'
			env['WINRC_SRC_F'] = ''
	if not winrc:
		conf.fatal('winrc was not found!')

	env['WINRCFLAGS'] = ''

652
tools/wafadmin/Utils.py

@@ -0,0 +1,652 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2005 (ita)
"""
Utilities, the stable ones are the following:
* h_file: compute a unique value for a file (hash), it uses
the module fnv if it is installed (see waf/utils/fnv & http://code.google.com/p/waf/wiki/FAQ)
else, md5 (see the python docs)
For large projects (projects with more than 15000 files) or slow hard disks and filesystems (HFS)
it is possible to use a hashing based on the path and the size (may give broken cache results)
The method h_file MUST raise an OSError if the file is a folder
import stat
def h_file(filename):
st = os.stat(filename)
if stat.S_ISDIR(st[stat.ST_MODE]): raise IOError('not a file')
m = Utils.md5()
m.update(str(st.st_mtime))
m.update(str(st.st_size))
m.update(filename)
return m.digest()
To replace the function in your project, use something like this:
import Utils
Utils.h_file = h_file
* h_list
* h_fun
* get_term_cols
* ordered_dict
"""
import os, sys, imp, string, errno, traceback, inspect, re, shutil, datetime, gc
# In python 3.0 we can get rid of all this
try: from UserDict import UserDict
except ImportError: from collections import UserDict
if sys.hexversion >= 0x2060000 or os.name == 'java':
import subprocess as pproc
else:
import pproc
import Logs
from Constants import *
is_win32 = sys.platform == 'win32'
try:
	# defaultdict in python 2.5
	from collections import defaultdict as DefaultDict
except ImportError:
	class DefaultDict(dict):
		# minimal python-2.4 fallback: missing keys are created on first
		# access using the supplied factory callable
		def __init__(self, default_factory):
			super(DefaultDict, self).__init__()
			self.default_factory = default_factory
		def __getitem__(self, key):
			try:
				return super(DefaultDict, self).__getitem__(key)
			except KeyError:
				value = self.default_factory()
				self[key] = value
				return value
class WafError(Exception):
	"""Base error for waf; records the call stack at creation time so verbose
	error reports can show where the error was raised."""
	def __init__(self, *args):
		self.args = args
		# snapshot of the current stack, used by the error reporting code
		self.stack = traceback.extract_stack()
		Exception.__init__(self, *args)
	def __str__(self):
		# a single truthy argument prints bare, anything else prints the tuple
		if len(self.args) == 1 and self.args[0]:
			return str(self.args[0])
		return str(self.args)
class WscriptError(WafError):
	# Error raised from user build scripts; tries to pin down the wscript
	# file and line where the problem occurred and prefixes the message.
	def __init__(self, message, wscript_file=None):
		if wscript_file:
			self.wscript_file = wscript_file
			self.wscript_line = None
		else:
			# no explicit file given: walk the stack to find the wscript frame
			(self.wscript_file, self.wscript_line) = self.locate_error()

		msg_file_line = ''
		if self.wscript_file:
			msg_file_line = "%s:" % self.wscript_file
			if self.wscript_line:
				msg_file_line += "%s:" % self.wscript_line
		err_message = "%s error: %s" % (msg_file_line, message)
		WafError.__init__(self, err_message)

	def locate_error(self):
		# innermost stack frame whose file is a wscript or wscript_build
		stack = traceback.extract_stack()
		stack.reverse()
		for frame in stack:
			file_name = os.path.basename(frame[0])
			is_wscript = (file_name == WSCRIPT_FILE or file_name == WSCRIPT_BUILD_FILE)
			if is_wscript:
				return (frame[0], frame[1])
		return (None, None)
indicator = is_win32 and '\x1b[A\x1b[K%s%s%s\r' or '\x1b[K%s%s%s\r'
try:
	# prefer the fast 'fnv' extension module when it is installed
	# (see waf/utils/fnv and the waf FAQ)
	from fnv import new as md5
	import Constants
	Constants.SIG_NIL = 'signofnv'

	def h_file(filename):
		"""Hash a file with fnv; raises OSError when 'filename' is not a regular file."""
		m = md5()
		try:
			m.hfile(filename)
			x = m.digest()
			if x is None: raise OSError("not a file")
			return x
		except SystemError:
			raise OSError("not a file" + filename)
except ImportError:
	try:
		from hashlib import md5
	except ImportError:
		from md5 import md5

	def h_file(filename):
		"""Return the md5 digest of the file contents, reading in fixed-size
		chunks so large files do not fill memory.

		Fixes over the previous version: the unused 'readBytes' local is gone,
		the parameter is no longer reused as the read buffer, and the file
		handle is closed even when read() raises (try/finally).
		"""
		f = open(filename, 'rb')
		m = md5()
		try:
			chunk = f.read(100000)
			while chunk:
				m.update(chunk)
				chunk = f.read(100000)
		finally:
			f.close()
		return m.digest()
class ordered_dict(UserDict):
	"""Dictionary that records key insertion order in the 'allkeys' list."""
	def __init__(self, dict=None):
		# 'allkeys' must exist before UserDict.__init__ triggers __setitem__
		self.allkeys = []
		UserDict.__init__(self, dict)

	def __setitem__(self, key, item):
		if key not in self.allkeys:
			self.allkeys.append(key)
		UserDict.__setitem__(self, key, item)

	def __delitem__(self, key):
		self.allkeys.remove(key)
		UserDict.__delitem__(self, key)
def exec_command(s, **kw):
	"""Run a command and return its exit status, or -1 when it cannot be started.

	's' may be a string (executed through the shell) or a list (executed
	directly). A 'log' keyword redirects both stdout and stderr to that file
	object. Remaining keywords are passed through to Popen.
	"""
	if 'log' in kw:
		log = kw.pop('log')
		kw['stdout'] = kw['stderr'] = log
	kw['shell'] = isinstance(s, str)
	try:
		return pproc.Popen(s, **kw).wait()
	except OSError:
		return -1
if is_win32:
	def exec_command(s, **kw):
		# win32 override of exec_command: very long command lines need the
		# STARTUPINFO flag, and captured output is echoed through Logs.
		if 'log' in kw:
			kw['stdout'] = kw['stderr'] = kw['log']
			del(kw['log'])
		kw['shell'] = isinstance(s, str)

		if len(s) > 2000:
			startupinfo = pproc.STARTUPINFO()
			startupinfo.dwFlags |= pproc.STARTF_USESHOWWINDOW
			kw['startupinfo'] = startupinfo

		try:
			if 'stdout' not in kw:
				# no explicit redirection: capture and forward to the log
				kw['stdout'] = pproc.PIPE
				kw['stderr'] = pproc.STDOUT
				proc = pproc.Popen(s,**kw)
				(stdout, _) = proc.communicate()
				Logs.info(stdout)
			else:
				proc = pproc.Popen(s,**kw)
			return proc.wait()
		except OSError:
			return -1
listdir = os.listdir
if is_win32:
	def listdir_win32(s):
		# win32 wrapper: normalise the failure mode to OSError/ENOENT
		if re.match('^[A-Za-z]:$', s):
			# os.path.isdir fails if s contains only the drive name... (x:)
			s += os.sep
		if not os.path.isdir(s):
			e = OSError()
			e.errno = errno.ENOENT
			raise e
		return os.listdir(s)
	listdir = listdir_win32
def waf_version(mini = 0x010000, maxi = 0x100000):
	"Halts if the waf version is wrong"
	# mini/maxi accept either a hex integer or an 'x.y.z' version string;
	# strings are converted by replacing the dots ('1.5.0' -> 0x10500)
	ver = HEXVERSION
	try: min_val = mini + 0
	except TypeError: min_val = int(mini.replace('.', '0'), 16)

	if min_val > ver:
		Logs.error("waf version should be at least %s (%s found)" % (mini, ver))
		sys.exit(0)

	try: max_val = maxi + 0
	except TypeError: max_val = int(maxi.replace('.', '0'), 16)

	if max_val < ver:
		Logs.error("waf version should be at most %s (%s found)" % (maxi, ver))
		sys.exit(0)
def python_24_guard():
	"""Abort with ImportError when the interpreter is older than Python 2.4
	(the unprocessed waf source needs 2.4 even though processed waf runs on 2.3)."""
	if sys.hexversion < 0x20400f0:
		raise ImportError("Waf requires Python >= 2.3 but the raw source requires Python 2.4")
def ex_stack():
	"""Return the traceback of the exception currently being handled,
	formatted as a single string."""
	exc_type, exc_value, tb = sys.exc_info()
	lines = traceback.format_exception(exc_type, exc_value, tb)
	return ''.join(lines)
def to_list(sth):
	"""Return a list of words from a whitespace-separated string; any
	non-string value is passed through unchanged."""
	if isinstance(sth, str):
		return sth.split()
	return sth
g_loaded_modules = {}
"index modules by absolute path"

g_module = None
"the main module is special"

def load_module(file_path, name=WSCRIPT_FILE):
	"this function requires an absolute path"
	# one module object per script file, cached for the whole run
	try:
		return g_loaded_modules[file_path]
	except KeyError:
		pass

	module = imp.new_module(name)

	try:
		code = readf(file_path, m='rU')
	except (IOError, OSError):
		raise WscriptError('Could not read the file %r' % file_path)

	# the raw source participates in the build signature
	module.waf_hash_val = code

	module_dir = os.path.dirname(file_path)
	sys.path.insert(0, module_dir)
	try:
		exec(code, module.__dict__)
	except Exception:
		# surface script errors with the wscript path attached
		# NOTE(review): on this path module_dir stays on sys.path; waf aborts
		# right after, so the pollution is harmless in practice
		raise WscriptError(traceback.format_exc(), file_path)
	sys.path.remove(module_dir)

	g_loaded_modules[file_path] = module
	return module
def set_main_module(file_path):
	"Load custom options, if defined"
	# the top-level wscript becomes the global g_module
	global g_module
	g_module = load_module(file_path, 'wscript_main')
	g_module.root_path = file_path

	# note: to register the module globally, use the following:
	# sys.modules['wscript_main'] = g_module
def to_hashtable(s):
	"""Parse the contents of an environment file ('VAR=value' lines) into a dict.

	Empty lines are skipped. The value is everything after the FIRST '=',
	so values may themselves contain '=' characters (the previous
	single-argument split truncated such values at the second '=').
	"""
	tbl = {}
	lst = s.split('\n')
	for line in lst:
		if not line: continue
		mems = line.split('=', 1)
		tbl[mems[0]] = mems[1]
	return tbl
def get_term_cols():
	"console width"
	# conservative default; replaced below by an ioctl query when possible
	return 80

try:
	import struct, fcntl, termios
except ImportError:
	pass
else:
	if Logs.got_tty:
		def myfun():
			# query the terminal window size through TIOCGWINSZ
			dummy_lines, cols = struct.unpack("HHHH", \
				fcntl.ioctl(sys.stderr.fileno(), termios.TIOCGWINSZ, \
					struct.pack("HHHH", 0, 0, 0, 0)))[:2]
			return cols
		# we actually try the function once to see if it is suitable
		try:
			myfun()
		except IOError:
			pass
		else:
			get_term_cols = myfun
rot_idx = 0
rot_chr = ['\\', '|', '/', '-']
"the rotation character in the progress bar"

def split_path(path):
	# posix flavour: paths use '/' only
	return path.split('/')
def split_path_cygwin(path):
	"""Split a cygwin path on '/'; a leading '//' (network path) prefix stays
	attached to the first component."""
	if path.startswith('//'):
		parts = path.split('/')[2:]
		parts[0] = '/' + parts[0]
		return parts
	return path.split('/')
re_sp = re.compile('[/\\\\]')
def split_path_win32(path):
	"""Split a win32 path on either separator; a leading '\\\\' (UNC) prefix
	stays attached to the first component."""
	if path.startswith('\\\\'):
		parts = re.split(re_sp, path)[2:]
		parts[0] = '\\' + parts[0]
		return parts
	return re.split(re_sp, path)
# pick the path splitter matching the host platform once, at import time
if sys.platform == 'cygwin':
	split_path = split_path_cygwin
elif is_win32:
	split_path = split_path_win32
def copy_attrs(orig, dest, names, only_if_set=False):
	"""Copy the attributes listed in 'names' (list or space-separated string)
	from 'orig' to 'dest'; missing attributes default to an empty tuple.
	With only_if_set=True, falsy values are not copied at all."""
	for attr in to_list(names):
		value = getattr(orig, attr, ())
		if value or not only_if_set:
			setattr(dest, attr, value)
def def_attrs(cls, **kw):
	'''
	set attributes for class.
	@param cls [any class]: the class to update the given attributes in.
	@param kw [dictionary]: dictionary of attributes names and values.

	if the given class hasn't one (or more) of these attributes, add the attribute with its value to the class.
	'''
	# items() instead of the Python2-only iteritems(): identical behaviour
	# on Python 2 and keeps the function importable on Python 3
	for k, v in kw.items():
		if not hasattr(cls, k):
			setattr(cls, k, v)
quote_define_name_table = None
def quote_define_name(path):
	"Converts a string to a constant name, foo/zbr-xpto.h -> FOO_ZBR_XPTO_H"
	# build the translation table once, lazily; every byte that is not an
	# uppercase letter or digit maps to '_' (python2-only string APIs)
	global quote_define_name_table
	if not quote_define_name_table:
		invalid_chars = set([chr(x) for x in xrange(256)]) - set(string.digits + string.uppercase)
		quote_define_name_table = string.maketrans(''.join(invalid_chars), '_'*len(invalid_chars))
	return string.translate(string.upper(path), quote_define_name_table)
def quote_whitespace(path):
	"""Surround 'path' with double quotes when it contains interior
	whitespace; collapse any doubled quotes afterwards."""
	if path.strip().find(' ') > 0:
		quoted = '"%s"' % path
	else:
		quoted = path
	return quoted.replace('""', '"')
def trimquotes(s):
	"""Strip trailing whitespace and remove one pair of enclosing single
	quotes, if present; falsy input yields the empty string."""
	if not s:
		return ''
	s = s.rstrip()
	if s.startswith("'") and s.endswith("'"):
		return s[1:-1]
	return s
def h_list(lst):
	# hash of the repr of an arbitrary value; 'md5' is whichever
	# implementation the import fallback above selected (fnv/hashlib/md5)
	m = md5()
	m.update(str(lst))
	return m.digest()
def h_fun(fun):
	"""Hash helper for python functions: return the function's source text
	('nocode' when unavailable) and cache it on the function object."""
	cached = getattr(fun, 'code', None)
	if cached is not None:
		return cached
	try:
		h = inspect.getsource(fun)
	except IOError:
		h = "nocode"
	try:
		# cache for the next call; some callables reject new attributes
		fun.code = h
	except AttributeError:
		pass
	return h
def pprint(col, str, label='', sep=os.linesep):
	"print messages in color"
	# 'col' is a color name understood by Logs.colors (e.g. 'RED'); output
	# goes to stderr so it does not mix with captured build output
	sys.stderr.write("%s%s%s %s%s" % (Logs.colors(col), str, Logs.colors.NORMAL, label, sep))
def check_dir(dir):
	"""If a folder doesn't exists, create it."""
	try:
		os.stat(dir)
	except OSError:
		try:
			os.makedirs(dir)
		except OSError, e:
			# e.g. permission denied, or a concurrent creation race
			raise WafError("Cannot create folder '%s' (original error: %s)" % (dir, e))
def cmd_output(cmd, **kw):
	"""Run a command and return its standard output as a string.

	Keywords: 'silent' suppresses the non-zero-exit exception (and captures
	stderr too); 'e' passes an environment dict. Raises ValueError when the
	command cannot be started, or exits non-zero while silent is False.
	"""
	silent = False
	if 'silent' in kw:
		silent = kw['silent']
		del(kw['silent'])

	if 'e' in kw:
		tmp = kw['e']
		del(kw['e'])
		kw['env'] = tmp

	kw['shell'] = isinstance(cmd, str)
	kw['stdout'] = pproc.PIPE
	if silent:
		kw['stderr'] = pproc.PIPE

	try:
		p = pproc.Popen(cmd, **kw)
		output = p.communicate()[0]
	except OSError, e:
		raise ValueError(str(e))

	if p.returncode:
		if not silent:
			msg = "command execution failed: %s -> %r" % (cmd, str(output))
			raise ValueError(msg)
		# silent mode: a failed command yields empty output
		output = ''
	return output
reg_subst = re.compile(r"(\\\\)|(\$\$)|\$\{([^}]+)\}")
def subst_vars(expr, params):
	"substitute ${PREFIX}/bin in /usr/local/bin"
	def _replace(m):
		# escaped backslash and escaped dollar come through literally
		if m.group(1):
			return '\\'
		if m.group(2):
			return '$'
		name = m.group(3)
		try:
			# environments may contain lists
			return params.get_flat(name)
		except AttributeError:
			return params[name]
	return reg_subst.sub(_replace, expr)
def unversioned_sys_platform_to_binary_format(unversioned_sys_platform):
	"infers the binary format from the unversioned_sys_platform name."
	if unversioned_sys_platform == 'darwin':
		return 'mac-o'
	if unversioned_sys_platform in ('win32', 'cygwin', 'uwin', 'msys'):
		return 'pe'
	if unversioned_sys_platform in ('linux', 'freebsd', 'netbsd', 'openbsd', 'sunos'):
		return 'elf'
	# TODO we assume all other operating systems are elf, which is not true.
	# we may set this to 'unknown' and have ccroot and other tools handle the case "gracefully" (whatever that means).
	return 'elf'
def unversioned_sys_platform():
	"""returns an unversioned name from sys.platform.
	sys.plaform is not very well defined and depends directly on the python source tree.
	The version appended to the names is unreliable as it's taken from the build environment at the time python was built,
	i.e., it's possible to get freebsd7 on a freebsd8 system.
	So we remove the version from the name, except for special cases where the os has a stupid name like os2 or win32.
	Some possible values of sys.platform are, amongst others:
	aix3 aix4 atheos beos5 darwin freebsd2 freebsd3 freebsd4 freebsd5 freebsd6 freebsd7
	generic irix5 irix6 linux2 mac netbsd1 next3 os2emx riscos sunos5 unixware7
	Investigating the python source tree may reveal more values.
	"""
	s = sys.platform
	if s == 'java':
		# The real OS is hidden under the JVM.
		from java.lang import System
		s = System.getProperty('os.name')
		# see http://lopica.sourceforge.net/os.html for a list of possible values
		jvm_names = {'Mac OS X': 'darwin', 'OS/2': 'os2', 'HP-UX': 'hpux', 'SunOS': 'sunos', 'Solaris': 'sunos'}
		if s in jvm_names:
			return jvm_names[s]
		if s.startswith('Windows '):
			return 'win32'
		s = s.lower()
	if s == 'win32' or s.endswith('os2') and s != 'sunos2':
		return s
	# strip the trailing version digits ('freebsd7' -> 'freebsd')
	return re.split(r'\d+$', s)[0]
#@deprecated('use unversioned_sys_platform instead')
def detect_platform():
	"""this function has been in the Utils module for some time.
	It's hard to guess what people have used it for.
	It seems its goal is to return an unversionned sys.platform, but it's not handling all platforms.
	For example, the version is not removed on freebsd and netbsd, amongst others.
	"""
	s = sys.platform

	# known POSIX
	for candidate in 'cygwin linux irix sunos hpux aix darwin'.split():
		# sys.platform may be linux2
		if candidate in s:
			return candidate

	# unknown POSIX
	if os.name in 'posix java os2'.split():
		return os.name

	return s
def load_tool(tool, tooldir=None):
	# import a waf tool module by name, optionally extending sys.path with
	# the given tool directories for the duration of the import
	if tooldir:
		assert isinstance(tooldir, list)
		sys.path = tooldir + sys.path
	try:
		try:
			return __import__(tool)
		except ImportError, e:
			raise WscriptError('Could not load the tool %r in %r' % (tool, sys.path))
	finally:
		# always restore sys.path, whether the import worked or not
		if tooldir:
			for d in tooldir:
				sys.path.remove(d)
def readf(fname, m='r'):
	"get the contents of a file, it is not used anywhere for the moment"
	f = open(fname, m)
	try:
		return f.read()
	finally:
		f.close()
def nada(*k, **kw):
	"""A function that does nothing: accepts any arguments, returns None."""
	return None
def diff_path(top, subdir):
	"""difference between two absolute paths"""
	# normalise both paths to forward-slash components before comparing
	top_parts = os.path.normpath(top).replace('\\', '/').split('/')
	sub_parts = os.path.normpath(subdir).replace('\\', '/').split('/')
	if len(top_parts) == len(sub_parts):
		return ''
	tail = sub_parts[len(top_parts) - len(sub_parts):]
	return os.path.join(*tail)
class Context(object):
	"""A base class for commands to be executed from Waf scripts"""

	# 'curdir' is a lazy property: it defaults to the process cwd the first
	# time it is read, and recurse() swaps it while visiting sub-scripts
	def set_curdir(self, dir):
		self.curdir_ = dir

	def get_curdir(self):
		try:
			return self.curdir_
		except AttributeError:
			self.curdir_ = os.getcwd()
			return self.get_curdir()

	curdir = property(get_curdir, set_curdir)

	def recurse(self, dirs, name=''):
		"""The function for calling scripts from folders, it tries to call wscript + function_name
		and if that file does not exist, it will call the method 'function_name' from a file named wscript
		the dirs can be a list of folders or a string containing space-separated folder paths
		"""
		if not name:
			# default to the caller's function name (e.g. 'build', 'configure')
			name = inspect.stack()[1][3]

		if isinstance(dirs, str):
			dirs = to_list(dirs)

		for x in dirs:
			if os.path.isabs(x):
				nexdir = x
			else:
				nexdir = os.path.join(self.curdir, x)

			base = os.path.join(nexdir, WSCRIPT_FILE)

			try:
				# first choice: a dedicated file 'wscript_<name>'
				txt = readf(base + '_' + name, m='rU')
			except (OSError, IOError):
				# fallback: function <name> inside the 'wscript' module
				try:
					module = load_module(base)
				except OSError:
					raise WscriptError('No such script %s' % base)

				try:
					f = module.__dict__[name]
				except KeyError:
					raise WscriptError('No function %s defined in %s' % (name, base))

				if getattr(self.__class__, 'pre_recurse', None):
					self.pre_recurse(f, base, nexdir)
				old = self.curdir
				self.curdir = nexdir
				try:
					f(self)
				finally:
					# restore even if the user function raises
					self.curdir = old
				if getattr(self.__class__, 'post_recurse', None):
					self.post_recurse(module, base, nexdir)
			else:
				# execute the 'wscript_<name>' file contents with 'ctx' bound
				dc = {'ctx': self}
				if getattr(self.__class__, 'pre_recurse', None):
					dc = self.pre_recurse(txt, base + '_' + name, nexdir)
				old = self.curdir
				self.curdir = nexdir
				try:
					try:
						exec(txt, dc)
					except Exception:
						raise WscriptError(traceback.format_exc(), base)
				finally:
					self.curdir = old
				if getattr(self.__class__, 'post_recurse', None):
					self.post_recurse(txt, base + '_' + name, nexdir)
if is_win32:
	# win32: wrap shutil.copy2 so the file metadata is (re)applied to the
	# destination after the copy.
	old = shutil.copy2
	def copy2(src, dst):
		old(src, dst)
		# was copystat(src, src): copying a file's stats onto itself is a
		# no-op and never touched the destination
		shutil.copystat(src, dst)
	setattr(shutil, 'copy2', copy2)
def get_elapsed_time(start):
	"Format a time delta (datetime.timedelta) using the format DdHhMmS.MSs"
	delta = datetime.datetime.now() - start
	# cast to int necessary for python 3.0
	days = int(delta.days)
	hours = int(delta.seconds / 3600)
	minutes = int((delta.seconds - hours * 3600) / 60)
	seconds = delta.seconds - hours * 3600 - minutes * 60 + float(delta.microseconds) / 1000 / 1000
	# larger units are printed only when they (or a larger unit) are non-zero
	parts = []
	if days:
		parts.append('%dd' % days)
	if days or hours:
		parts.append('%dh' % hours)
	if days or hours or minutes:
		parts.append('%dm' % minutes)
	return '%s%.3fs' % (''.join(parts), seconds)
if os.name == 'java':
	# For Jython (they should really fix the inconsistency)
	try:
		gc.disable()
		gc.enable()
	except NotImplementedError:
		# some Jython versions cannot disable the gc: alias disable to
		# enable so later calls are harmless no-ops
		gc.disable = gc.enable

3
tools/wafadmin/__init__.py

@@ -0,0 +1,3 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2005 (ita)

620
tools/wafadmin/pproc.py

@@ -0,0 +1,620 @@
# borrowed from python 2.5.2c1
# Copyright (c) 2003-2005 by Peter Astrand <astrand@lysator.liu.se>
# Licensed to PSF under a Contributor Agreement.
import sys
mswindows = (sys.platform == "win32")
import os
import types
import traceback
import gc
class CalledProcessError(Exception):
	"""Raised by check_call when a command exits with a non-zero status."""
	def __init__(self, returncode, cmd):
		self.cmd = cmd
		self.returncode = returncode
	def __str__(self):
		return "Command '%s' returned non-zero exit status %d" % (self.cmd, self.returncode)
if mswindows:
	import threading
	import msvcrt
	if 0:
		# dead branch kept from upstream: the pywin32-based implementation
		import pywintypes
		from win32api import GetStdHandle, STD_INPUT_HANDLE, \
			STD_OUTPUT_HANDLE, STD_ERROR_HANDLE
		from win32api import GetCurrentProcess, DuplicateHandle, \
			GetModuleFileName, GetVersion
		from win32con import DUPLICATE_SAME_ACCESS, SW_HIDE
		from win32pipe import CreatePipe
		from win32process import CreateProcess, STARTUPINFO, \
			GetExitCodeProcess, STARTF_USESTDHANDLES, \
			STARTF_USESHOWWINDOW, CREATE_NEW_CONSOLE
		from win32event import WaitForSingleObject, INFINITE, WAIT_OBJECT_0
	else:
		# the bundled _subprocess C module, plus small stand-ins for the
		# pywin32 types used by the code below
		from _subprocess import *
		class STARTUPINFO:
			dwFlags = 0
			hStdInput = None
			hStdOutput = None
			hStdError = None
			wShowWindow = 0
		class pywintypes:
			error = IOError
else:
	# POSIX implementation dependencies
	import select
	import errno
	import fcntl
	import pickle
__all__ = ["Popen", "PIPE", "STDOUT", "call", "check_call", "CalledProcessError"]
try:
MAXFD = os.sysconf("SC_OPEN_MAX")
except:
MAXFD = 256
try:
False
except NameError:
False = 0
True = 1
_active = []
def _cleanup():
for inst in _active[:]:
if inst.poll(_deadstate=sys.maxint) >= 0:
try:
_active.remove(inst)
except ValueError:
pass
# special values for the stdin/stdout/stderr arguments of Popen
PIPE = -1
STDOUT = -2

def call(*popenargs, **kwargs):
	# run the command and wait for it; returns the exit status
	return Popen(*popenargs, **kwargs).wait()

def check_call(*popenargs, **kwargs):
	# like call(), but raises CalledProcessError on a non-zero exit status
	retcode = call(*popenargs, **kwargs)
	cmd = kwargs.get("args")
	if cmd is None:
		cmd = popenargs[0]
	if retcode:
		raise CalledProcessError(retcode, cmd)
	return retcode
def list2cmdline(seq):
	"""Translate a sequence of arguments into one command-line string
	following the MS C runtime rules: arguments containing whitespace (or
	empty) are surrounded by double quotes, embedded quotes are escaped, and
	backslashes are doubled only when they precede a quote."""
	result = []
	for arg in seq:
		if result:
			result.append(' ')

		needquote = (" " in arg) or ("\t" in arg) or arg == ""
		if needquote:
			result.append('"')

		bs_buf = []
		for ch in arg:
			if ch == '\\':
				# defer backslashes: their treatment depends on what follows
				bs_buf.append(ch)
			elif ch == '"':
				# double the pending backslashes, then escape the quote
				result.append('\\' * len(bs_buf) * 2)
				bs_buf = []
				result.append('\\"')
			else:
				# ordinary character: flush pending backslashes literally
				if bs_buf:
					result.extend(bs_buf)
					bs_buf = []
				result.append(ch)

		# trailing backslashes; doubled when a closing quote follows
		if bs_buf:
			result.extend(bs_buf)
		if needquote:
			result.extend(bs_buf)
			result.append('"')

	return ''.join(result)
class Popen(object):
def __init__(self, args, bufsize=0, executable=None,
stdin=None, stdout=None, stderr=None,
preexec_fn=None, close_fds=False, shell=False,
cwd=None, env=None, universal_newlines=False,
startupinfo=None, creationflags=0):
_cleanup()
self._child_created = False
if not isinstance(bufsize, (int, long)):
raise TypeError("bufsize must be an integer")
if mswindows:
if preexec_fn is not None:
raise ValueError("preexec_fn is not supported on Windows platforms")
if close_fds:
raise ValueError("close_fds is not supported on Windows platforms")
else:
if startupinfo is not None:
raise ValueError("startupinfo is only supported on Windows platforms")
if creationflags != 0:
raise ValueError("creationflags is only supported on Windows platforms")
self.stdin = None
self.stdout = None
self.stderr = None
self.pid = None
self.returncode = None
self.universal_newlines = universal_newlines
(p2cread, p2cwrite,
c2pread, c2pwrite,
errread, errwrite) = self._get_handles(stdin, stdout, stderr)
self._execute_child(args, executable, preexec_fn, close_fds,
cwd, env, universal_newlines,
startupinfo, creationflags, shell,
p2cread, p2cwrite,
c2pread, c2pwrite,
errread, errwrite)
if mswindows:
if stdin is None and p2cwrite is not None:
os.close(p2cwrite)
p2cwrite = None
if stdout is None and c2pread is not None:
os.close(c2pread)
c2pread = None
if stderr is None and errread is not None:
os.close(errread)
errread = None
if p2cwrite:
self.stdin = os.fdopen(p2cwrite, 'wb', bufsize)
if c2pread:
if universal_newlines:
self.stdout = os.fdopen(c2pread, 'rU', bufsize)
else:
self.stdout = os.fdopen(c2pread, 'rb', bufsize)
if errread:
if universal_newlines:
self.stderr = os.fdopen(errread, 'rU', bufsize)
else:
self.stderr = os.fdopen(errread, 'rb', bufsize)
def _translate_newlines(self, data):
data = data.replace("\r\n", "\n")
data = data.replace("\r", "\n")
return data
def __del__(self, sys=sys):
if not self._child_created:
return
self.poll(_deadstate=sys.maxint)
if self.returncode is None and _active is not None:
_active.append(self)
def communicate(self, input=None):
if [self.stdin, self.stdout, self.stderr].count(None) >= 2:
stdout = None
stderr = None
if self.stdin:
if input:
self.stdin.write(input)
self.stdin.close()
elif self.stdout:
stdout = self.stdout.read()
elif self.stderr:
stderr = self.stderr.read()
self.wait()
return (stdout, stderr)
return self._communicate(input)
if mswindows:
def _get_handles(self, stdin, stdout, stderr):
    """Windows: resolve the stdin/stdout/stderr specs into a six-tuple
    of win32 handles (p2cread, p2cwrite, c2pread, c2pwrite, errread,
    errwrite).  Entries stay None for streams that are not redirected.
    """
    if stdin is None and stdout is None and stderr is None:
        # Nothing redirected: the child just inherits our handles.
        return (None, None, None, None, None, None)
    p2cread, p2cwrite = None, None
    c2pread, c2pwrite = None, None
    errread, errwrite = None, None
    if stdin is None:
        p2cread = GetStdHandle(STD_INPUT_HANDLE)
    if p2cread is not None:
        pass
    elif stdin is None or stdin == PIPE:
        p2cread, p2cwrite = CreatePipe(None, 0)
        # Detach the raw handle from its wrapper and convert the write
        # end into a CRT file descriptor so os.fdopen() can wrap it.
        p2cwrite = p2cwrite.Detach()
        p2cwrite = msvcrt.open_osfhandle(p2cwrite, 0)
    elif isinstance(stdin, int):
        p2cread = msvcrt.get_osfhandle(stdin)
    else:
        # Assume a file-like object.
        p2cread = msvcrt.get_osfhandle(stdin.fileno())
    # The child's end of each pipe must be inheritable across
    # CreateProcess.
    p2cread = self._make_inheritable(p2cread)
    if stdout is None:
        c2pwrite = GetStdHandle(STD_OUTPUT_HANDLE)
    if c2pwrite is not None:
        pass
    elif stdout is None or stdout == PIPE:
        c2pread, c2pwrite = CreatePipe(None, 0)
        c2pread = c2pread.Detach()
        c2pread = msvcrt.open_osfhandle(c2pread, 0)
    elif isinstance(stdout, int):
        c2pwrite = msvcrt.get_osfhandle(stdout)
    else:
        c2pwrite = msvcrt.get_osfhandle(stdout.fileno())
    c2pwrite = self._make_inheritable(c2pwrite)
    if stderr is None:
        errwrite = GetStdHandle(STD_ERROR_HANDLE)
    if errwrite is not None:
        pass
    elif stderr is None or stderr == PIPE:
        errread, errwrite = CreatePipe(None, 0)
        errread = errread.Detach()
        errread = msvcrt.open_osfhandle(errread, 0)
    elif stderr == STDOUT:
        # Merge stderr into the stdout pipe.
        errwrite = c2pwrite
    elif isinstance(stderr, int):
        errwrite = msvcrt.get_osfhandle(stderr)
    else:
        errwrite = msvcrt.get_osfhandle(stderr.fileno())
    errwrite = self._make_inheritable(errwrite)
    return (p2cread, p2cwrite,
            c2pread, c2pwrite,
            errread, errwrite)
def _make_inheritable(self, handle):
    """Return a duplicate of *handle* that child processes can inherit."""
    current = GetCurrentProcess()
    # bInheritHandle=1 marks the duplicate inheritable; access rights
    # are copied unchanged (DUPLICATE_SAME_ACCESS).
    return DuplicateHandle(current, handle, current, 0, 1, DUPLICATE_SAME_ACCESS)
def _find_w9xpopen(self):
    """Locate the w9xpopen.exe helper (needed on Win9x / command.com)."""
    # First look next to the interpreter executable itself.
    exe_dir = os.path.dirname(GetModuleFileName(0))
    candidate = os.path.join(exe_dir, "w9xpopen.exe")
    if not os.path.exists(candidate):
        # Not there -- presumably an embedding situation; try the
        # installation prefix instead.  TODO confirm on Win9x.
        candidate = os.path.join(os.path.dirname(sys.exec_prefix), "w9xpopen.exe")
        if not os.path.exists(candidate):
            raise RuntimeError("Cannot locate w9xpopen.exe, which is needed for Popen to work with your shell or platform.")
    return candidate
def _execute_child(self, args, executable, preexec_fn, close_fds,
                   cwd, env, universal_newlines,
                   startupinfo, creationflags, shell,
                   p2cread, p2cwrite,
                   c2pread, c2pwrite,
                   errread, errwrite):
    """Execute the program (MS Windows version) via CreateProcess."""
    if not isinstance(args, types.StringTypes):
        # CreateProcess takes a single command line, not an argv list.
        args = list2cmdline(args)
    # Route the pipe ends produced by _get_handles() into the child's
    # standard handles.
    if startupinfo is None:
        startupinfo = STARTUPINFO()
    if None not in (p2cread, c2pwrite, errwrite):
        startupinfo.dwFlags |= STARTF_USESTDHANDLES
        startupinfo.hStdInput = p2cread
        startupinfo.hStdOutput = c2pwrite
        startupinfo.hStdError = errwrite
    if shell:
        # Run through the shell: hide the console window and prefix
        # the command with "%COMSPEC% /c".
        startupinfo.dwFlags |= STARTF_USESHOWWINDOW
        startupinfo.wShowWindow = SW_HIDE
        comspec = os.environ.get("COMSPEC", "cmd.exe")
        args = comspec + " /c " + args
        # High bit of GetVersion() set, or command.com as the shell:
        # Win9x-style environment, which needs the w9xpopen.exe helper
        # to make handle redirection work.
        if (GetVersion() >= 0x80000000L or
            os.path.basename(comspec).lower() == "command.com"):
            w9xpopen = self._find_w9xpopen()
            args = '"%s" %s' % (w9xpopen, args)
            # NOTE(review): a fresh console appears to be required for
            # w9xpopen to propagate the exit code -- confirm on Win9x.
            creationflags |= CREATE_NEW_CONSOLE
    try:
        hp, ht, pid, tid = CreateProcess(executable, args, None, None, 1, creationflags, env, cwd, startupinfo)
    except pywintypes.error, e:
        # Re-raise win32 failures as the builtin WindowsError.
        raise WindowsError(*e.args)
    # Only flag the child as created once CreateProcess succeeded;
    # __del__ checks this flag before trying to reap.
    self._child_created = True
    self._handle = hp
    self.pid = pid
    # The thread handle is never used again; release it immediately.
    ht.Close()
    # Close our copies of the child's pipe ends -- the child owns
    # them now.
    if p2cread is not None:
        p2cread.Close()
    if c2pwrite is not None:
        c2pwrite.Close()
    if errwrite is not None:
        errwrite.Close()
def poll(self, _deadstate=None):
    """Check whether the child has exited without blocking; set and
    return self.returncode (None while still running)."""
    if self.returncode is not None:
        return self.returncode
    # Zero timeout: just sample the process state.
    if WaitForSingleObject(self._handle, 0) == WAIT_OBJECT_0:
        self.returncode = GetExitCodeProcess(self._handle)
    return self.returncode
def wait(self):
    """Block until the child terminates; return its exit code."""
    if self.returncode is not None:
        return self.returncode
    # INFINITE timeout; the wait result itself is not interesting,
    # only the exit code afterwards.
    WaitForSingleObject(self._handle, INFINITE)
    self.returncode = GetExitCodeProcess(self._handle)
    return self.returncode
def _readerthread(self, fh, buffer):
buffer.append(fh.read())
def _communicate(self, input):
    """Windows: pump stdin while daemon threads drain stdout/stderr.
    NOTE(review): threads are presumably used because select() does
    not cover anonymous pipes on Windows -- one blocking read per
    stream avoids deadlock."""
    stdout = None
    stderr = None
    if self.stdout:
        stdout = []
        stdout_thread = threading.Thread(target=self._readerthread, args=(self.stdout, stdout))
        # Daemon thread: a stuck read cannot block interpreter exit.
        stdout_thread.setDaemon(True)
        stdout_thread.start()
    if self.stderr:
        stderr = []
        stderr_thread = threading.Thread(target=self._readerthread, args=(self.stderr, stderr))
        stderr_thread.setDaemon(True)
        stderr_thread.start()
    if self.stdin:
        if input is not None:
            self.stdin.write(input)
        # Closing stdin signals EOF to the child.
        self.stdin.close()
    if self.stdout:
        stdout_thread.join()
    if self.stderr:
        stderr_thread.join()
    # _readerthread appends exactly one blob per stream.
    if stdout is not None:
        stdout = stdout[0]
    if stderr is not None:
        stderr = stderr[0]
    # NOTE(review): hasattr(file, 'newlines') presumably tests whether
    # the interpreter was built with universal-newline support -- if
    # not, the data was never translated on read and we do it here.
    if self.universal_newlines and hasattr(file, 'newlines'):
        if stdout:
            stdout = self._translate_newlines(stdout)
        if stderr:
            stderr = self._translate_newlines(stderr)
    self.wait()
    return (stdout, stderr)
else:
def _get_handles(self, stdin, stdout, stderr):
p2cread, p2cwrite = None, None
c2pread, c2pwrite = None, None
errread, errwrite = None, None
if stdin is None:
pass
elif stdin == PIPE:
p2cread, p2cwrite = os.pipe()
elif isinstance(stdin, int):
p2cread = stdin
else:
p2cread = stdin.fileno()
if stdout is None:
pass
elif stdout == PIPE:
c2pread, c2pwrite = os.pipe()
elif isinstance(stdout, int):
c2pwrite = stdout
else:
c2pwrite = stdout.fileno()
if stderr is None:
pass
elif stderr == PIPE:
errread, errwrite = os.pipe()
elif stderr == STDOUT:
errwrite = c2pwrite
elif isinstance(stderr, int):
errwrite = stderr
else:
errwrite = stderr.fileno()
return (p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite)
def _set_cloexec_flag(self, fd):
try:
cloexec_flag = fcntl.FD_CLOEXEC
except AttributeError:
cloexec_flag = 1
old = fcntl.fcntl(fd, fcntl.F_GETFD)
fcntl.fcntl(fd, fcntl.F_SETFD, old | cloexec_flag)
def _close_fds(self, but):
    """Close every file descriptor from 3 up to MAXFD except *but*.

    Called in the forked child so stray descriptors are not inherited
    across exec.  Most of these fds are not open at all; closing them
    fails with OSError (EBADF), which is expected and ignored.
    """
    for i in xrange(3, MAXFD):
        if i == but:
            continue
        try:
            os.close(i)
        except OSError:
            # fd not open -- nothing to do.  Only OSError is swallowed
            # (the old bare `except:` also hid KeyboardInterrupt and
            # SystemExit, which should propagate).
            pass
def _execute_child(self, args, executable, preexec_fn, close_fds,
                   cwd, env, universal_newlines, startupinfo, creationflags, shell,
                   p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite):
    """Execute the program (POSIX version) via fork()/exec().

    Any exception raised in the child between fork and exec is
    pickled, shipped back to the parent over a dedicated close-on-exec
    pipe, and re-raised in the parent.
    """
    if isinstance(args, types.StringTypes):
        args = [args]
    else:
        args = list(args)
    if shell:
        # Hand the command line to /bin/sh -c.
        args = ["/bin/sh", "-c"] + args
    if executable is None:
        executable = args[0]
    # Error-reporting pipe.  close-on-exec on the write end means it
    # disappears automatically when exec succeeds, giving the parent
    # EOF as the success signal.
    errpipe_read, errpipe_write = os.pipe()
    self._set_cloexec_flag(errpipe_write)
    # NOTE(review): gc is disabled around fork(), presumably so a
    # collection cannot run in the freshly forked child -- confirm
    # against CPython's subprocess rationale.
    gc_was_enabled = gc.isenabled()
    gc.disable()
    try:
        self.pid = os.fork()
    except:
        if gc_was_enabled:
            gc.enable()
        raise
    self._child_created = True
    if self.pid == 0:
        # ---- child process ----
        try:
            # Close the parent's ends of the pipes.
            if p2cwrite:
                os.close(p2cwrite)
            if c2pread:
                os.close(c2pread)
            if errread:
                os.close(errread)
            os.close(errpipe_read)
            # Wire our pipe ends onto fds 0/1/2.
            if p2cread:
                os.dup2(p2cread, 0)
            if c2pwrite:
                os.dup2(c2pwrite, 1)
            if errwrite:
                os.dup2(errwrite, 2)
            # Close the original fds unless they already are 0/1/2 or
            # are shared with another stream.
            if p2cread and p2cread not in (0,):
                os.close(p2cread)
            if c2pwrite and c2pwrite not in (p2cread, 1):
                os.close(c2pwrite)
            if errwrite and errwrite not in (p2cread, c2pwrite, 2):
                os.close(errwrite)
            if close_fds:
                # Keep only the error pipe open above fd 2.
                self._close_fds(but=errpipe_write)
            if cwd is not None:
                os.chdir(cwd)
            if preexec_fn:
                apply(preexec_fn)
            if env is None:
                os.execvp(executable, args)
            else:
                os.execvpe(executable, args, env)
        except:
            # Report the failure to the parent: pickle the exception
            # with its traceback attached as text.
            exc_type, exc_value, tb = sys.exc_info()
            exc_lines = traceback.format_exception(exc_type, exc_value, tb)
            exc_value.child_traceback = ''.join(exc_lines)
            os.write(errpipe_write, pickle.dumps(exc_value))
            # _exit(): skip atexit handlers / stdio flushing inherited
            # from the parent.
            os._exit(255)
    # ---- parent process ----
    if gc_was_enabled:
        gc.enable()
    os.close(errpipe_write)
    # Close the child's ends of any pipes we created.
    if p2cread and p2cwrite:
        os.close(p2cread)
    if c2pwrite and c2pread:
        os.close(c2pwrite)
    if errwrite and errread:
        os.close(errwrite)
    # Block until exec succeeds (EOF on the error pipe) or fails
    # (pickled exception data arrives).
    data = os.read(errpipe_read, 1048576)
    os.close(errpipe_read)
    if data != "":
        # Exec failed: reap the dead child, then re-raise its
        # exception in this process.
        os.waitpid(self.pid, 0)
        child_exception = pickle.loads(data)
        raise child_exception
def _handle_exitstatus(self, sts):
if os.WIFSIGNALED(sts):
self.returncode = -os.WTERMSIG(sts)
elif os.WIFEXITED(sts):
self.returncode = os.WEXITSTATUS(sts)
else:
raise RuntimeError("Unknown child exit status!")
def poll(self, _deadstate=None):
    """Non-blocking check for child exit; set and return
    self.returncode (None while the child is still running)."""
    if self.returncode is not None:
        return self.returncode
    try:
        pid, sts = os.waitpid(self.pid, os.WNOHANG)
    except os.error:
        # The child was already reaped elsewhere (ECHILD).  During
        # interpreter shutdown the caller passes a sentinel to record
        # instead of leaving returncode as None.
        if _deadstate is not None:
            self.returncode = _deadstate
    else:
        if pid == self.pid:
            self._handle_exitstatus(sts)
    return self.returncode
def wait(self):
    """Block until the child terminates; return its exit code."""
    if self.returncode is not None:
        return self.returncode
    # Blocking reap; decoding the status word sets self.returncode.
    _, status = os.waitpid(self.pid, 0)
    self._handle_exitstatus(status)
    return self.returncode
def _communicate(self, input):
    """POSIX: multiplex the child's pipes with select().

    Feeds *input* to stdin in small chunks while draining stdout and
    stderr, so a full pipe buffer can never deadlock either side;
    returns (stdout, stderr) once every pipe has hit EOF.
    """
    read_set = []
    write_set = []
    stdout = None
    stderr = None
    if self.stdin:
        # Flush anything buffered at the Python level before we start
        # writing to the raw fd.
        self.stdin.flush()
        if input:
            write_set.append(self.stdin)
        else:
            # Nothing to send: close immediately to signal EOF.
            self.stdin.close()
    if self.stdout:
        read_set.append(self.stdout)
        stdout = []
    if self.stderr:
        read_set.append(self.stderr)
        stderr = []
    input_offset = 0
    while read_set or write_set:
        rlist, wlist, xlist = select.select(read_set, write_set, [])
        if self.stdin in wlist:
            # Write at most 512 bytes per pass: a larger write into a
            # nearly-full pipe could block even though select() said
            # writable.  buffer() slices *input* without copying.
            bytes_written = os.write(self.stdin.fileno(), buffer(input, input_offset, 512))
            input_offset += bytes_written
            if input_offset >= len(input):
                self.stdin.close()
                write_set.remove(self.stdin)
        if self.stdout in rlist:
            data = os.read(self.stdout.fileno(), 1024)
            if data == "":
                # EOF on stdout.
                self.stdout.close()
                read_set.remove(self.stdout)
            stdout.append(data)
        if self.stderr in rlist:
            data = os.read(self.stderr.fileno(), 1024)
            if data == "":
                # EOF on stderr.
                self.stderr.close()
                read_set.remove(self.stderr)
            stderr.append(data)
    if stdout is not None:
        stdout = ''.join(stdout)
    if stderr is not None:
        stderr = ''.join(stderr)
    # NOTE(review): hasattr(file, 'newlines') presumably tests whether
    # the interpreter was built with universal-newline support -- if
    # not, the data was never translated on read and we do it here.
    if self.universal_newlines and hasattr(file, 'newlines'):
        if stdout:
            stdout = self._translate_newlines(stdout)
        if stderr:
            stderr = self._translate_newlines(stderr)
    self.wait()
    return (stdout, stderr)

95
tools/wafadmin/py3kfixes.py

@ -0,0 +1,95 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2009 (ita)
"""
Fixes for py3k go here
"""
import os
all_modifs = {}
def modif(dir, name, fun):
    """Apply text transformation *fun* to the file *name* inside *dir*,
    rewriting it in place.

    If *name* is '*', recurse over every .py file directly in *dir*
    and in its Tools/ sub-directory.
    """
    if name == '*':
        lst = os.listdir(dir) + ['Tools' + os.sep + x for x in os.listdir(os.path.join(dir, 'Tools'))]
        for x in lst:
            if x.endswith('.py'):
                modif(dir, x, fun)
        return
    filename = os.path.join(dir, name)
    # Read-transform-write.  try/finally guarantees the handles are
    # closed even if read/transform/write raises (the old code leaked
    # the handle on any error).
    f = open(filename, 'r')
    try:
        txt = f.read()
    finally:
        f.close()
    txt = fun(txt)
    f = open(filename, 'w')
    try:
        f.write(txt)
    finally:
        f.close()
def subst(filename):
    """Decorator factory: register the decorated function as a source
    fix for *filename*.

    Registered functions accumulate in the module-level all_modifs
    mapping, keyed by filename; fixdir() later applies them in
    registration order.
    """
    def do_subst(fun):
        global all_modifs
        # Append to the per-file list, creating it on first use.
        # The old code did `all_modifs[filename] += fun`, which raises
        # TypeError as soon as a second function is registered for the
        # same file (a list cannot be extended by a bare function).
        all_modifs.setdefault(filename, []).append(fun)
        return fun
    return do_subst
@subst('Constants.py')
def r1(code):
    """py3 fixes for Constants.py: the hash seed must be bytes, and
    the cache ABI number is bumped so py2 caches are not reused."""
    for old, new in (
        ("'iluvcuteoverload'", "b'iluvcuteoverload'"),
        ("ABI=7", "ABI=37"),
    ):
        code = code.replace(old, new)
    return code
@subst('Tools/ccroot.py')
def r2(code):
    """py3 fixes for Tools/ccroot.py: pipes want bytes on write, and
    process output must be decoded, not str()'d."""
    for old, new in (
        ("p.stdin.write('\\n')", "p.stdin.write(b'\\n')"),
        ("out=str(out)", "out=out.decode('utf-8')"),
    ):
        code = code.replace(old, new)
    return code
@subst('Utils.py')
def r3(code):
    """py3 fix for Utils.py: hash update input must be bytes."""
    return code.replace("m.update(str(lst))", "m.update(str(lst).encode())")
@subst('Task.py')
def r4(code):
    """py3 fixes for Task.py: encode strings fed to the hash, convert
    the py2 __metaclass__ declaration to class-header syntax, and
    materialize the dict-keys view before use."""
    # Applied sequentially, in the same order as before.
    for old, new in (
        ("up(self.__class__.__name__)", "up(self.__class__.__name__.encode())"),
        ("up(self.env.variant())", "up(self.env.variant().encode())"),
        ("up(x.parent.abspath())", "up(x.parent.abspath().encode())"),
        ("up(x.name)", "up(x.name.encode())"),
        ('class TaskBase(object):\n\t__metaclass__=store_task_type', 'class TaskBase(object, metaclass=store_task_type):'),
        ('keys=self.cstr_groups.keys()', 'keys=list(self.cstr_groups.keys())'),
    ):
        code = code.replace(old, new)
    return code
@subst('Build.py')
def r5(code):
    """py3 fixes for Build.py: drop the explicit pickle protocol and
    snapshot the childs dict before iterating while it mutates."""
    for old, new in (
        ("cPickle.dump(data,file,-1)", "cPickle.dump(data,file)"),
        ('for node in src_dir_node.childs.values():', 'for node in list(src_dir_node.childs.values()):'),
    ):
        code = code.replace(old, new)
    return code
@subst('*')
def r6(code):
    """Blanket py2->py3 renames applied to every wafadmin module."""
    # Applied sequentially, in the same order as before.
    for old, new in (
        ('xrange', 'range'),
        ('iteritems', 'items'),
        ('maxint', 'maxsize'),
        ('iterkeys', 'keys'),
        ('Error,e:', 'Error as e:'),
        ('Exception,e:', 'Exception as e:'),
    ):
        code = code.replace(old, new)
    return code
@subst('TaskGen.py')
def r7(code):
    """py3 fix for TaskGen.py: move task_gen's py2 __metaclass__
    attribute into the class-header metaclass= syntax."""
    return code.replace('class task_gen(object):\n\t__metaclass__=register_obj', 'class task_gen(object, metaclass=register_obj):')
def fixdir(dir):
    """Apply every registered substitution to the wafadmin tree that
    lives under *dir*."""
    global all_modifs
    for filename, funs in all_modifs.items():
        for fun in funs:
            modif(os.path.join(dir, 'wafadmin'), filename, fun)
#print('substitutions finished')
Loading…
Cancel
Save