Browse Source

addon: remove node-waf, superseded by node-gyp

v0.9.1-release
Ben Noordhuis 13 years ago
parent
commit
de32b38992
  1. 6
      configure
  2. 1
      doc/api/process.markdown
  3. 54
      tools/install.py
  4. 17
      tools/node-waf
  5. 159
      tools/waf-light
  6. 1021
      tools/wafadmin/Build.py
  7. 387
      tools/wafadmin/Configure.py
  8. 76
      tools/wafadmin/Constants.py
  9. 210
      tools/wafadmin/Environment.py
  10. 134
      tools/wafadmin/Logs.py
  11. 693
      tools/wafadmin/Node.py
  12. 279
      tools/wafadmin/Options.py
  13. 229
      tools/wafadmin/Runner.py
  14. 586
      tools/wafadmin/Scripting.py
  15. 1171
      tools/wafadmin/Task.py
  16. 588
      tools/wafadmin/TaskGen.py
  17. 4
      tools/wafadmin/Tools/__init__.py
  18. 36
      tools/wafadmin/Tools/ar.py
  19. 100
      tools/wafadmin/Tools/cc.py
  20. 625
      tools/wafadmin/Tools/ccroot.py
  21. 66
      tools/wafadmin/Tools/compiler_cc.py
  22. 61
      tools/wafadmin/Tools/compiler_cxx.py
  23. 33
      tools/wafadmin/Tools/compiler_d.py
  24. 729
      tools/wafadmin/Tools/config_c.py
  25. 104
      tools/wafadmin/Tools/cxx.py
  26. 540
      tools/wafadmin/Tools/d.py
  27. 64
      tools/wafadmin/Tools/dmd.py
  28. 38
      tools/wafadmin/Tools/gas.py
  29. 137
      tools/wafadmin/Tools/gcc.py
  30. 52
      tools/wafadmin/Tools/gdc.py
  31. 111
      tools/wafadmin/Tools/gnu_dirs.py
  32. 18
      tools/wafadmin/Tools/gob2.py
  33. 134
      tools/wafadmin/Tools/gxx.py
  34. 37
      tools/wafadmin/Tools/icc.py
  35. 34
      tools/wafadmin/Tools/icpc.py
  36. 139
      tools/wafadmin/Tools/intltool.py
  37. 330
      tools/wafadmin/Tools/libtool.py
  38. 430
      tools/wafadmin/Tools/misc.py
  39. 49
      tools/wafadmin/Tools/nasm.py
  40. 86
      tools/wafadmin/Tools/node_addon.py
  41. 187
      tools/wafadmin/Tools/osx.py
  42. 813
      tools/wafadmin/Tools/preproc.py
  43. 401
      tools/wafadmin/Tools/python.py
  44. 77
      tools/wafadmin/Tools/suncc.py
  45. 75
      tools/wafadmin/Tools/suncxx.py
  46. 305
      tools/wafadmin/Tools/unittestw.py
  47. 45
      tools/wafadmin/Tools/winres.py
  48. 77
      tools/wafadmin/Tools/xlc.py
  49. 77
      tools/wafadmin/Tools/xlcxx.py
  50. 707
      tools/wafadmin/Utils.py
  51. 3
      tools/wafadmin/__init__.py
  52. 221
      tools/wafadmin/ansiterm.py
  53. 620
      tools/wafadmin/pproc.py
  54. 122
      tools/wafadmin/py3kfixes.py

6
configure

@@ -29,11 +29,6 @@ parser.add_option("--without-npm",
dest="without_npm", dest="without_npm",
help="Don\'t install the bundled npm package manager") help="Don\'t install the bundled npm package manager")
parser.add_option("--without-waf",
action="store_true",
dest="without_waf",
help="Don\'t install node-waf")
parser.add_option("--without-ssl", parser.add_option("--without-ssl",
action="store_true", action="store_true",
dest="without_ssl", dest="without_ssl",
@@ -321,7 +316,6 @@ def configure_node(o):
o['variables']['v8_no_strict_aliasing'] = 1 # work around compiler bugs o['variables']['v8_no_strict_aliasing'] = 1 # work around compiler bugs
o['variables']['node_prefix'] = os.path.expanduser(options.prefix or '') o['variables']['node_prefix'] = os.path.expanduser(options.prefix or '')
o['variables']['node_install_npm'] = b(not options.without_npm) o['variables']['node_install_npm'] = b(not options.without_npm)
o['variables']['node_install_waf'] = b(not options.without_waf)
o['default_configuration'] = 'Debug' if options.debug else 'Release' o['default_configuration'] = 'Debug' if options.debug else 'Release'
host_arch = host_arch_win() if os.name == 'nt' else host_arch_cc() host_arch = host_arch_win() if os.name == 'nt' else host_arch_cc()

1
doc/api/process.markdown

@@ -298,7 +298,6 @@ An example of the possible output looks like:
variables: variables:
{ host_arch: 'x64', { host_arch: 'x64',
node_install_npm: 'true', node_install_npm: 'true',
node_install_waf: 'true',
node_prefix: '', node_prefix: '',
node_shared_v8: 'false', node_shared_v8: 'false',
node_shared_zlib: 'false', node_shared_zlib: 'false',

54
tools/install.py

@@ -73,59 +73,6 @@ def try_remove(path, dst):
def install(paths, dst): map(lambda path: try_copy(path, dst), paths) def install(paths, dst): map(lambda path: try_copy(path, dst), paths)
def uninstall(paths, dst): map(lambda path: try_remove(path, dst), paths) def uninstall(paths, dst): map(lambda path: try_remove(path, dst), paths)
def waf_files(action):
    """Apply *action* (install or uninstall) to every file shipped for node-waf.

    ``action`` is called as ``action(list_of_source_paths, destination)``:
    once for the node-waf launcher script and once for the wafadmin
    python modules that back it.
    """
    # the launcher script goes to bin/
    action(['tools/node-waf'], 'bin/node-waf')

    # wafadmin core modules (order matches the historical explicit listing)
    core = ['ansiterm.py', 'Build.py', 'Configure.py', 'Constants.py',
            'Environment.py', '__init__.py', 'Logs.py', 'Node.py',
            'Options.py', 'pproc.py', 'py3kfixes.py', 'Runner.py',
            'Scripting.py', 'TaskGen.py', 'Task.py']
    # wafadmin/Tools plugins
    plugins = ['ar.py', 'cc.py', 'ccroot.py', 'compiler_cc.py',
               'compiler_cxx.py', 'compiler_d.py', 'config_c.py', 'cxx.py',
               'dmd.py', 'd.py', 'gas.py', 'gcc.py', 'gdc.py', 'gnu_dirs.py',
               'gob2.py', 'gxx.py', 'icc.py', 'icpc.py', '__init__.py',
               'intltool.py', 'libtool.py', 'misc.py', 'nasm.py',
               'node_addon.py', 'osx.py', 'preproc.py', 'python.py',
               'suncc.py', 'suncxx.py', 'unittestw.py', 'winres.py',
               'xlc.py', 'xlcxx.py']

    paths = ['tools/wafadmin/' + name for name in core]
    paths += ['tools/wafadmin/Tools/' + name for name in plugins]
    paths.append('tools/wafadmin/Utils.py')
    action(paths, 'lib/node/')
def update_shebang(path, shebang): def update_shebang(path, shebang):
print 'updating shebang of %s' % path print 'updating shebang of %s' % path
s = open(path, 'r').read() s = open(path, 'r').read()
@@ -186,7 +133,6 @@ def files(action):
# with dtrace support now (oracle's "unbreakable" linux) # with dtrace support now (oracle's "unbreakable" linux)
action(['src/node.d'], 'lib/dtrace/') action(['src/node.d'], 'lib/dtrace/')
if variables.get('node_install_waf'): waf_files(action)
if variables.get('node_install_npm'): npm_files(action) if variables.get('node_install_npm'): npm_files(action)
def run(args): def run(args):

17
tools/node-waf

@@ -1,17 +0,0 @@
#!/usr/bin/env python
# Launcher for the bundled waf build system: put the installed wafadmin
# modules onto sys.path, then hand control to waf's Scripting.prepare.
import os, sys

VERSION = "1.5.16"

# resolve the install prefix relative to this script's real location
script_dir = os.path.dirname(os.path.realpath(__file__))
waf_lib_dir = os.path.join(os.path.join(script_dir, ".."), "lib", "node")
admin_dir = os.path.join(waf_lib_dir, 'wafadmin')
tools_dir = os.path.join(admin_dir, 'Tools')
sys.path = [admin_dir, tools_dir] + sys.path

import Scripting
Scripting.prepare(tools_dir, os.getcwd(), VERSION, waf_lib_dir)
sys.exit(0)

159
tools/waf-light

@@ -1,159 +0,0 @@
#!/usr/bin/env python
# encoding: ISO8859-1
# Thomas Nagy, 2005-2010
"""
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
3. The name of the author may not be used to endorse or promote products
derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR
IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT,
INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING
IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
"""
import os, sys
if sys.hexversion<0x203000f: raise ImportError("Waf requires Python >= 2.3")
if 'PSYCOWAF' in os.environ:
try:import psyco;psyco.full()
except:pass
VERSION="1.5.16"
REVISION="x"
INSTALL="x"
C1='x'
C2='x'
cwd = os.getcwd()
join = os.path.join
WAF='waf'
def b(x):
return x
if sys.hexversion>0x300000f:
WAF='waf3'
def b(x):
return x.encode()
def err(m):
print(('\033[91mError: %s\033[0m' % m))
sys.exit(1)
def unpack_wafdir(dir):
f = open(sys.argv[0],'rb')
c = "corrupted waf (%d)"
while 1:
line = f.readline()
if not line: err("run waf-light from a folder containing wafadmin")
if line == b('#==>\n'):
txt = f.readline()
if not txt: err(c % 1)
if f.readline()!=b('#<==\n'): err(c % 2)
break
if not txt: err(c % 3)
txt = txt[1:-1].replace(b(C1), b('\n')).replace(b(C2), b('\r'))
import shutil, tarfile
try: shutil.rmtree(dir)
except OSError: pass
try:
for x in ['Tools', '3rdparty']:
os.makedirs(join(dir, 'wafadmin', x))
except OSError:
err("Cannot unpack waf lib into %s\nMove waf into a writeable directory" % dir)
os.chdir(dir)
tmp = 't.bz2'
t = open(tmp,'wb')
t.write(txt)
t.close()
t = None
try:
t = tarfile.open(tmp)
except:
try:
os.system('bunzip2 t.bz2')
t = tarfile.open('t')
except:
os.chdir(cwd)
try: shutil.rmtree(dir)
except OSError: pass
err("Waf cannot be unpacked, check that bzip2 support is present")
for x in t: t.extract(x)
t.close()
for x in ['Tools', '3rdparty']:
os.chmod(join('wafadmin',x), 493)
if sys.hexversion>0x300000f:
sys.path = [join(dir, 'wafadmin')] + sys.path
import py3kfixes
py3kfixes.fixdir(dir)
os.chdir(cwd)
def test(dir):
try: os.stat(join(dir, 'wafadmin')); return os.path.abspath(dir)
except OSError: pass
def find_lib():
name = sys.argv[0]
base = os.path.dirname(os.path.abspath(name))
#devs use $WAFDIR
w=test(os.environ.get('WAFDIR', ''))
if w: return w
#waf-light
if name.endswith('waf-light'):
w = test(base)
if w: return w
err("waf-light requires wafadmin -> export WAFDIR=/folder")
dir = "/lib/%s-%s-%s/" % (WAF, VERSION, REVISION)
for i in [INSTALL,'/usr','/usr/local','/opt']:
w = test(i+dir)
if w: return w
#waf-local
s = '.%s-%s-%s'
if sys.platform == 'win32': s = s[1:]
dir = join(base, s % (WAF, VERSION, REVISION))
w = test(dir)
if w: return w
#unpack
unpack_wafdir(dir)
return dir
wafdir = find_lib()
w = join(wafdir, 'wafadmin')
t = join(w, 'Tools')
f = join(w, '3rdparty')
sys.path = [w, t, f] + sys.path
if __name__ == '__main__':
import Scripting
Scripting.prepare(t, cwd, VERSION, wafdir)

1021
tools/wafadmin/Build.py

File diff suppressed because it is too large

387
tools/wafadmin/Configure.py

@@ -1,387 +0,0 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2005-2008 (ita)
"""
Configuration system
A configuration instance is created when "waf configure" is called, it is used to:
* create data dictionaries (Environment instances)
* store the list of modules to import
The old model (copied from Scons) was to store logic (mapping file extensions to functions)
along with the data. In Waf a way was found to separate that logic by adding an indirection
layer (storing the names in the Environment instances)
In the new model, the logic is more object-oriented, and the user scripts provide the
logic. The data files (Environments) must contain configuration data only (flags, ..).
Note: the c/c++ related code is in the module config_c
"""
import os, shlex, sys, time
try: import cPickle
except ImportError: import pickle as cPickle
import Environment, Utils, Options, Logs
from Logs import warn
from Constants import *
try:
from urllib import request
except:
from urllib import urlopen
else:
urlopen = request.urlopen
conf_template = '''# project %(app)s configured on %(now)s by
# waf %(wafver)s (abi %(abi)s, python %(pyver)x on %(systype)s)
# using %(args)s
#
'''
class ConfigurationError(Utils.WscriptError):
    # raised by ConfigurationContext.fatal() when a configuration step fails
    pass
autoconfig = False
"reconfigure the project automatically"
def find_file(filename, path_list):
    """find a file in a list of paths

    @param filename: name of the file to search for
    @param path_list: list of directories to search (passed through
        Utils.to_list — presumably a space-separated string also works;
        confirm against Utils)
    @return: the first directory that contains filename, or '' if filename
        could not be found (note: the directory is returned, not the
        full file path)
    """
    for directory in Utils.to_list(path_list):
        if os.path.exists(os.path.join(directory, filename)):
            return directory
    return ''
def find_program_impl(env, filename, path_list=[], var=None, environ=None):
    """find a program in folders path_lst, and sets env[var]

    @param env: environment
    @param filename: name of the program to search for
    @param path_list: list of directories to search for filename
    @param var: environment value to be checked for in env or os.environ
    @return: either the value that is referenced with [var] in env or environ
        or the first occurrence filename or '' if filename could not be found
    """
    # NOTE(review): path_list=[] is a mutable default, but it is only rebound
    # locally (never mutated), so it is harmless here
    if not environ:
        environ = os.environ
    # accept a space-separated string as well as a list of directories
    try: path_list = path_list.split()
    except AttributeError: pass
    if var:
        # a value already present in env (or the process environment) wins
        if env[var]: return env[var]
        if var in environ: env[var] = environ[var]
    if not path_list: path_list = environ.get('PATH', '').split(os.pathsep)
    # on win32, also try the usual executable extensions; elsewhere ext is ''
    # and the split(',') below yields [''] so the bare filename is tried
    ext = (Options.platform == 'win32') and '.exe,.com,.bat,.cmd' or ''
    for y in [filename+x for x in ext.split(',')]:
        for directory in path_list:
            x = os.path.join(directory, y)
            if os.path.isfile(x):
                if var: env[var] = x
                return x
    return ''
class ConfigurationContext(Utils.Context):
tests = {}
error_handlers = []
def __init__(self, env=None, blddir='', srcdir=''):
self.env = None
self.envname = ''
self.environ = dict(os.environ)
self.line_just = 40
self.blddir = blddir
self.srcdir = srcdir
self.all_envs = {}
# curdir: necessary for recursion
self.cwd = self.curdir = os.getcwd()
self.tools = [] # tools loaded in the configuration, and that will be loaded when building
self.setenv(DEFAULT)
self.lastprog = ''
self.hash = 0
self.files = []
self.tool_cache = []
if self.blddir:
self.post_init()
def post_init(self):
self.cachedir = os.path.join(self.blddir, CACHE_DIR)
path = os.path.join(self.blddir, WAF_CONFIG_LOG)
try: os.unlink(path)
except (OSError, IOError): pass
try:
self.log = open(path, 'w')
except (OSError, IOError):
self.fatal('could not open %r for writing' % path)
app = Utils.g_module.APPNAME
if app:
ver = getattr(Utils.g_module, 'VERSION', '')
if ver:
app = "%s (%s)" % (app, ver)
now = time.ctime()
pyver = sys.hexversion
systype = sys.platform
args = " ".join(sys.argv)
wafver = WAFVERSION
abi = ABI
self.log.write(conf_template % vars())
def __del__(self):
"""cleanup function: close config.log"""
# may be ran by the gc, not always after initialization
if hasattr(self, 'log') and self.log:
self.log.close()
def fatal(self, msg):
raise ConfigurationError(msg)
def check_tool(self, input, tooldir=None, funs=None):
"load a waf tool"
tools = Utils.to_list(input)
if tooldir: tooldir = Utils.to_list(tooldir)
for tool in tools:
tool = tool.replace('++', 'xx')
if tool == 'java': tool = 'javaw'
if tool.lower() == 'unittest': tool = 'unittestw'
# avoid loading the same tool more than once with the same functions
# used by composite projects
mag = (tool, id(self.env), funs)
if mag in self.tool_cache:
continue
self.tool_cache.append(mag)
if not tooldir:
# check if the tool exists in the Tools or 3rdparty folders
_Tools = Options.tooldir[0]
_3rdparty = os.sep.join((_Tools, '..', '3rdparty'))
for d in (_Tools, _3rdparty):
lst = os.listdir(d)
if tool + '.py' in lst:
break
else:
# try to download the tool from the repository then
for x in Utils.to_list(Options.remote_repo):
for sub in ['branches/waf-%s/wafadmin/3rdparty' % WAFVERSION, 'trunk/wafadmin/3rdparty']:
url = '/'.join((x, sub, tool + '.py'))
try:
web = urlopen(url)
if web.getcode() != 200:
continue
except Exception, e:
# on python3 urlopen throws an exception
continue
else:
try:
loc = open(_3rdparty + os.sep + tool + '.py', 'wb')
loc.write(web.read())
web.close()
finally:
loc.close()
Logs.warn('downloaded %s from %s' % (tool, url))
else:
break
module = Utils.load_tool(tool, tooldir)
if funs is not None:
self.eval_rules(funs)
else:
func = getattr(module, 'detect', None)
if func:
if type(func) is type(find_file): func(self)
else: self.eval_rules(func)
self.tools.append({'tool':tool, 'tooldir':tooldir, 'funs':funs})
def sub_config(self, k):
"executes the configure function of a wscript module"
self.recurse(k, name='configure')
def pre_recurse(self, name_or_mod, path, nexdir):
return {'conf': self, 'ctx': self}
def post_recurse(self, name_or_mod, path, nexdir):
if not autoconfig:
return
self.hash = hash((self.hash, getattr(name_or_mod, 'waf_hash_val', name_or_mod)))
self.files.append(path)
def store(self, file=''):
"save the config results into the cache file"
if not os.path.isdir(self.cachedir):
os.makedirs(self.cachedir)
if not file:
file = open(os.path.join(self.cachedir, 'build.config.py'), 'w')
file.write('version = 0x%x\n' % HEXVERSION)
file.write('tools = %r\n' % self.tools)
file.close()
if not self.all_envs:
self.fatal('nothing to store in the configuration context!')
for key in self.all_envs:
tmpenv = self.all_envs[key]
tmpenv.store(os.path.join(self.cachedir, key + CACHE_SUFFIX))
def set_env_name(self, name, env):
"add a new environment called name"
self.all_envs[name] = env
return env
def retrieve(self, name, fromenv=None):
"retrieve an environment called name"
try:
env = self.all_envs[name]
except KeyError:
env = Environment.Environment()
env['PREFIX'] = os.path.abspath(os.path.expanduser(Options.options.prefix))
self.all_envs[name] = env
else:
if fromenv: warn("The environment %s may have been configured already" % name)
return env
def setenv(self, name):
"enable the environment called name"
self.env = self.retrieve(name)
self.envname = name
def add_os_flags(self, var, dest=None):
# do not use 'get' to make certain the variable is not defined
try: self.env.append_value(dest or var, Utils.to_list(self.environ[var]))
except KeyError: pass
def check_message_1(self, sr):
self.line_just = max(self.line_just, len(sr))
for x in ('\n', self.line_just * '-', '\n', sr, '\n'):
self.log.write(x)
Utils.pprint('NORMAL', "%s :" % sr.ljust(self.line_just), sep='')
def check_message_2(self, sr, color='GREEN'):
self.log.write(sr)
self.log.write('\n')
Utils.pprint(color, sr)
def check_message(self, th, msg, state, option=''):
sr = 'Checking for %s %s' % (th, msg)
self.check_message_1(sr)
p = self.check_message_2
if state: p('ok ' + str(option))
else: p('not found', 'YELLOW')
# FIXME remove in waf 1.6
# the parameter 'option' is not used (kept for compatibility)
def check_message_custom(self, th, msg, custom, option='', color='PINK'):
sr = 'Checking for %s %s' % (th, msg)
self.check_message_1(sr)
self.check_message_2(custom, color)
def find_program(self, filename, path_list=[], var=None, mandatory=False):
"wrapper that adds a configuration message"
ret = None
if var:
if self.env[var]:
ret = self.env[var]
elif var in os.environ:
ret = os.environ[var]
if not isinstance(filename, list): filename = [filename]
if not ret:
for x in filename:
ret = find_program_impl(self.env, x, path_list, var, environ=self.environ)
if ret: break
self.check_message_1('Checking for program %s' % ' or '.join(filename))
self.log.write(' find program=%r paths=%r var=%r\n -> %r\n' % (filename, path_list, var, ret))
if ret:
Utils.pprint('GREEN', str(ret))
else:
Utils.pprint('YELLOW', 'not found')
if mandatory:
self.fatal('The program %r is required' % filename)
if var:
self.env[var] = ret
return ret
def cmd_to_list(self, cmd):
"commands may be written in pseudo shell like 'ccache g++'"
if isinstance(cmd, str) and cmd.find(' '):
try:
os.stat(cmd)
except OSError:
return shlex.split(cmd)
else:
return [cmd]
return cmd
def __getattr__(self, name):
r = self.__class__.__dict__.get(name, None)
if r: return r
if name and name.startswith('require_'):
for k in ['check_', 'find_']:
n = name.replace('require_', k)
ret = self.__class__.__dict__.get(n, None)
if ret:
def run(*k, **kw):
r = ret(self, *k, **kw)
if not r:
self.fatal('requirement failure')
return r
return run
self.fatal('No such method %r' % name)
def eval_rules(self, rules):
self.rules = Utils.to_list(rules)
for x in self.rules:
f = getattr(self, x)
if not f: self.fatal("No such method '%s'." % x)
try:
f()
except Exception, e:
ret = self.err_handler(x, e)
if ret == BREAK:
break
elif ret == CONTINUE:
continue
else:
self.fatal(e)
def err_handler(self, fun, error):
pass
def conf(f):
    "decorator: attach new configuration functions"
    # the function becomes a method of ConfigurationContext, keyed by its name
    setattr(ConfigurationContext, f.__name__, f)
    return f
def conftest(f):
    "decorator: attach new configuration tests (registered as strings)"
    # registered in the class-level 'tests' dict, then also attached via conf()
    ConfigurationContext.tests[f.__name__] = f
    return conf(f)

76
tools/wafadmin/Constants.py

@@ -1,76 +0,0 @@
#!/usr/bin/env python
# encoding: utf-8
# Yinon dot me gmail 2008

"""
these constants are somewhat public, try not to mess them
maintainer: the version number is updated from the top-level wscript file
"""

# do not touch these three lines, they are updated automatically
HEXVERSION=0x105016
WAFVERSION="1.5.16"
WAFREVISION = "7610:7647M"
ABI = 7

# permissions (decimal forms of octal 0644 and 0755)
O644 = 420
O755 = 493

MAXJOBS = 99999999

# cache / persistence file names and suffixes
CACHE_DIR = 'c4che'
CACHE_SUFFIX = '.cache.py'
DBFILE = '.wafpickle-%d' % ABI
WSCRIPT_FILE = 'wscript'
WSCRIPT_BUILD_FILE = 'wscript_build'
WAF_CONFIG_LOG = 'config.log'
WAF_CONFIG_H = 'config.h'

# placeholder signature value (consumers elsewhere in wafadmin — not shown here)
SIG_NIL = 'iluvcuteoverload'

VARIANT = '_VARIANT_'
DEFAULT = 'Release'

SRCDIR = 'srcdir'
BLDDIR = 'blddir'
APPNAME = 'APPNAME'
VERSION = 'VERSION'

DEFINES = 'defines'
UNDEFINED = ()

BREAK = "break"
CONTINUE = "continue"

# task scheduler options
JOBCONTROL = "JOBCONTROL"
MAXPARALLEL = "MAXPARALLEL"
NORMAL = "NORMAL"

# task state
NOT_RUN = 0
MISSING = 1
CRASHED = 2
EXCEPTION = 3
SKIPPED = 8
SUCCESS = 9

ASK_LATER = -1
SKIP_ME = -2
RUN_ME = -3

LOG_FORMAT = "%(asctime)s %(c1)s%(zone)s%(c2)s %(message)s"
HOUR_FORMAT = "%H:%M:%S"

TEST_OK = True

CFG_FILES = 'cfg_files'

# positive '->' install
# negative '<-' uninstall
INSTALL = 1337
UNINSTALL = -1337

210
tools/wafadmin/Environment.py

@@ -1,210 +0,0 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2005 (ita)
"""Environment representation
There is one gotcha: getitem returns [] if the contents evals to False
This means env['foo'] = {}; print env['foo'] will print [] not {}
"""
import os, copy, re
import Logs, Options, Utils
from Constants import *
re_imp = re.compile('^(#)*?([^#=]*?)\ =\ (.*?)$', re.M)
class Environment(object):
    """A safe-to-use dictionary, but do not attach functions to it please (break cPickle)

    An environment instance can be stored into a file and loaded easily.

    Lookup is hierarchical: a key missing from self.table is searched in
    self.parent (set by copy()), so derived environments only store their
    own overrides.

    There is one gotcha: getitem returns [] if the contents evals to False.
    This means env['foo'] = {}; print env['foo'] will print [] not {}.
    """
    # only these two attributes exist; keeps instances small and pickles lean
    __slots__ = ("table", "parent")

    def __init__(self, filename=None):
        # 'parent' is deliberately left unset until copy() creates a child env
        self.table = {}
        #self.parent = None
        if filename:
            self.load(filename)

    def __contains__(self, key):
        if key in self.table: return True
        try: return self.parent.__contains__(key)
        except AttributeError: return False # parent may not exist

    def __str__(self):
        # collect keys over the whole parent chain for display
        keys = set()
        cur = self
        while cur:
            keys.update(cur.table.keys())
            cur = getattr(cur, 'parent', None)
        keys = list(keys)
        keys.sort()
        return "\n".join(["%r %r" % (x, self.__getitem__(x)) for x in keys])

    def __getitem__(self, key):
        # walk up the parent chain; [] when the key is nowhere defined
        # (the AttributeError fires when the root env has no 'parent')
        try:
            while 1:
                x = self.table.get(key, None)
                if not x is None:
                    return x
                self = self.parent
        except AttributeError:
            return []

    def __setitem__(self, key, value):
        self.table[key] = value

    def __delitem__(self, key):
        # only removes the local override; a parent value may become visible
        del self.table[key]

    def pop(self, key, *args):
        if len(args):
            return self.table.pop(key, *args)
        return self.table.pop(key)

    def set_variant(self, name):
        self.table[VARIANT] = name

    def variant(self):
        # same chain walk as __getitem__, with DEFAULT as the fallback
        try:
            while 1:
                x = self.table.get(VARIANT, None)
                if not x is None:
                    return x
                self = self.parent
        except AttributeError:
            return DEFAULT

    def copy(self):
        # TODO waf 1.6 rename this method derive, #368
        newenv = Environment()
        newenv.parent = self
        return newenv

    def detach(self):
        """TODO try it
        modifying the original env will not change the copy"""
        tbl = self.get_merged_dict()
        try:
            delattr(self, 'parent')
        except AttributeError:
            pass
        else:
            keys = tbl.keys()
            for x in keys:
                tbl[x] = copy.deepcopy(tbl[x])
            self.table = tbl

    def get_flat(self, key):
        # flatten a list value into a single space-separated string
        s = self[key]
        if isinstance(s, str): return s
        return ' '.join(s)

    def _get_list_value_for_modification(self, key):
        """Gets a value that must be a list for further modification. The
        list may be modified inplace and there is no need to
        "self.table[var] = value" afterwards.
        """
        try:
            value = self.table[key]
        except KeyError:
            # inherit from the parent, but copy so the parent is not mutated
            try: value = self.parent[key]
            except AttributeError: value = []
            if isinstance(value, list):
                value = value[:]
            else:
                value = [value]
        else:
            if not isinstance(value, list):
                value = [value]
        self.table[key] = value
        return value

    def append_value(self, var, value):
        current_value = self._get_list_value_for_modification(var)
        if isinstance(value, list):
            current_value.extend(value)
        else:
            current_value.append(value)

    def prepend_value(self, var, value):
        current_value = self._get_list_value_for_modification(var)
        if isinstance(value, list):
            current_value = value + current_value
            # a new list: update the dictionary entry
            self.table[var] = current_value
        else:
            current_value.insert(0, value)

    # prepend unique would be ambiguous
    def append_unique(self, var, value):
        current_value = self._get_list_value_for_modification(var)
        if isinstance(value, list):
            for value_item in value:
                if value_item not in current_value:
                    current_value.append(value_item)
        else:
            if value not in current_value:
                current_value.append(value)

    def get_merged_dict(self):
        """compute a merged table"""
        # tables are collected root-first so child overrides win in update()
        table_list = []
        env = self
        while 1:
            table_list.insert(0, env.table)
            try: env = env.parent
            except AttributeError: break
        merged_table = {}
        for table in table_list:
            merged_table.update(table)
        return merged_table

    def store(self, filename):
        "Write the variables into a file"
        file = open(filename, 'w')
        merged_table = self.get_merged_dict()
        keys = list(merged_table.keys())
        keys.sort()
        for k in keys: file.write('%s = %r\n' % (k, merged_table[k]))
        file.close()

    def load(self, filename):
        "Retrieve the variables from a file"
        # each 'name = repr' line matched by re_imp is eval'd back into a value
        tbl = self.table
        code = Utils.readf(filename)
        for m in re_imp.finditer(code):
            g = m.group
            tbl[g(2)] = eval(g(3))
        Logs.debug('env: %s', self.table)

    def get_destdir(self):
        "return the destdir, useful for installing"
        if self.__getitem__('NOINSTALL'): return ''
        return Options.options.destdir

    def update(self, d):
        # NOTE: iteritems() is Python 2 only
        for k, v in d.iteritems():
            self[k] = v

    def __getattr__(self, name):
        # attribute access falls back to dictionary lookup (env.CC == env['CC'])
        if name in self.__slots__:
            return object.__getattr__(self, name)
        else:
            return self[name]

    def __setattr__(self, name, value):
        if name in self.__slots__:
            object.__setattr__(self, name, value)
        else:
            self[name] = value

    def __delattr__(self, name):
        if name in self.__slots__:
            object.__delattr__(self, name)
        else:
            del self[name]

134
tools/wafadmin/Logs.py

@@ -1,134 +0,0 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2005 (ita)
import ansiterm
import os, re, logging, traceback, sys
from Constants import *
zones = ''
verbose = 0
colors_lst = {
'USE' : True,
'BOLD' :'\x1b[01;1m',
'RED' :'\x1b[01;31m',
'GREEN' :'\x1b[32m',
'YELLOW':'\x1b[33m',
'PINK' :'\x1b[35m',
'BLUE' :'\x1b[01;34m',
'CYAN' :'\x1b[36m',
'NORMAL':'\x1b[0m',
'cursor_on' :'\x1b[?25h',
'cursor_off' :'\x1b[?25l',
}
got_tty = False
term = os.environ.get('TERM', 'dumb')
if not term in ['dumb', 'emacs']:
try:
got_tty = sys.stderr.isatty() or (sys.platform == 'win32' and term in ['xterm', 'msys'])
except AttributeError:
pass
import Utils
if not got_tty or 'NOCOLOR' in os.environ:
colors_lst['USE'] = False
# test
#if sys.platform == 'win32':
# colors_lst['USE'] = True
def get_color(cl):
if not colors_lst['USE']: return ''
return colors_lst.get(cl, '')
class foo(object):
def __getattr__(self, a):
return get_color(a)
def __call__(self, a):
return get_color(a)
colors = foo()
re_log = re.compile(r'(\w+): (.*)', re.M)
class log_filter(logging.Filter):
def __init__(self, name=None):
pass
def filter(self, rec):
rec.c1 = colors.PINK
rec.c2 = colors.NORMAL
rec.zone = rec.module
if rec.levelno >= logging.INFO:
if rec.levelno >= logging.ERROR:
rec.c1 = colors.RED
elif rec.levelno >= logging.WARNING:
rec.c1 = colors.YELLOW
else:
rec.c1 = colors.GREEN
return True
zone = ''
m = re_log.match(rec.msg)
if m:
zone = rec.zone = m.group(1)
rec.msg = m.group(2)
if zones:
return getattr(rec, 'zone', '') in zones or '*' in zones
elif not verbose > 2:
return False
return True
class formatter(logging.Formatter):
def __init__(self):
logging.Formatter.__init__(self, LOG_FORMAT, HOUR_FORMAT)
def format(self, rec):
if rec.levelno >= logging.WARNING or rec.levelno == logging.INFO:
try:
return '%s%s%s' % (rec.c1, rec.msg.decode('utf-8'), rec.c2)
except:
return rec.c1+rec.msg+rec.c2
return logging.Formatter.format(self, rec)
def debug(*k, **kw):
if verbose:
k = list(k)
k[0] = k[0].replace('\n', ' ')
logging.debug(*k, **kw)
def error(*k, **kw):
logging.error(*k, **kw)
if verbose > 1:
if isinstance(k[0], Utils.WafError):
st = k[0].stack
else:
st = traceback.extract_stack()
if st:
st = st[:-1]
buf = []
for filename, lineno, name, line in st:
buf.append(' File "%s", line %d, in %s' % (filename, lineno, name))
if line:
buf.append(' %s' % line.strip())
if buf: logging.error("\n".join(buf))
warn = logging.warn
info = logging.info
def init_log():
log = logging.getLogger()
log.handlers = []
log.filters = []
hdlr = logging.StreamHandler()
hdlr.setFormatter(formatter())
log.addHandler(hdlr)
log.addFilter(log_filter())
log.setLevel(logging.DEBUG)
# may be initialized more than once
init_log()

693
tools/wafadmin/Node.py

@@ -1,693 +0,0 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2005 (ita)
"""
Node: filesystem structure, contains lists of nodes
IMPORTANT:
1. Each file/folder is represented by exactly one node.
2. Most would-be class properties are stored in Build: nodes to depend on, signature, flags, ..
unused class members increase the .wafpickle file size sensibly with lots of objects.
3. The build is launched from the top of the build dir (for example, in _build_/).
4. Node should not be instantiated directly.
Each instance of Build.BuildContext has a Node subclass.
(aka: 'Nodu', see BuildContext initializer)
The BuildContext is referenced here as self.__class__.bld
Its Node class is referenced here as self.__class__
The public and advertised apis are the following:
${TGT} -> dir/to/file.ext
${TGT[0].base()} -> dir/to/file
${TGT[0].dir(env)} -> dir/to
${TGT[0].file()} -> file.ext
${TGT[0].file_base()} -> file
${TGT[0].suffix()} -> .ext
${TGT[0].abspath(env)} -> /path/to/dir/to/file.ext
"""
import os, sys, fnmatch, re, stat
import Utils, Constants
UNDEFINED = 0
DIR = 1
FILE = 2
BUILD = 3
type_to_string = {UNDEFINED: "unk", DIR: "dir", FILE: "src", BUILD: "bld"}
# These fnmatch expressions are used by default to prune the directory tree
# while doing the recursive traversal in the find_iter method of the Node class.
prune_pats = '.git .bzr .hg .svn _MTN _darcs CVS SCCS'.split()
# These fnmatch expressions are used by default to exclude files and dirs
# while doing the recursive traversal in the find_iter method of the Node class.
exclude_pats = prune_pats + '*~ #*# .#* %*% ._* .gitignore .cvsignore vssver.scc .DS_Store'.split()
# These Utils.jar_regexp expressions are used by default to exclude files and dirs and also prune the directory tree
# while doing the recursive traversal in the ant_glob method of the Node class.
exclude_regs = '''
**/*~
**/#*#
**/.#*
**/%*%
**/._*
**/CVS
**/CVS/**
**/.cvsignore
**/SCCS
**/SCCS/**
**/vssver.scc
**/.svn
**/.svn/**
**/.git
**/.git/**
**/.gitignore
**/.bzr
**/.bzr/**
**/.hg
**/.hg/**
**/_MTN
**/_MTN/**
**/_darcs
**/_darcs/**
**/.DS_Store'''
class Node(object):
    """One filesystem entry (file or folder) of the project tree.

    The two lowest bits of self.id encode the node type (UNDEFINED, DIR,
    FILE or BUILD); the remaining bits come from the per-build counter
    bld.id_nodes, so ids are unique within one build.  Most per-node state
    (signatures, cached paths, directory contents) is stored on the build
    context rather than on the node, to keep the pickled state small.

    Fixes relative to the previous revision:
    * set_type: ``self.id + t - self.id & 3`` parsed as
      ``(self.id + t - self.id) & 3`` == ``t & 3``, which discarded the
      counter part of the id; parenthesized to match the documented intent.
    * update_build_dir: referenced an undefined global ``bld`` (NameError
      on the env=None path); now goes through self.__class__.bld.
    * bldpath: ``p is not ''`` (identity test, worked only through CPython
      string interning) replaced by ``p != ''``.
    * generator helpers end with ``return`` instead of ``raise
      StopIteration`` (equivalent in Python 2, and avoids the RuntimeError
      that PEP 479 turns it into on modern interpreters).
    """
    __slots__ = ("name", "parent", "id", "childs")

    def __init__(self, name, parent, node_type = UNDEFINED):
        self.name = name
        self.parent = parent

        # assumption: one build object at a time
        self.__class__.bld.id_nodes += 4
        self.id = self.__class__.bld.id_nodes + node_type

        # only directories keep a table of their children
        if node_type == DIR: self.childs = {}

        # We do not want to add another type attribute (memory)
        # use the id to find out: type = id & 3
        # for setting: new type = id + x - (id & 3)

        if parent and name in parent.childs:
            raise Utils.WafError('node %s exists in the parent files %r already' % (name, parent))

        if parent: parent.childs[name] = self

    def __setstate__(self, data):
        """Restore from the pickled tuple; leaf nodes have no 'childs' slot."""
        if len(data) == 4:
            (self.parent, self.name, self.id, self.childs) = data
        else:
            (self.parent, self.name, self.id) = data

    def __getstate__(self):
        """Pickle as a compact tuple (parent, name, id[, childs])."""
        if getattr(self, 'childs', None) is None:
            return (self.parent, self.name, self.id)
        else:
            return (self.parent, self.name, self.id, self.childs)

    def __str__(self):
        if not self.parent: return ''
        return "%s://%s" % (type_to_string[self.id & 3], self.abspath())

    def __repr__(self):
        return self.__str__()

    def __hash__(self):
        "expensive, make certain it is not used"
        raise Utils.WafError('nodes, you are doing it wrong')

    def __copy__(self):
        "nodes are not supposed to be copied"
        raise Utils.WafError('nodes are not supposed to be cloned')

    def get_type(self):
        """Return the node type encoded in the two lowest bits of the id."""
        return self.id & 3

    def set_type(self, t):
        "dangerous, you are not supposed to use this"
        # clear the old type bits, then add the new type
        # (was: self.id + t - self.id & 3, which evaluated to t & 3 because
        # '&' binds more loosely than '+'/'-' and wiped out the counter)
        self.id = self.id + t - (self.id & 3)

    def dirs(self):
        """Return the sub-folders of this directory node."""
        return [x for x in self.childs.values() if x.id & 3 == DIR]

    def files(self):
        """Return the source-file children of this directory node."""
        return [x for x in self.childs.values() if x.id & 3 == FILE]

    def get_dir(self, name, default=None):
        """Return the child folder called 'name', or 'default'."""
        node = self.childs.get(name, None)
        if not node or node.id & 3 != DIR: return default
        return node

    def get_file(self, name, default=None):
        """Return the child source file called 'name', or 'default'."""
        node = self.childs.get(name, None)
        if not node or node.id & 3 != FILE: return default
        return node

    def get_build(self, name, default=None):
        """Return the child build node called 'name', or 'default'."""
        node = self.childs.get(name, None)
        if not node or node.id & 3 != BUILD: return default
        return node

    def find_resource(self, lst):
        "Find an existing input file: either a build node declared previously or a source node"
        if isinstance(lst, str):
            lst = Utils.split_path(lst)

        if len(lst) == 1:
            parent = self
        else:
            parent = self.find_dir(lst[:-1])
            if not parent: return None
        self.__class__.bld.rescan(parent)

        name = lst[-1]
        node = parent.childs.get(name, None)
        if node:
            tp = node.id & 3
            if tp == FILE or tp == BUILD:
                return node
            else:
                return None

        tree = self.__class__.bld
        # not instantiated yet: only create a node if the file really exists
        if not name in tree.cache_dir_contents[parent.id]:
            return None

        path = parent.abspath() + os.sep + name
        try:
            st = Utils.h_file(path)
        except IOError:
            return None

        child = self.__class__(name, parent, FILE)
        # variant 0 holds the signatures of the source files
        tree.node_sigs[0][child.id] = st
        return child

    def find_or_declare(self, lst):
        "Used for declaring a build node representing a file being built"
        if isinstance(lst, str):
            lst = Utils.split_path(lst)

        if len(lst) == 1:
            parent = self
        else:
            parent = self.find_dir(lst[:-1])
            if not parent: return None
        self.__class__.bld.rescan(parent)

        name = lst[-1]
        node = parent.childs.get(name, None)
        if node:
            tp = node.id & 3
            if tp != BUILD:
                raise Utils.WafError('find_or_declare cannot return a build node (build files in the source directory %r?)' % lst)
            return node
        node = self.__class__(name, parent, BUILD)
        return node

    def find_dir(self, lst):
        "search a folder in the filesystem"
        if isinstance(lst, str):
            lst = Utils.split_path(lst)

        current = self
        for name in lst:
            self.__class__.bld.rescan(current)
            prev = current

            if not current.parent and name == current.name:
                # root node asked for itself
                continue
            elif not name:
                continue
            elif name == '.':
                continue
            elif name == '..':
                current = current.parent or current
            else:
                current = prev.childs.get(name, None)
                if current is None:
                    dir_cont = self.__class__.bld.cache_dir_contents
                    if prev.id in dir_cont and name in dir_cont[prev.id]:
                        if not prev.name:
                            if os.sep == '/':
                                # cygwin //machine/share
                                dirname = os.sep + name
                            else:
                                # windows c:
                                dirname = name
                        else:
                            # regular path
                            dirname = prev.abspath() + os.sep + name
                        if not os.path.isdir(dirname):
                            return None
                        current = self.__class__(name, prev, DIR)
                    elif (not prev.name and len(name) == 2 and name[1] == ':') or name.startswith('\\\\'):
                        # drive letter or \\ path for windows
                        current = self.__class__(name, prev, DIR)
                    else:
                        return None
                else:
                    if current.id & 3 != DIR:
                        return None
        return current

    def ensure_dir_node_from_path(self, lst):
        "used very rarely, force the construction of a branch of node instance for representing folders"
        if isinstance(lst, str):
            lst = Utils.split_path(lst)

        current = self
        for name in lst:
            if not name:
                continue
            elif name == '.':
                continue
            elif name == '..':
                current = current.parent or current
            else:
                prev = current
                current = prev.childs.get(name, None)
                if current is None:
                    current = self.__class__(name, prev, DIR)
        return current

    def exclusive_build_node(self, path):
        """
        create a hierarchy in the build dir (no source folders) for ill-behaving compilers
        the node is not hashed, so you must do it manually

        after declaring such a node, find_dir and find_resource should work as expected
        """
        lst = Utils.split_path(path)
        name = lst[-1]
        if len(lst) > 1:
            parent = None
            try:
                parent = self.find_dir(lst[:-1])
            except OSError:
                pass
            if not parent:
                # build the folder branch on demand
                parent = self.ensure_dir_node_from_path(lst[:-1])
                self.__class__.bld.rescan(parent)
            else:
                try:
                    self.__class__.bld.rescan(parent)
                except OSError:
                    pass
        else:
            parent = self

        node = parent.childs.get(name, None)
        if not node:
            node = self.__class__(name, parent, BUILD)

        return node

    def path_to_parent(self, parent):
        "path relative to a direct ancestor, as string"
        lst = []
        p = self
        h1 = parent.height()
        h2 = p.height()
        while h2 > h1:
            h2 -= 1
            lst.append(p.name)
            p = p.parent
        if lst:
            lst.reverse()
            ret = os.path.join(*lst)
        else:
            ret = ''
        return ret

    def find_ancestor(self, node):
        "find a common ancestor for two nodes - for the shortest path in hierarchy"
        dist = self.height() - node.height()
        if dist < 0: return node.find_ancestor(self)
        # now the real code: walk up to the same height, then in lockstep
        cand = self
        while dist > 0:
            cand = cand.parent
            dist -= 1
        if cand == node: return cand
        cursor = node
        while cand.parent:
            cand = cand.parent
            cursor = cursor.parent
            if cand == cursor: return cand

    def relpath_gen(self, from_node):
        "string representing a relative path between self to another node"

        if self == from_node: return '.'
        if from_node.parent == self: return '..'

        # up_path is '../../../' and down_path is 'dir/subdir/subdir/file'
        ancestor = self.find_ancestor(from_node)
        lst = []
        cand = self
        while not cand.id == ancestor.id:
            lst.append(cand.name)
            cand = cand.parent
        cand = from_node
        while not cand.id == ancestor.id:
            lst.append('..')
            cand = cand.parent
        lst.reverse()
        return os.sep.join(lst)

    def nice_path(self, env=None):
        "printed in the console, open files easily from the launch directory"
        tree = self.__class__.bld
        ln = tree.launch_node()

        if self.id & 3 == FILE: return self.relpath_gen(ln)
        else: return os.path.join(tree.bldnode.relpath_gen(ln), env.variant(), self.relpath_gen(tree.srcnode))

    def is_child_of(self, node):
        "does this node belong to the subtree node"
        p = self
        diff = self.height() - node.height()
        while diff > 0:
            diff -= 1
            p = p.parent
        return p.id == node.id

    def variant(self, env):
        "variant, or output directory for this node, a source has for variant 0"
        if not env: return 0
        elif self.id & 3 == FILE: return 0
        else: return env.variant()

    def height(self):
        "amount of parents"
        # README a cache can be added here if necessary
        d = self
        val = -1
        while d:
            d = d.parent
            val += 1
        return val

    # helpers for building things

    def abspath(self, env=None):
        """
        absolute path
        @param env [Environment]:
            * obligatory for build nodes: build/variant/src/dir/bar.o
            * optional for dirs: get either src/dir or build/variant/src/dir
            * excluded for source nodes: src/dir/bar.c

        Instead of computing the absolute path each time again,
        store the already-computed absolute paths in one of (variants+1) dictionaries:
        bld.cache_node_abspath[0] holds absolute paths for source nodes.
        bld.cache_node_abspath[variant] holds the absolute path for the build nodes
        which reside in the variant given by env.
        """
        ## absolute path - hot zone, so do not touch

        # less expensive
        variant = (env and (self.id & 3 != FILE) and env.variant()) or 0

        ret = self.__class__.bld.cache_node_abspath[variant].get(self.id, None)
        if ret: return ret

        if not variant:
            # source directory
            if not self.parent:
                val = os.sep == '/' and os.sep or ''
            elif not self.parent.name: # root
                val = (os.sep == '/' and os.sep or '') + self.name
            else:
                val = self.parent.abspath() + os.sep + self.name
        else:
            # build directory
            val = os.sep.join((self.__class__.bld.bldnode.abspath(), variant, self.path_to_parent(self.__class__.bld.srcnode)))
        self.__class__.bld.cache_node_abspath[variant][self.id] = val
        return val

    def change_ext(self, ext):
        "node of the same path, but with a different extension - hot zone so do not touch"
        name = self.name
        k = name.rfind('.')
        if k >= 0:
            name = name[:k] + ext
        else:
            name = name + ext

        return self.parent.find_or_declare([name])

    def src_dir(self, env):
        "src path without the file name"
        return self.parent.srcpath(env)

    def bld_dir(self, env):
        "build path without the file name"
        return self.parent.bldpath(env)

    def bld_base(self, env):
        "build path without the extension: src/dir/foo(.cpp)"
        s = os.path.splitext(self.name)[0]
        return os.path.join(self.bld_dir(env), s)

    def bldpath(self, env=None):
        "path seen from the build dir default/src/foo.cpp"
        if self.id & 3 == FILE:
            return self.relpath_gen(self.__class__.bld.bldnode)
        p = self.path_to_parent(self.__class__.bld.srcnode)
        # was 'p is not ""' - an identity test that only worked because
        # CPython interns the empty string; use equality
        if p != '':
            return env.variant() + os.sep + p
        return env.variant()

    def srcpath(self, env=None):
        "path in the srcdir from the build dir ../src/foo.cpp"
        if self.id & 3 == BUILD:
            return self.bldpath(env)
        return self.relpath_gen(self.__class__.bld.bldnode)

    def read(self, env):
        "get the contents of a file, it is not used anywhere for the moment"
        return Utils.readf(self.abspath(env))

    def dir(self, env):
        "scons-like"
        return self.parent.abspath(env)

    def file(self):
        "scons-like"
        return self.name

    def file_base(self):
        "scons-like"
        return os.path.splitext(self.name)[0]

    def suffix(self):
        "scons-like - hot zone so do not touch"
        k = max(0, self.name.rfind('.'))
        return self.name[k:]

    def find_iter_impl(self, src=True, bld=True, dir=True, accept_name=None, is_prune=None, maxdepth=25):
        """find nodes in the filesystem hierarchy, try to instanciate the nodes passively; same gotcha as ant_glob"""
        bld_ctx = self.__class__.bld
        bld_ctx.rescan(self)
        for name in bld_ctx.cache_dir_contents[self.id]:
            if accept_name(self, name):
                node = self.find_resource(name)
                if node:
                    if src and node.id & 3 == FILE:
                        yield node
                else:
                    node = self.find_dir(name)
                    if node and node.id != bld_ctx.bldnode.id:
                        if dir:
                            yield node
                        if not is_prune(self, name):
                            if maxdepth:
                                for k in node.find_iter_impl(src, bld, dir, accept_name, is_prune, maxdepth=maxdepth - 1):
                                    yield k
            else:
                if not is_prune(self, name):
                    node = self.find_resource(name)
                    if not node:
                        # not a file, it is a dir
                        node = self.find_dir(name)
                        if node and node.id != bld_ctx.bldnode.id:
                            if maxdepth:
                                for k in node.find_iter_impl(src, bld, dir, accept_name, is_prune, maxdepth=maxdepth - 1):
                                    yield k

        if bld:
            for node in self.childs.values():
                if node.id == bld_ctx.bldnode.id:
                    continue
                if node.id & 3 == BUILD:
                    if accept_name(self, node.name):
                        yield node
        # end of generator (was 'raise StopIteration', invalid under PEP 479)
        return

    def find_iter(self, in_pat=['*'], ex_pat=exclude_pats, prune_pat=prune_pats, src=True, bld=True, dir=False, maxdepth=25, flat=False):
        """find nodes recursively, this returns everything but folders by default; same gotcha as ant_glob"""
        # NOTE(review): these two raises escape the caller's for-loop since the
        # exception occurs while calling find_iter, not while iterating - kept
        # for compatibility with existing callers
        if not (src or bld or dir):
            raise StopIteration

        if self.id & 3 != DIR:
            raise StopIteration

        in_pat = Utils.to_list(in_pat)
        ex_pat = Utils.to_list(ex_pat)
        prune_pat = Utils.to_list(prune_pat)

        def accept_name(node, name):
            for pat in ex_pat:
                if fnmatch.fnmatchcase(name, pat):
                    return False
            for pat in in_pat:
                if fnmatch.fnmatchcase(name, pat):
                    return True
            return False

        def is_prune(node, name):
            for pat in prune_pat:
                if fnmatch.fnmatchcase(name, pat):
                    return True
            return False

        ret = self.find_iter_impl(src, bld, dir, accept_name, is_prune, maxdepth=maxdepth)
        if flat:
            return " ".join([x.relpath_gen(self) for x in ret])

        return ret

    def ant_glob(self, *k, **kw):
        """
        known gotcha: will enumerate the files, but only if the folder exists in the source directory
        """
        src=kw.get('src', 1)
        bld=kw.get('bld', 0)
        dir=kw.get('dir', 0)
        excl = kw.get('excl', exclude_regs)
        incl = k and k[0] or kw.get('incl', '**')

        def to_pat(s):
            # translate ant-like patterns into lists of '**' markers and regexps
            lst = Utils.to_list(s)
            ret = []
            for x in lst:
                x = x.replace('//', '/')
                if x.endswith('/'):
                    x += '**'
                lst2 = x.split('/')
                accu = []
                for k in lst2:
                    if k == '**':
                        accu.append(k)
                    else:
                        k = k.replace('.', '[.]').replace('*', '.*').replace('?', '.')
                        k = '^%s$' % k
                        #print "pattern", k
                        accu.append(re.compile(k))
                ret.append(accu)
            return ret

        def filtre(name, nn):
            # advance each pattern by one path component
            ret = []
            for lst in nn:
                if not lst:
                    pass
                elif lst[0] == '**':
                    ret.append(lst)
                    if len(lst) > 1:
                        if lst[1].match(name):
                            ret.append(lst[2:])
                    else:
                        ret.append([])
                elif lst[0].match(name):
                    ret.append(lst[1:])
            return ret

        def accept(name, pats):
            # pats = [include patterns, exclude patterns]
            nacc = filtre(name, pats[0])
            nrej = filtre(name, pats[1])
            if [] in nrej:
                nacc = []
            return [nacc, nrej]

        def ant_iter(nodi, maxdepth=25, pats=[]):
            nodi.__class__.bld.rescan(nodi)
            for name in nodi.__class__.bld.cache_dir_contents[nodi.id]:
                npats = accept(name, pats)
                if npats and npats[0]:
                    accepted = [] in npats[0]
                    #print accepted, nodi, name

                    node = nodi.find_resource(name)
                    if node and accepted:
                        if src and node.id & 3 == FILE:
                            yield node
                    else:
                        node = nodi.find_dir(name)
                        if node and node.id != nodi.__class__.bld.bldnode.id:
                            if accepted and dir:
                                yield node
                            if maxdepth:
                                for k in ant_iter(node, maxdepth=maxdepth - 1, pats=npats):
                                    yield k
            if bld:
                for node in nodi.childs.values():
                    if node.id == nodi.__class__.bld.bldnode.id:
                        continue
                    if node.id & 3 == BUILD:
                        npats = accept(node.name, pats)
                        if npats and npats[0] and [] in npats[0]:
                            yield node
            # end of generator (was 'raise StopIteration', invalid under PEP 479)
            return

        ret = [x for x in ant_iter(self, pats=[to_pat(incl), to_pat(excl)])]

        if kw.get('flat', True):
            return " ".join([x.relpath_gen(self) for x in ret])

        return ret

    def update_build_dir(self, env=None):
        """Mirror the on-disk contents of a build folder into the node tree."""
        if not env:
            # was 'for env in bld.all_envs' - 'bld' is undefined here (NameError);
            # all_envs presumably maps variant name -> Environment (see Build) -
            # TODO confirm against Build.BuildContext
            for env in self.__class__.bld.all_envs.values():
                self.update_build_dir(env)
            return

        path = self.abspath(env)

        lst = Utils.listdir(path)
        try:
            self.__class__.bld.cache_dir_contents[self.id].update(lst)
        except KeyError:
            self.__class__.bld.cache_dir_contents[self.id] = set(lst)
        self.__class__.bld.cache_scanned_folders[self.id] = True

        for k in lst:
            npath = path + os.sep + k
            st = os.stat(npath)
            if stat.S_ISREG(st[stat.ST_MODE]):
                ick = self.find_or_declare(k)
                if not (ick.id in self.__class__.bld.node_sigs[env.variant()]):
                    self.__class__.bld.node_sigs[env.variant()][ick.id] = Constants.SIG_NIL
            elif stat.S_ISDIR(st[stat.ST_MODE]):
                child = self.find_dir(k)
                if not child:
                    child = self.ensure_dir_node_from_path(k)
                child.update_build_dir(env)
# Default Node subclass; each Build.BuildContext gets its own copy (aliased
# as 'Nodu', see the module docstring) so per-build class state such as the
# 'bld' reference does not leak between build objects.
class Nodu(Node):
    pass

279
tools/wafadmin/Options.py

@ -1,279 +0,0 @@
#!/usr/bin/env python
# encoding: utf-8
# Scott Newton, 2005 (scottn)
# Thomas Nagy, 2006 (ita)
"Custom command-line options"

import os, sys, imp, types, tempfile, optparse
import Logs, Utils
from Constants import *

cmds = 'distclean configure build install clean uninstall check dist distcheck'.split()

# TODO remove in waf 1.6 the following two
commands = {}
is_install = False

options = {}
arg_line = []
launch_dir = ''
tooldir = ''
lockfile = os.environ.get('WAFLOCK', '.lock-wscript')
try: cache_global = os.path.abspath(os.environ['WAFCACHE'])
except KeyError: cache_global = ''
platform = Utils.unversioned_sys_platform()
conf_file = 'conf-runs-%s-%d.pickle' % (platform, ABI)

remote_repo = ['http://waf.googlecode.com/svn/']
"""remote directory for the plugins"""

# Such a command-line should work: JOBS=4 PREFIX=/opt/ DESTDIR=/tmp/ahoj/ waf configure
default_prefix = os.environ.get('PREFIX')
if not default_prefix:
    if platform == 'win32': default_prefix = tempfile.gettempdir()
    else: default_prefix = '/usr/local/'

# os.environ.get returns a *string*; the old code compared it to an int, so a
# user-supplied JOBS value was never normalized and leaked a str into the
# optparse default - convert it explicitly
try:
    default_jobs = int(os.environ.get('JOBS', -1))
except ValueError:
    default_jobs = -1
if default_jobs < 1:
    try:
        if 'SC_NPROCESSORS_ONLN' in os.sysconf_names:
            default_jobs = os.sysconf('SC_NPROCESSORS_ONLN')
        else:
            default_jobs = int(Utils.cmd_output(['sysctl', '-n', 'hw.ncpu']))
    except:
        # best-effort detection: fall through to platform-specific fallbacks
        if os.name == 'java': # platform.system() == 'Java'
            from java.lang import Runtime
            default_jobs = Runtime.getRuntime().availableProcessors()
        else:
            # environment var defined on win32
            default_jobs = int(os.environ.get('NUMBER_OF_PROCESSORS', 1))

default_destdir = os.environ.get('DESTDIR', '')
def get_usage(self):
    """Build the usage text listing the wscript commands and their docstrings.

    Monkey-patched onto optparse.OptionParser below, hence the 'self'
    parameter.  Falls back to the static command list when no main wscript
    module has been loaded yet.
    """
    cmds_str = []
    module = Utils.g_module
    if module:
        # create the help messages for commands
        tbl = module.__dict__
        keys = list(tbl.keys())
        keys.sort()

        if 'build' in tbl:
            if not module.build.__doc__:
                module.build.__doc__ = 'builds the project'
        if 'configure' in tbl:
            if not module.configure.__doc__:
                module.configure.__doc__ = 'configures the project'

        ban = ['set_options', 'init', 'shutdown']

        optlst = [x for x in keys if not x in ban
            and type(tbl[x]) is type(parse_args_impl)
            and tbl[x].__doc__
            and not x.startswith('_')]

        # 'or [0]' guards the empty case: max([]) raises ValueError when the
        # wscript defines no documented commands
        just = max([len(x) for x in optlst] or [0])

        for x in optlst:
            cmds_str.append(' %s: %s' % (x.ljust(just), tbl[x].__doc__))
        ret = '\n'.join(cmds_str)
    else:
        ret = ' '.join(cmds)
    return '''waf [command] [options]

Main commands (example: ./waf build -j4)
%s
''' % ret

setattr(optparse.OptionParser, 'get_usage', get_usage)
def create_parser(module=None):
    """Create and return the optparse parser carrying waf's standard flags.

    The 'module' argument is accepted for interface compatibility; option
    registration for wscript modules happens later through Handler.
    """
    Logs.debug('options: create_parser is called')

    parser = optparse.OptionParser(conflict_handler="resolve", version = 'waf %s (%s)' % (WAFVERSION, WAFREVISION))
    parser.formatter.width = Utils.get_term_cols()

    # generic options, in the order they appear in --help
    parser.add_option('-j', '--jobs',
        dest = 'jobs',
        type = 'int',
        default = default_jobs,
        help = 'amount of parallel jobs (%r)' % default_jobs)

    parser.add_option('-k', '--keep',
        dest = 'keep',
        action = 'store_true',
        default = False,
        help = 'keep running happily on independent task groups')

    parser.add_option('-v', '--verbose',
        dest = 'verbose',
        action = 'count',
        default = 0,
        help = 'verbosity level -v -vv or -vvv [default: 0]')

    parser.add_option('--nocache',
        dest = 'nocache',
        action = 'store_true',
        default = False,
        help = 'ignore the WAFCACHE (if set)')

    parser.add_option('--zones',
        dest = 'zones',
        action = 'store',
        default = '',
        help = 'debugging zones (task_gen, deps, tasks, etc)')

    parser.add_option('-p', '--progress',
        dest = 'progress_bar',
        action = 'count',
        default = 0,
        help = '-p: progress bar; -pp: ide output')

    parser.add_option('--targets',
        dest = 'compile_targets',
        action = 'store',
        default = '',
        help = 'build given task generators, e.g. "target1,target2"')

    # options only meaningful at configuration time
    group = optparse.OptionGroup(parser, 'configuration options')
    parser.add_option_group(group)
    group.add_option('-b', '--blddir',
        dest = 'blddir',
        action = 'store',
        default = '',
        help = 'build dir for the project (configuration)')
    group.add_option('-s', '--srcdir',
        dest = 'srcdir',
        action = 'store',
        default = '',
        help = 'src dir for the project (configuration)')
    group.add_option('--prefix',
        dest = 'prefix',
        default = default_prefix,
        help = 'installation prefix (configuration) [default: %r]' % default_prefix)

    # options only meaningful at installation time
    group = optparse.OptionGroup(parser, 'installation options')
    parser.add_option_group(group)
    group.add_option('--destdir',
        dest = 'destdir',
        default = default_destdir,
        help = 'installation root [default: %r]' % default_destdir)
    group.add_option('-f', '--force',
        dest = 'force',
        action = 'store_true',
        default = False,
        help = 'force file installation')

    return parser
def parse_args_impl(parser, _args=None):
    """Parse the command line and populate the module globals.

    Fills 'options' (the optparse values), 'commands' (command name ->
    truthy flag) and 'arg_line' (the positional arguments), then applies
    the side effects: job-count clamping, destdir normalization and the
    Logs verbosity/zones setup.
    """
    global options, commands, arg_line
    (options, args) = parser.parse_args(args=_args)

    arg_line = args
    #arg_line = args[:] # copy

    # By default, 'waf' is equivalent to 'waf build'
    commands = {}
    for var in cmds: commands[var] = 0
    if not args:
        commands['build'] = 1
        args.append('build')

    # Parse the command arguments
    for arg in args:
        commands[arg] = True

    # the check thing depends on the build
    if 'check' in args:
        idx = args.index('check')
        try:
            bidx = args.index('build')
            if bidx > idx:
                # force 'build' to run before 'check'
                raise ValueError('build before check')
        except ValueError, e:
            args.insert(idx, 'build')

    # 'init' must always run first
    if args[0] != 'init':
        args.insert(0, 'init')

    # TODO -k => -j0
    if options.keep: options.jobs = 1
    if options.jobs < 1: options.jobs = 1

    if 'install' in sys.argv or 'uninstall' in sys.argv:
        # absolute path only if set
        options.destdir = options.destdir and os.path.abspath(os.path.expanduser(options.destdir))

    Logs.verbose = options.verbose
    Logs.init_log()

    if options.zones:
        Logs.zones = options.zones.split(',')
        if not Logs.verbose: Logs.verbose = 1
    elif Logs.verbose > 0:
        Logs.zones = ['runner']
    if Logs.verbose > 2:
        Logs.zones = ['*']
# TODO waf 1.6
# 1. rename the class to OptionsContext
# 2. instead of a class attribute, use a module (static 'parser')
# 3. parse_args_impl was made in times when we did not know about binding new methods to classes
class Handler(Utils.Context):
    """loads wscript modules in folders for adding options

    This class should be named 'OptionsContext'
    A method named 'recurse' is bound when used by the module Scripting"""

    # the last instance is also published on the class itself,
    # to make it possible to access the reference, like Build.bld
    parser = None

    def __init__(self, module=None):
        self.parser = create_parser(module)
        self.cwd = os.getcwd()
        # publish this instance as the class-level reference
        Handler.parser = self

    def add_option(self, *k, **kw):
        # delegate to the underlying optparse parser
        self.parser.add_option(*k, **kw)

    def add_option_group(self, *k, **kw):
        return self.parser.add_option_group(*k, **kw)

    def get_option_group(self, opt_str):
        return self.parser.get_option_group(opt_str)

    def sub_options(self, *k, **kw):
        """Run set_options() of the wscript in the given sub-folder."""
        if not k: raise Utils.WscriptError('folder expected')
        self.recurse(k[0], name='set_options')

    def tool_options(self, *k, **kw):
        """Load the given waf tools and run their set_options(), if any."""
        Utils.python_24_guard()

        if not k[0]:
            raise Utils.WscriptError('invalid tool_options call %r %r' % (k, kw))
        tools = Utils.to_list(k[0])

        # TODO waf 1.6 remove the global variable tooldir
        path = Utils.to_list(kw.get('tdir', kw.get('tooldir', tooldir)))

        for tool in tools:
            # normalize tool names to their module file names
            tool = tool.replace('++', 'xx')
            if tool == 'java': tool = 'javaw'
            if tool.lower() == 'unittest': tool = 'unittestw'
            module = Utils.load_tool(tool, path)
            try:
                fun = module.set_options
            except AttributeError:
                # a tool without options is fine
                pass
            else:
                fun(kw.get('option_group', self))

    def parse_args(self, args=None):
        parse_args_impl(self.parser, args)

229
tools/wafadmin/Runner.py

@ -1,229 +0,0 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2005-2008 (ita)
"Execute the tasks"
import sys, random, time, threading, traceback
try: from Queue import Queue
except ImportError: from queue import Queue
import Build, Utils, Logs, Options
from Logs import debug, error
from Constants import *
# how many tasks may sit in the producer area beyond the number of jobs
# (see Parallel.refill_task_list)
GAP = 15

run_old = threading.Thread.run
def run(*args, **kwargs):
    """Wrapper around Thread.run: report uncaught exceptions through
    sys.excepthook, while letting KeyboardInterrupt/SystemExit propagate."""
    try:
        run_old(*args, **kwargs)
    except (KeyboardInterrupt, SystemExit):
        raise
    except:
        sys.excepthook(*sys.exc_info())
# monkey-patch all threads created from now on
threading.Thread.run = run
class TaskConsumer(threading.Thread):
    """Daemon worker thread: pulls tasks from the shared 'ready' queue,
    executes them and posts the results back to the producer (tsk.master)."""

    # queue of tasks ready to execute, shared by all consumers
    ready = Queue(0)
    # the worker pool, created lazily by Parallel.start
    consumers = []

    def __init__(self):
        threading.Thread.__init__(self)
        self.setDaemon(1)
        self.start()

    def run(self):
        try:
            self.loop()
        except:
            # daemon thread: swallow everything on interpreter shutdown
            pass

    def loop(self):
        """Consume tasks forever; each result is put on the master's out queue."""
        while 1:
            tsk = TaskConsumer.ready.get()
            m = tsk.master
            if m.stop:
                # build already failed: just hand the task back
                m.out.put(tsk)
                continue

            try:
                tsk.generator.bld.printout(tsk.display())
                if tsk.__class__.stat: ret = tsk.__class__.stat(tsk)
                # actual call to task's run() function
                else: ret = tsk.call_run()
            except Exception, e:
                tsk.err_msg = Utils.ex_stack()
                tsk.hasrun = EXCEPTION

                # TODO cleanup
                m.error_handler(tsk)
                m.out.put(tsk)
                continue

            if ret:
                # non-zero return code from the command
                tsk.err_code = ret
                tsk.hasrun = CRASHED
            else:
                try:
                    tsk.post_run()
                except Utils.WafError:
                    pass
                except Exception:
                    tsk.err_msg = Utils.ex_stack()
                    tsk.hasrun = EXCEPTION
                else:
                    tsk.hasrun = SUCCESS
            if tsk.hasrun != SUCCESS:
                m.error_handler(tsk)

            m.out.put(tsk)
class Parallel(object):
    """
    keep the consumer threads busy, and avoid consuming cpu cycles
    when no more tasks can be added (end of the build, etc)
    """
    def __init__(self, bld, j=2):
        # number of consumers
        self.numjobs = j

        self.manager = bld.task_manager
        self.manager.current_group = 0

        # total number of tasks, for the progress indicator
        self.total = self.manager.total()

        # tasks waiting to be processed - IMPORTANT
        self.outstanding = []
        self.maxjobs = MAXJOBS

        # tasks that are awaiting for another task to complete
        self.frozen = []

        # tasks returned by the consumers
        self.out = Queue(0)

        self.count = 0 # tasks not in the producer area

        self.processed = 1 # progress indicator

        self.stop = False # error condition to stop the build
        self.error = False # error flag

    def get_next(self):
        "override this method to schedule the tasks in a particular order"
        if not self.outstanding:
            return None
        return self.outstanding.pop(0)

    def postpone(self, tsk):
        "override this method to schedule the tasks in a particular order"
        # TODO consider using a deque instead
        # randomizing the insert side spreads out dependent tasks
        if random.randint(0, 1):
            self.frozen.insert(0, tsk)
        else:
            self.frozen.append(tsk)

    def refill_task_list(self):
        "called to set the next group of tasks"

        # drain results while too many tasks are in flight
        while self.count > self.numjobs + GAP or self.count >= self.maxjobs:
            self.get_out()

        while not self.outstanding:
            if self.count:
                self.get_out()

            if self.frozen:
                # retry the postponed tasks
                self.outstanding += self.frozen
                self.frozen = []
            elif not self.count:
                # nothing left in this group: move on to the next one
                (jobs, tmp) = self.manager.get_next_set()
                if jobs != None: self.maxjobs = jobs
                if tmp: self.outstanding += tmp
                break

    def get_out(self):
        "the tasks that are put to execute are all collected using get_out"
        ret = self.out.get()
        self.manager.add_finished(ret)
        if not self.stop and getattr(ret, 'more_tasks', None):
            # a task may spawn additional tasks when it completes
            self.outstanding += ret.more_tasks
            self.total += len(ret.more_tasks)
        self.count -= 1

    def error_handler(self, tsk):
        "by default, errors make the build stop (not thread safe so be careful)"
        if not Options.options.keep:
            self.stop = True
        self.error = True

    def start(self):
        "execute the tasks"

        if TaskConsumer.consumers:
            # the worker pool is usually loaded lazily (see below)
            # in case it is re-used with a different value of numjobs:
            while len(TaskConsumer.consumers) < self.numjobs:
                TaskConsumer.consumers.append(TaskConsumer())

        while not self.stop:

            self.refill_task_list()

            # consider the next task
            tsk = self.get_next()
            if not tsk:
                if self.count:
                    # tasks may add new ones after they are run
                    continue
                else:
                    # no tasks to run, no tasks running, time to exit
                    break

            if tsk.hasrun:
                # if the task is marked as "run", just skip it
                self.processed += 1
                self.manager.add_finished(tsk)
                continue

            try:
                st = tsk.runnable_status()
            except Exception, e:
                self.processed += 1
                if self.stop and not Options.options.keep:
                    tsk.hasrun = SKIPPED
                    self.manager.add_finished(tsk)
                    continue
                self.error_handler(tsk)
                self.manager.add_finished(tsk)
                tsk.hasrun = EXCEPTION
                tsk.err_msg = Utils.ex_stack()
                continue

            if st == ASK_LATER:
                self.postpone(tsk)
            elif st == SKIP_ME:
                self.processed += 1
                tsk.hasrun = SKIPPED
                self.manager.add_finished(tsk)
            else:
                # run me: put the task in ready queue
                tsk.position = (self.processed, self.total)
                self.count += 1
                tsk.master = self
                TaskConsumer.ready.put(tsk)
                self.processed += 1

                # create the consumer threads only if there is something to consume
                if not TaskConsumer.consumers:
                    TaskConsumer.consumers = [TaskConsumer() for i in xrange(self.numjobs)]

        # self.count represents the tasks that have been made available to the consumer threads
        # collect all the tasks after an error else the message may be incomplete
        while self.error and self.count:
            self.get_out()

        #print loop
        assert (self.count == 0 or self.stop)

586
tools/wafadmin/Scripting.py

@ -1,586 +0,0 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2005 (ita)
"Module called for configuring, compiling and installing targets"
import os, sys, shutil, traceback, datetime, inspect, errno
import Utils, Configure, Build, Logs, Options, Environment, Task
from Logs import error, warn, info
from Constants import *
# archive compression flavor - presumably used by dist()/tarfile; confirm in dist
g_gz = 'bz2'
# commands still pending execution, consumed by main() from Options.arg_line
commands = []
def prepare_impl(t, cwd, ver, wafdir):
    """Locate the project wscript, load it as the main module, bind default
    commands/contexts, parse the command line and hand over to main().

    t: tools directory; cwd: launch directory; ver: waf version string;
    wafdir: waf installation directory.
    """
    Options.tooldir = [t]
    Options.launch_dir = cwd

    # some command-line options can be processed immediately
    if '--version' in sys.argv:
        opt_obj = Options.Handler()
        opt_obj.curdir = cwd
        opt_obj.parse_args()
        sys.exit(0)

    # now find the wscript file
    msg1 = 'Waf: Please run waf from a directory containing a file named "%s" or run distclean' % WSCRIPT_FILE

    # in theory projects can be configured in an autotool-like manner:
    # mkdir build && cd build && ../waf configure && ../waf
    build_dir_override = None
    candidate = None

    lst = os.listdir(cwd)

    search_for_candidate = True
    if WSCRIPT_FILE in lst:
        candidate = cwd
    elif 'configure' in sys.argv and not WSCRIPT_BUILD_FILE in lst:
        # autotool-like configuration
        calldir = os.path.abspath(os.path.dirname(sys.argv[0]))
        if WSCRIPT_FILE in os.listdir(calldir):
            candidate = calldir
            search_for_candidate = False
        else:
            error('arg[0] directory does not contain a wscript file')
            sys.exit(1)
        build_dir_override = cwd

    # climb up to find a script if it is not found
    while search_for_candidate:
        if len(cwd) <= 3:
            break # stop at / or c:
        dirlst = os.listdir(cwd)
        if WSCRIPT_FILE in dirlst:
            candidate = cwd
        if 'configure' in sys.argv and candidate:
            break
        if Options.lockfile in dirlst:
            # a previous configuration recorded the project root in the lockfile
            env = Environment.Environment()
            try:
                env.load(os.path.join(cwd, Options.lockfile))
            except:
                error('could not load %r' % Options.lockfile)
            try:
                os.stat(env['cwd'])
            except:
                # the recorded directory disappeared; use the current one
                candidate = cwd
            else:
                candidate = env['cwd']
            break
        cwd = os.path.dirname(cwd) # climb up

    if not candidate:
        # check if the user only wanted to display the help
        if '-h' in sys.argv or '--help' in sys.argv:
            warn('No wscript file found: the help message may be incomplete')
            opt_obj = Options.Handler()
            opt_obj.curdir = cwd
            opt_obj.parse_args()
        else:
            error(msg1)
        sys.exit(0)

    # We have found wscript, but there is no guarantee that it is valid
    try:
        os.chdir(candidate)
    except OSError:
        raise Utils.WafError("the folder %r is unreadable" % candidate)

    # define the main module containing the functions init, shutdown, ..
    Utils.set_main_module(os.path.join(candidate, WSCRIPT_FILE))

    if build_dir_override:
        d = getattr(Utils.g_module, BLDDIR, None)
        if d:
            # test if user has set the blddir in wscript.
            msg = ' Overriding build directory %s with %s' % (d, build_dir_override)
            warn(msg)
        Utils.g_module.blddir = build_dir_override

    # bind a few methods and classes by default
    def set_def(obj, name=''):
        # install 'obj' into the wscript module unless already defined there
        n = name or obj.__name__
        if not n in Utils.g_module.__dict__:
            setattr(Utils.g_module, n, obj)

    for k in [dist, distclean, distcheck, clean, install, uninstall]:
        set_def(k)

    set_def(Configure.ConfigurationContext, 'configure_context')

    for k in ['build', 'clean', 'install', 'uninstall']:
        set_def(Build.BuildContext, k + '_context')

    # now parse the options from the user wscript file
    opt_obj = Options.Handler(Utils.g_module)
    opt_obj.curdir = candidate
    try:
        f = Utils.g_module.set_options
    except AttributeError:
        pass
    else:
        opt_obj.sub_options([''])
    opt_obj.parse_args()

    if not 'init' in Utils.g_module.__dict__:
        Utils.g_module.init = Utils.nada
    if not 'shutdown' in Utils.g_module.__dict__:
        Utils.g_module.shutdown = Utils.nada

    main()
def prepare(t, cwd, ver, wafdir):
    """Entry point called by the waf launcher: verify the version match,
    then run prepare_impl with top-level error handling."""
    if WAFVERSION != ver:
        msg = 'Version mismatch: waf %s <> wafadmin %s (wafdir %s)' % (ver, WAFVERSION, wafdir)
        print('\033[91mError: %s\033[0m' % msg)
        sys.exit(1)

    #"""
    try:
        prepare_impl(t, cwd, ver, wafdir)
    except Utils.WafError, e:
        error(str(e))
        sys.exit(1)
    except KeyboardInterrupt:
        Utils.pprint('RED', 'Interrupted')
        sys.exit(68)
    # the block below is a disabled profiling variant, toggled by moving the
    # leading '#' between the two triple-quote markers
    """
    import cProfile, pstats
    cProfile.runctx("import Scripting; Scripting.prepare_impl(t, cwd, ver, wafdir)", {},
        {'t': t, 'cwd':cwd, 'ver':ver, 'wafdir':wafdir},
        'profi.txt')
    p = pstats.Stats('profi.txt')
    p.sort_stats('time').print_stats(45)
    #"""
def main():
    """Run the queued commands one by one.

    Each command resolves to either the built-in configure/build functions
    or a function of the same name in the main wscript module; it is called
    with a freshly instantiated context.  A 'shutdown' command is appended
    automatically after the last one.
    """
    global commands
    commands = Options.arg_line[:]

    while commands:
        cmd = commands.pop(0)
        started = datetime.datetime.now()

        if cmd == 'configure':
            fun = configure
        elif cmd == 'build':
            fun = build
        else:
            fun = getattr(Utils.g_module, cmd, None)

        if not fun:
            raise Utils.WscriptError('No such command %r' % cmd)

        ctx = getattr(Utils.g_module, cmd + '_context', Utils.Context)()

        if cmd in ('init', 'shutdown', 'dist', 'distclean', 'distcheck'):
            # compatibility TODO remove in waf 1.6
            try:
                fun(ctx)
            except TypeError:
                fun()
        else:
            fun(ctx)

        elapsed = ''
        if not Options.options.progress_bar:
            elapsed = ' (%s)' % Utils.get_elapsed_time(started)

        if cmd != 'init' and cmd != 'shutdown':
            info('%r finished successfully%s' % (cmd, elapsed))

        if not commands and cmd != 'shutdown':
            commands.append('shutdown')
def configure(conf):
	# Run the project configuration: resolve srcdir/blddir, execute the
	# wscript configure functions, and persist the results (cache + lock file).
	# resolve the source directory: --srcdir option, then the wscript's
	# SRCDIR variable, then 'top' (forward-compat name), else '.'
	src = getattr(Options.options, SRCDIR, None)
	if not src: src = getattr(Utils.g_module, SRCDIR, None)
	if not src: src = getattr(Utils.g_module, 'top', None)
	if not src:
		src = '.'
		incomplete_src = 1
	src = os.path.abspath(src)
	# resolve the build directory: --blddir, BLDDIR, 'out', else 'build'
	bld = getattr(Options.options, BLDDIR, None)
	if not bld: bld = getattr(Utils.g_module, BLDDIR, None)
	if not bld: bld = getattr(Utils.g_module, 'out', None)
	if not bld:
		bld = 'build'
		incomplete_bld = 1
	if bld == '.':
		raise Utils.WafError('Setting blddir="." may cause distclean problems')
	bld = os.path.abspath(bld)
	try: os.makedirs(bld)
	except OSError: pass
	# It is not possible to compile specific targets in the configuration
	# this may cause configuration errors if autoconfig is set
	targets = Options.options.compile_targets
	Options.options.compile_targets = None
	Options.is_install = False
	conf.srcdir = src
	conf.blddir = bld
	conf.post_init()
	# 'incomplete_src'/'incomplete_bld' are only bound when the defaults
	# were used above; probing vars() detects that without extra flags
	if 'incomplete_src' in vars():
		conf.check_message_1('Setting srcdir to')
		conf.check_message_2(src)
	if 'incomplete_bld' in vars():
		conf.check_message_1('Setting blddir to')
		conf.check_message_2(bld)
	# calling to main wscript's configure()
	conf.sub_config([''])
	conf.store()
	# this will write a configure lock so that subsequent builds will
	# consider the current path as the root directory (see prepare_impl).
	# to remove: use 'waf distclean'
	env = Environment.Environment()
	env[BLDDIR] = bld
	env[SRCDIR] = src
	env['argv'] = sys.argv
	env['commands'] = Options.commands
	env['options'] = Options.options.__dict__
	# conf.hash & conf.files hold wscript files paths and hash
	# (used only by Configure.autoconfig)
	env['hash'] = conf.hash
	env['files'] = conf.files
	env['environ'] = dict(conf.environ)
	env['cwd'] = os.path.split(Utils.g_module.root_path)[0]
	if Utils.g_module.root_path != src:
		# in case the source dir is somewhere else
		env.store(os.path.join(src, Options.lockfile))
	env.store(Options.lockfile)
	# restore the --targets selection for the following build commands
	Options.options.compile_targets = targets
def clean(bld):
	'''removes the build files'''
	# the lock file written at configuration time records srcdir/blddir;
	# without it there is nothing we can clean
	try:
		settings = Environment.Environment(Options.lockfile)
	except IOError:
		raise Utils.WafError('Nothing to clean (project not configured)')
	bld.load_dirs(settings[SRCDIR], settings[BLDDIR])
	bld.load_envs()
	bld.is_install = 0 # False
	# read the scripts - and set the path to the wscript path (useful for srcdir='/foo/bar')
	bld.add_subdirs([os.path.split(Utils.g_module.root_path)[0]])
	try:
		bld.clean()
	finally:
		# persist the build state even when the cleanup is interrupted
		bld.save()
def check_configured(bld):
	# When Configure.autoconfig is enabled, re-run the configuration
	# automatically whenever the configuration inputs changed, and return
	# a (possibly fresh) build context.
	if not Configure.autoconfig:
		return bld
	conf_cls = getattr(Utils.g_module, 'configure_context', Utils.Context)
	bld_cls = getattr(Utils.g_module, 'build_context', Utils.Context)
	def reconf(proj):
		# replay the configuration with the command line saved in the
		# lock file, then restore the current command-line state
		back = (Options.commands, Options.options.__dict__, Logs.zones, Logs.verbose)
		Options.commands = proj['commands']
		Options.options.__dict__ = proj['options']
		conf = conf_cls()
		conf.environ = proj['environ']
		configure(conf)
		(Options.commands, Options.options.__dict__, Logs.zones, Logs.verbose) = back
	try:
		proj = Environment.Environment(Options.lockfile)
	except IOError:
		# never configured: configure from scratch with the current options
		conf = conf_cls()
		configure(conf)
	else:
		try:
			bld = bld_cls()
			bld.load_dirs(proj[SRCDIR], proj[BLDDIR])
			bld.load_envs()
		except Utils.WafError:
			# the cache is unusable: reconfigure from the lock file data
			reconf(proj)
			return bld_cls()
	try:
		proj = Environment.Environment(Options.lockfile)
	except IOError:
		raise Utils.WafError('Auto-config: project does not configure (bug)')
	# hash the configuration inputs (wscript files) to detect changes
	h = 0
	try:
		for file in proj['files']:
			if file.endswith('configure'):
				h = hash((h, Utils.readf(file)))
			else:
				mod = Utils.load_module(file)
				h = hash((h, mod.waf_hash_val))
	except (OSError, IOError):
		warn('Reconfiguring the project: a file is unavailable')
		reconf(proj)
	else:
		if (h != proj['hash']):
			warn('Reconfiguring the project: the configuration has changed')
			reconf(proj)
	# always hand back a fresh build context
	return bld_cls()
def install(bld):
	'''installs the build files'''
	# make sure the project is (re)configured, then build and install
	bld = check_configured(bld)
	Options.commands['uninstall'] = False
	Options.commands['install'] = True
	Options.is_install = True
	bld.is_install = INSTALL
	build_impl(bld)
	bld.install()
def uninstall(bld):
	'''removes the installed files'''
	Options.commands['install'] = False
	Options.commands['uninstall'] = True
	Options.is_install = True
	bld.is_install = UNINSTALL
	try:
		# temporarily make every task report SKIP_ME so that nothing is
		# actually compiled: the build is only "replayed" so bld.install()
		# can enumerate and remove the files it would have installed
		def runnable_status(self):
			return SKIP_ME
		setattr(Task.Task, 'runnable_status_back', Task.Task.runnable_status)
		setattr(Task.Task, 'runnable_status', runnable_status)
		build_impl(bld)
		bld.install()
	finally:
		# always restore the original method, even on error
		setattr(Task.Task, 'runnable_status', Task.Task.runnable_status_back)
def build(bld):
	# plain compilation: neither installing nor uninstalling
	bld = check_configured(bld)
	Options.commands['uninstall'] = False
	Options.commands['install'] = False
	Options.is_install = False
	bld.is_install = 0 # False
	return build_impl(bld)
def build_impl(bld):
	# compile the project and/or install the files
	try:
		proj = Environment.Environment(Options.lockfile)
	except IOError:
		raise Utils.WafError("Project not configured (run 'waf configure' first)")
	bld.load_dirs(proj[SRCDIR], proj[BLDDIR])
	bld.load_envs()
	info("Waf: Entering directory `%s'" % bld.bldnode.abspath())
	# read the wscript files, starting from the project root
	bld.add_subdirs([os.path.split(Utils.g_module.root_path)[0]])
	# execute something immediately before the build starts
	bld.pre_build()
	try:
		bld.compile()
	finally:
		# terminate the progress bar line before printing anything else
		if Options.options.progress_bar: print('')
		info("Waf: Leaving directory `%s'" % bld.bldnode.abspath())
	# execute something immediately after a successful build
	bld.post_build()
	bld.install()
# file/directory names that are never copied into a source distribution
excludes = '.bzr .bzrignore .git .gitignore .svn CVS .cvsignore .arch-ids {arch} SCCS BitKeeper .hg _MTN _darcs Makefile Makefile.in config.log .gitattributes .hgignore .hgtags'.split()
# suffixes that are never copied into a source distribution
# (fixed: 'tar.gz' was missing its leading dot, which also excluded
# unrelated files such as 'mortar.gz')
dist_exts = '~ .rej .orig .pyc .pyo .bak .tar.bz2 .tar.gz .zip .swp'.split()
def dont_dist(name, src, build_dir):
	"""Return True if the entry *name* (located in directory *src*) must be
	left out of the distribution archive (see dist/copytree)."""
	global excludes, dist_exts
	if (name.startswith(',,')
		or name.startswith('++')
		or name.startswith('.waf')
		or (src == '.' and name == Options.lockfile)
		or name in excludes
		or name == build_dir
		):
		return True
	for ext in dist_exts:
		if name.endswith(ext):
			return True
	return False
# like shutil.copytree, but excludes the files rejected by dont_dist()
# and raises exceptions immediately instead of collecting them
def copytree(src, dst, build_dir):
	"""Recursively copy *src* into the new directory *dst* (in the spirit of
	shutil.copytree), skipping every entry rejected by dont_dist()."""
	entries = os.listdir(src)
	os.makedirs(dst)
	for entry in entries:
		if dont_dist(entry, src, build_dir):
			continue
		source = os.path.join(src, entry)
		target = os.path.join(dst, entry)
		if os.path.isdir(source):
			copytree(source, target, build_dir)
		else:
			shutil.copy2(source, target)
# TODO in waf 1.6, change this method if "srcdir == blddir" is allowed
def distclean(ctx=None):
'''removes the build directory'''
global commands
lst = os.listdir('.')
for f in lst:
if f == Options.lockfile:
try:
proj = Environment.Environment(f)
except:
Logs.warn('could not read %r' % f)
continue
try:
shutil.rmtree(proj[BLDDIR])
except IOError:
pass
except OSError, e:
if e.errno != errno.ENOENT:
Logs.warn('project %r cannot be removed' % proj[BLDDIR])
try:
os.remove(f)
except OSError, e:
if e.errno != errno.ENOENT:
Logs.warn('file %r cannot be removed' % f)
# remove the local waf cache
if not commands and f.startswith('.waf'):
shutil.rmtree(f, ignore_errors=True)
# FIXME waf 1.6 a unique ctx parameter, and remove the optional appname and version
def dist(appname='', version=''):
	'''makes a tarball for redistributing the sources'''
	# returns the archive file name
	import tarfile
	if not appname: appname = Utils.g_module.APPNAME
	if not version: version = Utils.g_module.VERSION
	tmp_folder = appname + '-' + version
	if g_gz in ['gz', 'bz2']:
		arch_name = tmp_folder + '.tar.' + g_gz
	else:
		arch_name = tmp_folder + '.' + 'zip'
	# remove the previous dir
	try:
		shutil.rmtree(tmp_folder)
	except (OSError, IOError):
		pass
	# remove the previous archive
	try:
		os.remove(arch_name)
	except (OSError, IOError):
		pass
	# copy the files into the temporary folder
	blddir = getattr(Utils.g_module, BLDDIR, None)
	if not blddir:
		blddir = getattr(Utils.g_module, 'out', None)
	copytree('.', tmp_folder, blddir)
	# undocumented hook for additional cleanup
	dist_hook = getattr(Utils.g_module, 'dist_hook', None)
	if dist_hook:
		back = os.getcwd()
		os.chdir(tmp_folder)
		try:
			dist_hook()
		finally:
			# go back to the root directory
			os.chdir(back)
	if g_gz in ['gz', 'bz2']:
		tar = tarfile.open(arch_name, 'w:' + g_gz)
		tar.add(tmp_folder)
		tar.close()
	else:
		Utils.zip_folder(tmp_folder, arch_name, tmp_folder)
	# sha is only available pre-2.5; hashlib is the modern module
	try: from hashlib import sha1 as sha
	except ImportError: from sha import sha
	try:
		digest = " (sha=%r)" % sha(Utils.readf(arch_name)).hexdigest()
	except Exception:
		# the digest is informational only; narrowed from a bare 'except:'
		# which also swallowed KeyboardInterrupt
		digest = ''
	info('New archive created: %s%s' % (arch_name, digest))
	if os.path.exists(tmp_folder): shutil.rmtree(tmp_folder)
	return arch_name
# FIXME waf 1.6 a unique ctx parameter, and remove the optional appname and version
def distcheck(appname='', version='', subdir=''):
	'''checks if the sources compile (tarball from 'dist')'''
	import tempfile, tarfile
	if not appname: appname = Utils.g_module.APPNAME
	if not version: version = Utils.g_module.VERSION
	waf = os.path.abspath(sys.argv[0])
	tarball = dist(appname, version)
	path = appname + '-' + version
	# remove any previous instance
	if os.path.exists(path):
		shutil.rmtree(path)
	t = tarfile.open(tarball)
	# NOTE(review): members are extracted without path sanitization; this is
	# acceptable only because the archive was just created by dist() above
	for x in t: t.extract(x)
	t.close()
	# build_path is the directory for the waf invocation
	if subdir:
		build_path = os.path.join(path, subdir)
	else:
		build_path = path
	# run a full configure/build/install/uninstall cycle into a scratch dir
	instdir = tempfile.mkdtemp('.inst', '%s-%s' % (appname, version))
	ret = Utils.pproc.Popen([waf, 'configure', 'build', 'install', 'uninstall', '--destdir=' + instdir], cwd=build_path).wait()
	if ret:
		raise Utils.WafError('distcheck failed with code %i' % ret)
	# after uninstall the scratch dir must be gone, else files were leaked
	if os.path.exists(instdir):
		raise Utils.WafError('distcheck succeeded, but files were left in %s' % instdir)
	shutil.rmtree(path)
# FIXME remove in Waf 1.6 (kept for compatibility)
def add_subdir(dir, bld):
	# compatibility shim: forward to the context's recurse method
	bld.recurse(dir, 'build')

1171
tools/wafadmin/Task.py

File diff suppressed because it is too large

588
tools/wafadmin/TaskGen.py

@ -1,588 +0,0 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2005-2008 (ita)
"""
The class task_gen encapsulates the creation of task objects (low-level code)
The instances can have various parameters, but the creation of task nodes (Task.py)
is delayed. To achieve this, various methods are called from the method "apply"
The class task_gen contains lots of methods, and a configuration table:
* the methods to call (self.meths) can be specified dynamically (removing, adding, ..)
* the order of the methods (self.prec or by default task_gen.prec) is configurable
* new methods can be inserted dynamically without pasting old code
Additionally, task_gen provides the method apply_core
* file extensions are mapped to methods: def meth(self, name_or_node)
* if a mapping is not found in self.mappings, it is searched in task_gen.mappings
* when called, the functions may modify self.allnodes to re-add source to process
* the mappings can map an extension or a filename (see the code below)
WARNING: subclasses must reimplement the clone method
"""
import os, traceback, copy
import Build, Task, Utils, Logs, Options
from Logs import debug, error, warn
from Constants import *
# common misspellings of task generator attribute names, mapped to the
# canonical attribute name (used by task_gen.__setattr__ to warn and correct)
typos = {
'sources':'source',
'targets':'target',
'include':'includes',
'define':'defines',
'importpath':'importpaths',
'install_var':'install_path',
'install_subdir':'install_path',
'inst_var':'install_path',
'inst_dir':'install_path',
'feature':'features',
}
class register_obj(type):
	"""Metaclass used instead of class decorators (not available here):
	every class whose name ends in '_taskgen' is recorded in
	task_gen.classes under the name with that suffix stripped."""
	def __init__(cls, name, bases, dct):
		super(register_obj, cls).__init__(name, bases, dct)
		suffix = '_taskgen'
		cls_name = cls.__name__
		if cls_name.endswith(suffix):
			task_gen.classes[cls_name.replace(suffix, '')] = cls
class task_gen(object):
	"""
	Most methods are of the form 'def meth(self):' without any parameters
	there are many of them, and they do many different things:
	* task creation
	* task results installation
	* environment modification
	* attribute addition/removal
	The inheritance approach is complicated
	* mixing several languages at once
	* subclassing is needed even for small changes
	* inserting new methods is complicated
	This new class uses a configuration table:
	* adding new methods easily
	* obtaining the order in which to call the methods
	* postponing the method calls (post() -> apply)
	Additionally, a 'traits' static attribute is provided:
	* this list contains methods
	* the methods can remove or add methods from self.meths
	Example1: the attribute 'staticlib' is set on an instance
	a method set in the list of traits is executed when the
	instance is posted, it finds that flag and adds another method for execution
	Example2: a method set in the list of traits finds the msvc
	compiler (from self.env['MSVC']==1); more methods are added to self.meths
	"""
	__metaclass__ = register_obj
	# class-level tables shared by all generators (instances may shadow them)
	mappings = {}                    # extension -> hook function
	mapped = {}                      # hook function name -> function
	prec = Utils.DefaultDict(list)   # method name -> methods that must run before it
	traits = Utils.DefaultDict(set)  # feature name -> set of method names
	classes = {}                     # filled by the register_obj metaclass
	def __init__(self, *kw, **kwargs):
		self.prec = Utils.DefaultDict(list)
		"map precedence of function names to call"
		# so we will have to play with directed acyclic graphs
		# detect cycles, etc
		self.source = ''
		self.target = ''
		# list of methods to execute - does not touch it by hand unless you know
		self.meths = []
		# list of mappings extension -> function
		self.mappings = {}
		# list of features (see the documentation on traits)
		self.features = list(kw)
		# not always a good idea
		self.tasks = []
		self.default_chmod = O644
		self.default_install_path = None
		# kind of private, beware of what you put in it, also, the contents are consumed
		self.allnodes = []
		self.bld = kwargs.get('bld', Build.bld)
		self.env = self.bld.env.copy()
		self.path = self.bld.path # emulate chdir when reading scripts
		self.name = '' # give a name to the target (static+shlib with the same targetname ambiguity)
		# provide a unique id
		self.idx = self.bld.idx[self.path.id] = self.bld.idx.get(self.path.id, 0) + 1
		for key, val in kwargs.iteritems():
			setattr(self, key, val)
		self.bld.task_manager.add_task_gen(self)
		self.bld.all_task_gen.append(self)
	def __str__(self):
		return ("<task_gen '%s' of type %s defined in %s>"
			% (self.name or self.target, self.__class__.__name__, str(self.path)))
	def __setattr__(self, name, attr):
		"""Intercept attribute assignment to warn about and fix common typos."""
		real = typos.get(name, name)
		if real != name:
			warn('typo %s -> %s' % (name, real))
			if Logs.verbose > 0:
				traceback.print_stack()
		object.__setattr__(self, real, attr)
	def to_list(self, value):
		"helper: returns a list"
		if isinstance(value, str): return value.split()
		else: return value
	def apply(self):
		"order the methods to execute using self.prec or task_gen.prec"
		keys = set(self.meths)
		# add the methods listed in the features
		self.features = Utils.to_list(self.features)
		for x in self.features + ['*']:
			st = task_gen.traits[x]
			if not st:
				warn('feature %r does not exist - bind at least one method to it' % x)
			keys.update(st)
		# copy the precedence table
		prec = {}
		prec_tbl = self.prec or task_gen.prec
		for x in prec_tbl:
			if x in keys:
				prec[x] = prec_tbl[x]
		# elements disconnected (no precedence constraint at all)
		tmp = []
		for a in keys:
			for x in prec.values():
				if a in x: break
			else:
				tmp.append(a)
		# topological sort
		out = []
		while tmp:
			e = tmp.pop()
			if e in keys: out.append(e)
			try:
				nlst = prec[e]
			except KeyError:
				pass
			else:
				del prec[e]
				for x in nlst:
					for y in prec:
						if x in prec[y]:
							break
					else:
						tmp.append(x)
		if prec: raise Utils.WafError("graph has a cycle %s" % str(prec))
		out.reverse()
		self.meths = out
		# then we run the methods in order
		debug('task_gen: posting %s %d', self, id(self))
		for x in out:
			try:
				v = getattr(self, x)
			except AttributeError:
				raise Utils.WafError("tried to retrieve %s which is not a valid method" % x)
			debug('task_gen: -> %s (%d)', x, id(self))
			v()
	def post(self):
		"runs the code to create the tasks, do not subclass"
		if not self.name:
			if isinstance(self.target, list):
				self.name = ' '.join(self.target)
			else:
				self.name = self.target
		if getattr(self, 'posted', None):
			#error("OBJECT ALREADY POSTED" + str( self))
			return
		self.apply()
		debug('task_gen: posted %s', self.name)
		self.posted = True
	def get_hook(self, ext):
		"""Return the hook for the extension *ext*: instance mappings take
		precedence over the class-level table; None when not found."""
		try: return self.mappings[ext]
		except KeyError:
			try: return task_gen.mappings[ext]
			except KeyError: return None
	# TODO waf 1.6: always set the environment
	# TODO waf 1.6: create_task(self, name, inputs, outputs)
	def create_task(self, name, src=None, tgt=None, env=None):
		"""Instantiate the task class registered under *name*, wire its
		inputs/outputs and record it in self.tasks."""
		env = env or self.env
		task = Task.TaskBase.classes[name](env.copy(), generator=self)
		if src:
			task.set_inputs(src)
		if tgt:
			task.set_outputs(tgt)
		self.tasks.append(task)
		return task
	def name_to_obj(self, name):
		return self.bld.name_to_obj(name, self.env)
	def find_sources_in_dirs(self, dirnames, excludes=[], exts=[]):
		"""
		The attributes "excludes" and "exts" must be lists to avoid the confusion
		find_sources_in_dirs('a', 'b', 'c') <-> find_sources_in_dirs('a b c')
		do not use absolute paths
		do not use paths outside of the source tree
		the files or folder beginning by . are not returned
		# TODO: remove in Waf 1.6
		"""
		err_msg = "'%s' attribute must be a list"
		if not isinstance(excludes, list):
			raise Utils.WscriptError(err_msg % 'excludes')
		if not isinstance(exts, list):
			raise Utils.WscriptError(err_msg % 'exts')
		lst = []
		#make sure dirnames is a list helps with dirnames with spaces
		dirnames = self.to_list(dirnames)
		# default to the extensions that have a registered hook
		ext_lst = exts or list(self.mappings.keys()) + list(task_gen.mappings.keys())
		for name in dirnames:
			anode = self.path.find_dir(name)
			if not anode or not anode.is_child_of(self.bld.srcnode):
				raise Utils.WscriptError("Unable to use '%s' - either because it's not a relative path" \
					", or it's not child of '%s'." % (name, self.bld.srcnode))
			self.bld.rescan(anode)
			for name in self.bld.cache_dir_contents[anode.id]:
				# ignore hidden files
				if name.startswith('.'):
					continue
				(base, ext) = os.path.splitext(name)
				if ext in ext_lst and not name in lst and not name in excludes:
					lst.append((anode.relpath_gen(self.path) or '.') + os.path.sep + name)
		lst.sort()
		self.source = self.to_list(self.source)
		if not self.source: self.source = lst
		else: self.source += lst
	def clone(self, env):
		"""Duplicate this generator, sharing path/features but copying the
		other attributes; *env* may be an env name or an env instance."""
		newobj = task_gen(bld=self.bld)
		for x in self.__dict__:
			if x in ['env', 'bld']:
				continue
			elif x in ["path", "features"]:
				setattr(newobj, x, getattr(self, x))
			else:
				setattr(newobj, x, copy.copy(getattr(self, x)))
		newobj.__class__ = self.__class__
		if isinstance(env, str):
			newobj.env = self.bld.all_envs[env].copy()
		else:
			newobj.env = env.copy()
		return newobj
	def get_inst_path(self):
		return getattr(self, '_install_path', getattr(self, 'default_install_path', ''))
	def set_inst_path(self, val):
		self._install_path = val
	# install_path falls back to default_install_path until assigned
	install_path = property(get_inst_path, set_inst_path)
	def get_chmod(self):
		return getattr(self, '_chmod', getattr(self, 'default_chmod', O644))
	def set_chmod(self, val):
		self._chmod = val
	# chmod falls back to default_chmod until assigned
	chmod = property(get_chmod, set_chmod)
def declare_extension(var, func):
	"""Map each file extension in *var* (string or list) to the hook *func*
	in the class-level task_gen.mappings table."""
	try:
		for x in Utils.to_list(var):
			task_gen.mappings[x] = func
	except Exception:
		# narrowed from a bare 'except:' (which also hid KeyboardInterrupt);
		# only an invalid 'var' type can fail here
		raise Utils.WscriptError('declare_extension takes either a list or a string %r' % var)
	task_gen.mapped[func.__name__] = func
def declare_order(*k):
	"""Record that the method names in *k* must execute in the given order."""
	assert(len(k) > 1)
	# walk consecutive pairs; the table is reversed:
	# task_gen.prec[later] lists the methods that must run before it
	for earlier, later in zip(k, k[1:]):
		if not earlier in task_gen.prec[later]:
			task_gen.prec[later].append(earlier)
def declare_chain(name='', action='', ext_in='', ext_out='', reentrant=True, color='BLUE',
	install=0, before=[], after=[], decider=None, rule=None, scan=None):
	"""
	see Tools/flex.py for an example
	while i do not like such wrappers, some people really do
	"""
	action = action or rule
	# a string action becomes a shell-style rule; a callable is wrapped directly
	if isinstance(action, str):
		act = Task.simple_task_type(name, action, color=color)
	else:
		act = Task.task_type_from_func(name, action, color=color)
	act.ext_in = tuple(Utils.to_list(ext_in))
	act.ext_out = tuple(Utils.to_list(ext_out))
	act.before = Utils.to_list(before)
	act.after = Utils.to_list(after)
	act.scan = scan
	def x_file(self, node):
		# per-source hook: compute the output extension(s), optionally via
		# the user-supplied 'decider' callback
		if decider:
			ext = decider(self, node)
		else:
			ext = ext_out
		if isinstance(ext, str):
			out_source = node.change_ext(ext)
			if reentrant:
				# feed the output back so other chains may process it
				self.allnodes.append(out_source)
		elif isinstance(ext, list):
			out_source = [node.change_ext(x) for x in ext]
			if reentrant:
				# reentrant may be an int: only the first n outputs are re-added
				for i in xrange((reentrant is True) and len(out_source) or reentrant):
					self.allnodes.append(out_source[i])
		else:
			# XXX: useless: it will fail on Utils.to_list above...
			raise Utils.WafError("do not know how to process %s" % str(ext))
		tsk = self.create_task(name, node, out_source)
		if node.__class__.bld.is_install:
			tsk.install = install
	declare_extension(act.ext_in, x_file)
def bind_feature(name, methods):
	"""Associate the given method name(s) with the feature *name*."""
	task_gen.traits[name].update(Utils.to_list(methods))
"""
All the following decorators are registration decorators, i.e. they add an attribute to the current class
(task_gen and its derivatives), with same name as func, which points to func itself.
For example:
@taskgen
def sayHi(self):
print("hi")
Now taskgen.sayHi() may be called
If python were really smart, it could infer itself the order of methods by looking at the
attributes. A prerequisite for execution is to have the attribute set before.
Intelligent compilers binding aspect-oriented programming and parallelization, what a nice topic for studies.
"""
def taskgen(func):
	"""Decorator: attach *func* as a method of the task_gen class."""
	setattr(task_gen, func.__name__, func)
	return func
def feature(*k):
	"""Decorator: attach the function to task_gen and register it so it
	runs for every feature name given in *k*."""
	def deco(func):
		setattr(task_gen, func.__name__, func)
		for feat in k:
			task_gen.traits[feat].update([func.__name__])
		return func
	return deco
def before(*k):
	"""Decorator: attach the function to task_gen and make it execute
	before the methods named in *k*."""
	def deco(func):
		setattr(task_gen, func.__name__, func)
		me = func.__name__
		for other in k:
			# prec[other] lists the methods that must run before 'other'
			lst = task_gen.prec[other]
			if me not in lst:
				lst.append(me)
		return func
	return deco
def after(*k):
	"""Decorator: attach the function to task_gen and make it execute
	after the methods named in *k*."""
	def deco(func):
		setattr(task_gen, func.__name__, func)
		# prec[func] lists the methods that must run before func
		lst = task_gen.prec[func.__name__]
		for other in k:
			if other not in lst:
				lst.append(other)
		return func
	return deco
def extension(var):
	"""Decorator: attach the function to task_gen and register it as the
	hook for the file extension(s) in *var* (string or list)."""
	def deco(func):
		setattr(task_gen, func.__name__, func)
		try:
			for x in Utils.to_list(var):
				task_gen.mappings[x] = func
		except Exception:
			# narrowed from a bare 'except:' so that SystemExit and
			# KeyboardInterrupt are no longer converted into WafError
			raise Utils.WafError('extension takes either a list or a string %r' % var)
		task_gen.mapped[func.__name__] = func
		return func
	return deco
# TODO make certain the decorators may be used here
def apply_core(self):
	"""Process the attribute source
	transform the names into file nodes
	try to process the files by name first, later by extension"""
	# get the list of folders to use by the scanners
	# all our objects share the same include paths anyway
	find_resource = self.path.find_resource
	for filename in self.to_list(self.source):
		# if self.mappings or task_gen.mappings contains a file of the same name
		x = self.get_hook(filename)
		if x:
			x(self, filename)
		else:
			node = find_resource(filename)
			if not node: raise Utils.WafError("source not found: '%s' in '%s'" % (filename, str(self.path)))
			self.allnodes.append(node)
	# note: hooks may append to self.allnodes while this loop runs
	for node in self.allnodes:
		# self.mappings or task_gen.mappings map the file extension to a function
		x = self.get_hook(node.suffix())
		if not x:
			raise Utils.WafError("Cannot guess how to process %s (got mappings %r in %r) -> try conf.check_tool(..)?" % \
				(str(node), self.__class__.mappings.keys(), self.__class__))
		x(self, node)
# run for every generator, whatever its features
feature('*')(apply_core)
def exec_rule(self):
	"""Process the attribute rule, when provided the method apply_core will be disabled
	"""
	if not getattr(self, 'rule', None):
		return
	# someone may have removed it already
	try:
		self.meths.remove('apply_core')
	except ValueError:
		pass
	# get the function and the variables
	func = self.rule
	vars2 = []
	if isinstance(func, str):
		# use the shell by default for user-defined commands
		(func, vars2) = Task.compile_fun('', self.rule, shell=getattr(self, 'shell', True))
		func.code = self.rule
	# create the task class
	name = getattr(self, 'name', None) or self.target or self.rule
	if not isinstance(name, str):
		# fall back to the unique generator index as the class name
		name = str(self.idx)
	cls = Task.task_type_from_func(name, func, getattr(self, 'vars', vars2))
	# now create one instance
	tsk = self.create_task(name)
	dep_vars = getattr(self, 'dep_vars', ['ruledeps'])
	if dep_vars:
		tsk.dep_vars = dep_vars
	# make the rule text itself a dependency, so editing it triggers a rebuild
	if isinstance(self.rule, str):
		tsk.env.ruledeps = self.rule
	else:
		# only works if the function is in a global module such as a waf tool
		tsk.env.ruledeps = Utils.h_fun(self.rule)
	# we assume that the user knows that without inputs or outputs
	#if not getattr(self, 'target', None) and not getattr(self, 'source', None):
	#	cls.quiet = True
	if getattr(self, 'target', None):
		cls.quiet = True
		tsk.outputs = [self.path.find_or_declare(x) for x in self.to_list(self.target)]
	if getattr(self, 'source', None):
		cls.quiet = True
		tsk.inputs = []
		for x in self.to_list(self.source):
			y = self.path.find_resource(x)
			if not y:
				raise Utils.WafError('input file %r could not be found (%r)' % (x, self.path.abspath()))
			tsk.inputs.append(y)
	if self.allnodes:
		tsk.inputs.extend(self.allnodes)
	if getattr(self, 'scan', None):
		cls.scan = self.scan
	if getattr(self, 'install_path', None):
		tsk.install_path = self.install_path
	if getattr(self, 'cwd', None):
		tsk.cwd = self.cwd
	if getattr(self, 'on_results', None):
		Task.update_outputs(cls)
	if getattr(self, 'always', None):
		Task.always_run(cls)
	# propagate the ordering attributes to the generated task class
	for x in ['after', 'before', 'ext_in', 'ext_out']:
		setattr(cls, x, getattr(self, x, []))
# runs for every generator, before apply_core so it can cancel it
feature('*')(exec_rule)
before('apply_core')(exec_rule)
def sequence_order(self):
	"""
	add a strict sequential constraint between the tasks generated by task generators
	it uses the fact that task generators are posted in order
	it will not post objects which belong to other folders
	there is also an awesome trick for executing the method in last position
	to use:
	bld(features='javac seq')
	bld(features='jar seq')
	to start a new sequence, set the attribute seq_start, for example:
	obj.seq_start = True
	"""
	# the "trick": re-append this method so it runs after all the others
	if self.meths and self.meths[-1] != 'sequence_order':
		self.meths.append('sequence_order')
		return
	if getattr(self, 'seq_start', None):
		return
	# all the tasks previously declared must be run before these
	if getattr(self.bld, 'prev', None):
		self.bld.prev.post()
		for x in self.bld.prev.tasks:
			for y in self.tasks:
				y.set_run_after(x)
	# remember this generator for the next one in the sequence
	self.bld.prev = self
feature('seq')(sequence_order)

4
tools/wafadmin/Tools/__init__.py

@ -1,4 +0,0 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2006 (ita)

36
tools/wafadmin/Tools/ar.py

@ -1,36 +0,0 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2006-2008 (ita)
# Ralf Habacker, 2006 (rh)
"ar and ranlib"
import os, sys
import Task, Utils
from Configure import conftest
# command-line template for creating a static library archive
ar_str = '${AR} ${ARFLAGS} ${AR_TGT_F}${TGT} ${AR_SRC_F}${SRC}'
cls = Task.simple_task_type('static_link', ar_str, color='YELLOW', ext_in='.o', ext_out='.bin', shell=False)
cls.maxjobs = 1 # at most one static_link task at a time
cls.install = Utils.nada
# remove the output in case it already exists
old = cls.run
def wrap(self):
	# delete any stale archive first, so the result only contains the
	# current inputs, then delegate to the original run method
	try: os.remove(self.outputs[0].abspath(self.env))
	except OSError: pass
	return old(self)
setattr(cls, 'run', wrap)
def detect(conf):
	# locate the archiver and index tool, and set the default flags
	# ('r' insert members, 'c' create if missing, 's' write an index)
	conf.find_program('ar', var='AR')
	conf.find_program('ranlib', var='RANLIB')
	conf.env.ARFLAGS = 'rcs'
@conftest
def find_ar(conf):
	# configuration helper: load the 'ar' tool and fail hard when the
	# archiver is missing, since static libraries cannot be built without it
	v = conf.env
	conf.check_tool('ar')
	if not v['AR']: conf.fatal('ar is required for static libraries - not found')

100
tools/wafadmin/Tools/cc.py

@ -1,100 +0,0 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2006 (ita)
"Base for c programs/libraries"
import os
import TaskGen, Build, Utils, Task
from Logs import debug
import ccroot
from TaskGen import feature, before, extension, after
# env variables merged into the generator's flag variables by init_cc
g_cc_flag_vars = [
'CCDEPS', 'FRAMEWORK', 'FRAMEWORKPATH',
'STATICLIB', 'LIB', 'LIBPATH', 'LINKFLAGS', 'RPATH',
'CCFLAGS', 'CPPPATH', 'CPPFLAGS', 'CCDEFINES']
# file extensions handled by the c_hook below
EXT_CC = ['.c']
g_cc_type_vars = ['CCFLAGS', 'LINKFLAGS']
# TODO remove in waf 1.6
class cc_taskgen(ccroot.ccroot_abstract):
	"""Compatibility alias; registered as 'cc' via the register_obj metaclass."""
	pass
@feature('cc')
@before('apply_type_vars')
@after('default_cc')
def init_cc(self):
	"""Merge the c-specific flag/type variables into the generator and
	verify that a c compiler was configured."""
	self.p_flag_vars = set(self.p_flag_vars).union(g_cc_flag_vars)
	self.p_type_vars = set(self.p_type_vars).union(g_cc_type_vars)
	if not self.env['CC_NAME']:
		raise Utils.WafError("At least one compiler (gcc, ..) must be selected")
@feature('cc')
@after('apply_incpaths')
def apply_obj_vars_cc(self):
	"""Build _CCINCFLAGS from INC_PATHS and CPPPATH (must run after
	apply_incpaths, which fills INC_PATHS)."""
	env = self.env
	append = env.append_unique
	inc_fmt = env['CPPPATH_ST']
	# user include paths come first, in both build- and source-dir form
	for node in env['INC_PATHS']:
		append('_CCINCFLAGS', inc_fmt % node.bldpath(env))
		append('_CCINCFLAGS', inc_fmt % node.srcpath(env))
	# then the plain library include paths
	for incdir in env['CPPPATH']:
		append('_CCINCFLAGS', inc_fmt % incdir)
@feature('cc')
@after('apply_lib_vars')
def apply_defines_cc(self):
	"""after uselib is set for CCDEFINES"""
	self.defines = getattr(self, 'defines', [])
	lst = self.to_list(self.defines) + self.to_list(self.env['CCDEFINES'])
	milst = []
	# now process the local defines, dropping duplicates but keeping order
	for defi in lst:
		if not defi in milst:
			milst.append(defi)
	# CCDEFINES_<lib> variables contributed by each uselib entry
	libs = self.to_list(self.uselib)
	for l in libs:
		val = self.env['CCDEFINES_'+l]
		if val: milst += val
	# split each 'NAME=VALUE' define into 'NAME VALUE' pairs
	self.env['DEFLINES'] = ["%s %s" % (x[0], Utils.trimquotes('='.join(x[1:]))) for x in [y.split('=') for y in milst]]
	# and format them with the compiler's define syntax (e.g. -D%s)
	y = self.env['CCDEFINES_ST']
	self.env['_CCDEFFLAGS'] = [y%x for x in milst]
@extension(EXT_CC)
def c_hook(self, node):
	# create the compilation task: cpp or cc
	if getattr(self, 'obj_ext', None):
		obj_ext = self.obj_ext
	else:
		# include the generator index to keep object names unique
		obj_ext = '_%d.o' % self.idx
	task = self.create_task('cc', node, node.change_ext(obj_ext))
	try:
		self.compiled_tasks.append(task)
	except AttributeError:
		# compiled_tasks is created by the 'cc' feature methods
		raise Utils.WafError('Have you forgotten to set the feature "cc" on %s?' % str(self))
	return task
# compile and link command-line templates, expanded against the task env
cc_str = '${CC} ${CCFLAGS} ${CPPFLAGS} ${_CCINCFLAGS} ${_CCDEFFLAGS} ${CC_SRC_F}${SRC} ${CC_TGT_F}${TGT}'
cls = Task.simple_task_type('cc', cc_str, 'GREEN', ext_out='.o', ext_in='.c', shell=False)
cls.scan = ccroot.scan # header dependency scanner (preprocessor based)
cls.vars.append('CCDEPS')
link_str = '${LINK_CC} ${CCLNK_SRC_F}${SRC} ${CCLNK_TGT_F}${TGT[0].abspath(env)} ${LINKFLAGS}'
cls = Task.simple_task_type('cc_link', link_str, color='YELLOW', ext_in='.o', ext_out='.bin', shell=False)
cls.maxjobs = 1 # at most one cc_link task at a time
cls.install = Utils.nada

625
tools/wafadmin/Tools/ccroot.py

@ -1,625 +0,0 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2005-2008 (ita)
"base for all c/c++ programs and libraries"
import os, sys, re
import TaskGen, Task, Utils, preproc, Logs, Build, Options
from Logs import error, debug, warn
from Utils import md5
from TaskGen import taskgen, after, before, feature
from Constants import *
from Configure import conftest
try:
from cStringIO import StringIO
except ImportError:
from io import StringIO
import config_c # <- necessary for the configuration, do not touch
USE_TOP_LEVEL = False
def get_cc_version(conf, cc, gcc=False, icc=False):
	"""Run the preprocessor with -dM -E to dump its predefined macros and
	derive the compiler version and the target OS / binary format / CPU.
	Stores DEST_OS, DEST_BINFMT, DEST_CPU and CC_VERSION in conf.env and
	returns the macro dictionary."""
	cmd = cc + ['-dM', '-E', '-']
	try:
		p = Utils.pproc.Popen(cmd, stdin=Utils.pproc.PIPE, stdout=Utils.pproc.PIPE, stderr=Utils.pproc.PIPE)
		p.stdin.write('\n')
		out = p.communicate()[0]
	except Exception:
		# narrowed from a bare 'except:'; any failure to launch or talk to
		# the compiler is a fatal configuration error
		conf.fatal('could not determine the compiler version %r' % cmd)
	# PY3K: do not touch
	out = str(out)
	if gcc:
		if out.find('__INTEL_COMPILER') >= 0:
			conf.fatal('The intel compiler pretends to be gcc')
		if out.find('__GNUC__') < 0:
			conf.fatal('Could not determine the compiler type')
	if icc and out.find('__INTEL_COMPILER') < 0:
		conf.fatal('Not icc/icpc')
	k = {}
	if icc or gcc:
		# parse '#define NAME VALUE' lines into the k dictionary
		out = out.split('\n')
		import shlex
		for line in out:
			lst = shlex.split(line)
			if len(lst)>2:
				key = lst[1]
				val = lst[2]
				k[key] = val
		def isD(var):
			# the macro is defined
			return var in k
		def isT(var):
			# the macro is defined and truthy
			return var in k and k[var] != '0'
		# Some documentation is available at http://predef.sourceforge.net
		# The names given to DEST_OS must match what Utils.unversioned_sys_platform() returns.
		mp1 = {
			'__linux__'   : 'linux',
			'__GNU__'     : 'hurd',
			'__FreeBSD__' : 'freebsd',
			'__NetBSD__'  : 'netbsd',
			'__OpenBSD__' : 'openbsd',
			'__sun'       : 'sunos',
			'__hpux'      : 'hpux',
			'__sgi'       : 'irix',
			'_AIX'        : 'aix',
			'__CYGWIN__'  : 'cygwin',
			'__MSYS__'    : 'msys',
			'_UWIN'       : 'uwin',
			'_WIN64'      : 'win32',
			'_WIN32'      : 'win32',
			}
		for i in mp1:
			if isD(i):
				conf.env.DEST_OS = mp1[i]
				break
		else:
			if isD('__APPLE__') and isD('__MACH__'):
				conf.env.DEST_OS = 'darwin'
			elif isD('__unix__'): # unix must be tested last as it's a generic fallback
				conf.env.DEST_OS = 'generic'
		if isD('__ELF__'):
			conf.env.DEST_BINFMT = 'elf'
		mp2 = {
				'__x86_64__'  : 'x86_64',
				'__i386__'    : 'x86',
				'__ia64__'    : 'ia',
				'__mips__'    : 'mips',
				'__sparc__'   : 'sparc',
				'__alpha__'   : 'alpha',
				'__arm__'     : 'arm',
				'__hppa__'    : 'hppa',
				'__powerpc__' : 'powerpc',
				}
		for i in mp2:
			if isD(i):
				conf.env.DEST_CPU = mp2[i]
				break
		debug('ccroot: dest platform: ' + ' '.join([conf.env[x] or '?' for x in ('DEST_OS', 'DEST_BINFMT', 'DEST_CPU')]))
		# NOTE(review): this assumes the gcc-style version macros are present;
		# a compiler without __GNUC__ etc. would raise KeyError here - confirm
		conf.env['CC_VERSION'] = (k['__GNUC__'], k['__GNUC_MINOR__'], k['__GNUC_PATCHLEVEL__'])
	return k
class DEBUG_LEVELS:
    """Symbolic names for the build debug levels.

    Will disappear in waf 1.6.
    """
    ULTRADEBUG = "ultradebug"
    DEBUG = "debug"
    RELEASE = "release"
    OPTIMIZED = "optimized"
    CUSTOM = "custom"

    # every known level, in decreasing verbosity order
    ALL = [ULTRADEBUG, DEBUG, RELEASE, OPTIMIZED, CUSTOM]
def scan(self):
    "look for .h the .cpp need"
    debug('ccroot: _scan_preprocessor(self, node, env, path_lst)')

    incpaths = self.env['INC_PATHS']

    # TODO waf 1.6 - assume the default input has exactly one file
    if len(self.inputs) == 1:
        only = self.inputs[0]
        (nodes, names) = preproc.get_deps(only, self.env, nodepaths = incpaths)
        if Logs.verbose:
            debug('deps: deps for %s: %r; unresolved %r', str(only), nodes, names)
        return (nodes, names)

    # several inputs: merge the per-input results, preserving discovery order
    merged_nodes = []
    merged_names = []
    visited = set()
    for node in self.inputs:
        (nodes, names) = preproc.get_deps(node, self.env, nodepaths = incpaths)
        if Logs.verbose:
            debug('deps: deps for %s: %r; unresolved %r', str(node), nodes, names)
        for dep in nodes:
            if id(dep) not in visited:
                visited.add(id(dep))
                merged_nodes.append(dep)
        for name in names:
            if name not in merged_names:
                merged_names.append(name)
    return (merged_nodes, merged_names)
class ccroot_abstract(TaskGen.task_gen):
    "Parent class for programs and libraries in languages c, c++ and moc (Qt)"
    def __init__(self, *k, **kw):
        # COMPAT remove in waf 1.6 TODO
        # old callers passed the type without the leading 'c' ('shlib' -> 'cshlib')
        if len(k) > 1:
            args = list(k)
            if args[1][0] != 'c':
                args[1] = 'c' + args[1]
            k = args
        TaskGen.task_gen.__init__(self, *k, **kw)
def get_target_name(self):
    """Map self.target through the platform naming pattern (e.g. lib%s.so)."""
    kind = 'program'
    for feat in self.features:
        if feat in ('cshlib', 'cstaticlib'):
            kind = feat.lstrip('c')

    pattern = self.env[kind + '_PATTERN'] or '%s'

    folder, name = os.path.split(self.target)

    if self.env.DEST_BINFMT == 'pe' and getattr(self, 'vnum', None) and 'cshlib' in self.features:
        # include the version in the dll file name,
        # the import lib file name stays unversionned.
        name = name + '-' + self.vnum.split('.')[0]

    return os.path.join(folder, pattern % name)
@feature('cc', 'cxx')
@before('apply_core')
def default_cc(self):
    """compiled_tasks attribute must be set before the '.c->.o' tasks can be created"""
    # provide harmless defaults for every attribute read by the other methods
    Utils.def_attrs(self,
        includes = '',
        defines= '',
        rpaths = '',
        uselib = '',
        uselib_local = '',
        add_objects = '',
        p_flag_vars = [],
        p_type_vars = [],
        compiled_tasks = [],
        link_task = None)

    # The only thing we need for cross-compilation is DEST_BINFMT.
    # At some point, we may reach a case where DEST_BINFMT is not enough, but for now it's sufficient.
    # Currently, cross-compilation is auto-detected only for the gnu and intel compilers.
    if not self.env.DEST_BINFMT:
        # Infer the binary format from the os name.
        self.env.DEST_BINFMT = Utils.unversioned_sys_platform_to_binary_format(
            self.env.DEST_OS or Utils.unversioned_sys_platform())

    # default install prefixes, derived from ${PREFIX}
    if not self.env.BINDIR: self.env.BINDIR = Utils.subst_vars('${PREFIX}/bin', self.env)
    if not self.env.LIBDIR: self.env.LIBDIR = Utils.subst_vars('${PREFIX}/lib${LIB_EXT}', self.env)
@feature('cprogram', 'dprogram', 'cstaticlib', 'dstaticlib', 'cshlib', 'dshlib')
def apply_verif(self):
    """no particular order, used for diagnostic"""
    # at least one source of inputs must be present
    has_input = (self.source
        or getattr(self, 'add_objects', None)
        or getattr(self, 'uselib_local', None)
        or getattr(self, 'obj_files', None))
    if not has_input:
        raise Utils.WafError('no source files specified for %s' % self)
    if not self.target:
        raise Utils.WafError('no target for %s' % self)
# TODO reference the d programs, shlibs in d.py, not here

@feature('cprogram', 'dprogram')
@after('default_cc')
@before('apply_core')
def vars_target_cprogram(self):
    # programs go to BINDIR and are made executable
    self.default_install_path = self.env.BINDIR
    self.default_chmod = O755
@after('default_cc')
@feature('cshlib', 'dshlib')
@before('apply_core')
def vars_target_cshlib(self):
    """Choose the default install location for shared libraries."""
    if self.env.DEST_BINFMT != 'pe':
        self.default_install_path = self.env.LIBDIR
        return
    # pe: dlls are installed next to the programs, and need the execute bit
    # set execute bit on libs to avoid 'permission denied' (issue 283)
    self.default_chmod = O755
    self.default_install_path = self.env.BINDIR
@feature('cprogram', 'dprogram', 'cstaticlib', 'dstaticlib', 'cshlib', 'dshlib')
@after('apply_link', 'vars_target_cprogram', 'vars_target_cshlib')
def default_link_install(self):
    """you may kill this method to inject your own installation for the first element
    any other install should only process its own nodes and not those from the others"""
    # install the first link output; install_path comes from the vars_target_* methods
    if self.install_path:
        self.bld.install_files(self.install_path, self.link_task.outputs[0], env=self.env, chmod=self.chmod)
@feature('cc', 'cxx')
@after('apply_type_vars', 'apply_lib_vars', 'apply_core')
def apply_incpaths(self):
    """used by the scanner
    after processing the uselib for CPPPATH
    after apply_core because some processing may add include paths
    """
    lst = []
    # TODO move the uselib processing out of here
    for lib in self.to_list(self.uselib):
        for path in self.env['CPPPATH_' + lib]:
            if not path in lst:
                lst.append(path)
    if preproc.go_absolute:
        for path in preproc.standard_includes:
            if not path in lst:
                lst.append(path)

    # the generator's own include list, deduplicated against the uselib paths
    for path in self.to_list(self.includes):
        if not path in lst:
            if preproc.go_absolute or not os.path.isabs(path):
                lst.append(path)
            else:
                # absolute path while not in absolute mode: pass it to the compiler only
                self.env.prepend_value('CPPPATH', path)

    # resolve each path to a node; '#' means relative to the project top-level
    for path in lst:
        node = None
        if os.path.isabs(path):
            if preproc.go_absolute:
                node = self.bld.root.find_dir(path)
        elif path[0] == '#':
            node = self.bld.srcnode
            if len(path) > 1:
                node = node.find_dir(path[1:])
        else:
            node = self.path.find_dir(path)

        if node:
            self.env.append_value('INC_PATHS', node)

    # TODO WAF 1.6
    if USE_TOP_LEVEL:
        self.env.append_value('INC_PATHS', self.bld.srcnode)
@feature('cc', 'cxx')
@after('init_cc', 'init_cxx')
@before('apply_lib_vars')
def apply_type_vars(self):
    """before apply_lib_vars because we modify uselib
    after init_cc and init_cxx because we need p_type_vars
    """
    for feat in self.features:
        if feat not in ('cprogram', 'cstaticlib', 'cshlib'):
            continue
        kind = feat.lstrip('c')

        # if the type defines uselib to add, add them
        extra = self.env[kind + '_USELIB']
        if extra:
            self.uselib = self.uselib + ' ' + extra

        # each compiler defines variables like 'shlib_CXXFLAGS', 'shlib_LINKFLAGS', etc
        # so when we make a task generator of the type shlib, CXXFLAGS are modified accordingly
        for var in self.p_type_vars:
            value = self.env['%s_%s' % (kind, var)]
            if value:
                self.env.append_value(var, value)
@feature('cprogram', 'cshlib', 'cstaticlib')
@after('apply_core')
def apply_link(self):
    """executes after apply_core for collecting 'compiled_tasks'
    use a custom linker if specified (self.link='name-of-custom-link-task')"""
    link = getattr(self, 'link', None)
    if not link:
        # pick the linker task from the features
        if 'cstaticlib' in self.features:
            link = 'static_link'
        elif 'cxx' in self.features:
            link = 'cxx_link'
        else:
            link = 'cc_link'

    tsk = self.create_task(link)
    # feed every object file produced by the compile tasks into the linker
    tsk.set_inputs([t.outputs[0] for t in self.compiled_tasks])
    tsk.set_outputs(self.path.find_or_declare(get_target_name(self)))
    self.link_task = tsk
@feature('cc', 'cxx')
@after('apply_link', 'init_cc', 'init_cxx', 'apply_core')
def apply_lib_vars(self):
    """after apply_link because of 'link_task'
    after default_cc because of the attribute 'uselib'

    Walks the uselib_local dependency graph breadth-first, wiring link order,
    library names/paths and exported include dirs, then applies the flag
    variables of the external uselib entries.
    """
    # after 'apply_core' in case if 'cc' if there is no link

    env = self.env

    # 1. the case of the libs defined in the project (visit ancestors first)
    # the ancestors external libraries (uselib) will be prepended
    self.uselib = self.to_list(self.uselib)
    names = self.to_list(self.uselib_local)

    seen = set([])
    tmp = Utils.deque(names) # consume a copy of the list of names
    while tmp:
        lib_name = tmp.popleft()
        # visit dependencies only once
        if lib_name in seen:
            continue

        y = self.name_to_obj(lib_name)
        if not y:
            raise Utils.WafError('object %r was not found in uselib_local (required by %r)' % (lib_name, self.name))
        y.post()
        seen.add(lib_name)

        # object has ancestors to process (shared libraries): add them to the end of the list
        if getattr(y, 'uselib_local', None):
            lst = y.to_list(y.uselib_local)
            if 'cshlib' in y.features or 'cprogram' in y.features:
                # static libs of an ancestor shlib/program are not propagated
                lst = [x for x in lst if not 'cstaticlib' in self.name_to_obj(x).features]
            tmp.extend(lst)

        # link task and flags
        if getattr(y, 'link_task', None):

            # strip any directory part from the target to get the library name
            link_name = y.target[y.target.rfind(os.sep) + 1:]
            if 'cstaticlib' in y.features:
                env.append_value('STATICLIB', link_name)
            elif 'cshlib' in y.features or 'cprogram' in y.features:
                # WARNING some linkers can link against programs
                env.append_value('LIB', link_name)

            # the order
            self.link_task.set_run_after(y.link_task)

            # for the recompilation
            dep_nodes = getattr(self.link_task, 'dep_nodes', [])
            self.link_task.dep_nodes = dep_nodes + y.link_task.outputs

            # add the link path too
            tmp_path = y.link_task.outputs[0].parent.bldpath(self.env)
            if not tmp_path in env['LIBPATH']: env.prepend_value('LIBPATH', tmp_path)

        # add ancestors uselib too - but only propagate those that have no staticlib
        for v in self.to_list(y.uselib):
            if not env['STATICLIB_' + v]:
                if not v in self.uselib:
                    self.uselib.insert(0, v)

        # if the library task generator provides 'export_incdirs', add to the include path
        # the export_incdirs must be a list of paths relative to the other library
        if getattr(y, 'export_incdirs', None):
            for x in self.to_list(y.export_incdirs):
                node = y.path.find_dir(x)
                if not node:
                    raise Utils.WafError('object %r: invalid folder %r in export_incdirs' % (y.target, x))
                self.env.append_unique('INC_PATHS', node)

    # 2. the case of the libs defined outside
    for x in self.uselib:
        for v in self.p_flag_vars:
            val = self.env[v + '_' + x]
            if val: self.env.append_value(v, val)
@feature('cprogram', 'cstaticlib', 'cshlib')
@after('init_cc', 'init_cxx', 'apply_link')
def apply_objdeps(self):
    "add the .o files produced by some other object files in the same manner as uselib_local"
    if not getattr(self, 'add_objects', None): return

    seen = []
    names = self.to_list(self.add_objects)
    # depth-first traversal implemented by prepending unseen ancestors to 'names'
    while names:
        x = names[0]

        # visit dependencies only once
        if x in seen:
            names = names[1:]
            continue

        # object does not exist ?
        y = self.name_to_obj(x)
        if not y:
            raise Utils.WafError('object %r was not found in uselib_local (required by add_objects %r)' % (x, self.name))

        # object has ancestors to process first ? update the list of names
        if getattr(y, 'add_objects', None):
            added = 0
            lst = y.to_list(y.add_objects)
            lst.reverse()
            for u in lst:
                if u in seen: continue
                added = 1
                names = [u]+names
            if added: continue # list of names modified, loop

        # safe to process the current object
        y.post()
        seen.append(x)

        # merge the ancestor's object files into our link
        for t in y.compiled_tasks:
            self.link_task.inputs.extend(t.outputs)
@feature('cprogram', 'cshlib', 'cstaticlib')
@after('apply_lib_vars')
def apply_obj_vars(self):
    """after apply_lib_vars for uselib

    Expands RPATH/LIBPATH/STATICLIB/LIB into LINKFLAGS using the
    per-platform format strings (*_ST variables). The emission order of the
    flags is significant for the linker.
    """
    v = self.env
    lib_st = v['LIB_ST']
    staticlib_st = v['STATICLIB_ST']
    libpath_st = v['LIBPATH_ST']
    staticlibpath_st = v['STATICLIBPATH_ST']
    rpath_st = v['RPATH_ST']

    app = v.append_unique

    if v['FULLSTATIC']:
        v.append_value('LINKFLAGS', v['FULLSTATIC_MARKER'])

    for i in v['RPATH']:
        if i and rpath_st:
            app('LINKFLAGS', rpath_st % i)

    for i in v['LIBPATH']:
        app('LINKFLAGS', libpath_st % i)
        app('LINKFLAGS', staticlibpath_st % i)

    if v['STATICLIB']:
        v.append_value('LINKFLAGS', v['STATICLIB_MARKER'])
        k = [(staticlib_st % i) for i in v['STATICLIB']]
        app('LINKFLAGS', k)

    # fully static binaries ?
    if not v['FULLSTATIC']:
        if v['STATICLIB'] or v['LIB']:
            v.append_value('LINKFLAGS', v['SHLIB_MARKER'])

    app('LINKFLAGS', [lib_st % i for i in v['LIB']])
@after('apply_link')
def process_obj_files(self):
    """Append the extra object files registered by add_obj_file to the link inputs."""
    if not hasattr(self, 'obj_files'):
        return
    for name in self.obj_files:
        node = self.path.find_resource(name)
        self.link_task.inputs.append(node)
@taskgen
def add_obj_file(self, file):
    """Small example on how to link object files as if they were source
    obj = bld.create_obj('cc')
    obj.add_obj_file('foo.o')"""
    if not hasattr(self, 'obj_files'):
        self.obj_files = []
    # make sure the collected files are consumed at the link step
    if 'process_obj_files' not in self.meths:
        self.meths.append('process_obj_files')
    self.obj_files.append(file)
# maps task generator attribute names (singular, lower-case) to the env
# variables that store them; plural and mixed-case forms are normalized
# before the lookup
c_attrs = {
    'cxxflag' : 'CXXFLAGS',
    'cflag' : 'CCFLAGS',
    'ccflag' : 'CCFLAGS',
    'linkflag' : 'LINKFLAGS',
    'ldflag' : 'LINKFLAGS',
    'lib' : 'LIB',
    'libpath' : 'LIBPATH',
    'staticlib': 'STATICLIB',
    'staticlibpath': 'STATICLIBPATH',
    'rpath' : 'RPATH',
    'framework' : 'FRAMEWORK',
    'frameworkpath' : 'FRAMEWORKPATH'
}
@feature('cc', 'cxx')
@before('init_cxx', 'init_cc')
@before('apply_lib_vars', 'apply_obj_vars', 'apply_incpaths', 'init_cc')
def add_extra_flags(self):
    """case and plural insensitive
    before apply_obj_vars for processing the library attributes
    """
    for attr in self.__dict__.keys():
        key = attr.lower()
        # normalize plurals: 'libs' -> 'lib', 'cflags' -> 'cflag'
        if key[-1] == 's':
            key = key[:-1]
        var = c_attrs.get(key, None)
        if var:
            self.env.append_unique(var, getattr(self, attr))
# ============ the code above must not know anything about import libs ==========

@feature('cshlib')
@after('apply_link', 'default_cc')
@before('apply_lib_vars', 'apply_objdeps', 'default_link_install')
def apply_implib(self):
    """On mswindows, handle dlls and their import libs
    the .dll.a is the import lib and it is required for linking so it is installed too
    """
    if not self.env.DEST_BINFMT == 'pe':
        return

    # this method takes over the installation entirely
    self.meths.remove('default_link_install')

    bindir = self.install_path
    if not bindir: return

    # install the dll in the bin dir
    dll = self.link_task.outputs[0]
    self.bld.install_files(bindir, dll, self.env, self.chmod)

    # add linker flags to generate the import lib
    implib = self.env['implib_PATTERN'] % os.path.split(self.target)[1]

    implib = dll.parent.find_or_declare(implib)
    self.link_task.outputs.append(implib)
    self.bld.install_as('${LIBDIR}/%s' % implib.name, implib, self.env)

    self.env.append_value('LINKFLAGS', (self.env['IMPLIB_ST'] % implib.bldpath(self.env)).split())
# ============ the code above must not know anything about vnum processing on unix platforms =========

@feature('cshlib')
@after('apply_link')
@before('apply_lib_vars', 'default_link_install')
def apply_vnum(self):
    """
    libfoo.so is installed as libfoo.so.1.2.3

    Replaces default_link_install: installs the fully-versioned library and
    creates the two shorter symlinks (soname and plain name).
    """
    if not getattr(self, 'vnum', '') or not 'cshlib' in self.features or os.name != 'posix' or self.env.DEST_BINFMT not in ('elf', 'mac-o'):
        return

    self.meths.remove('default_link_install')

    link = self.link_task
    nums = self.vnum.split('.')
    node = link.outputs[0]

    libname = node.name
    if libname.endswith('.dylib'):
        # darwin naming: libfoo.1.2.3.dylib / libfoo.1.dylib
        name3 = libname.replace('.dylib', '.%s.dylib' % self.vnum)
        name2 = libname.replace('.dylib', '.%s.dylib' % nums[0])
    else:
        # elf naming: libfoo.so.1.2.3 / libfoo.so.1
        name3 = libname + '.' + self.vnum
        name2 = libname + '.' + nums[0]

    # pass the soname (e.g. libfoo.so.1) to the linker when supported
    if self.env.SONAME_ST:
        v = self.env.SONAME_ST % name2
        self.env.append_value('LINKFLAGS', v.split())

    bld = self.bld
    # (a redundant recomputation of 'nums' was removed here)
    path = self.install_path
    if not path: return

    bld.install_as(path + os.sep + name3, node, env=self.env)
    bld.symlink_as(path + os.sep + name2, name3)
    bld.symlink_as(path + os.sep + libname, name3)

    # the following task is just to enable execution from the build dir :-/
    tsk = self.create_task('vnum')
    tsk.set_inputs([node])
    tsk.set_outputs(node.parent.find_or_declare(name2))
def exec_vnum_link(self):
    """Create the versioned symlink in the build dir; return 1 on failure."""
    dest = self.outputs[0].abspath(self.env)
    # drop a stale link/file first, if any
    try:
        os.remove(dest)
    except OSError:
        pass

    try:
        os.symlink(self.inputs[0].name, dest)
    except OSError:
        return 1
# register the 'vnum' symlink task class; quiet so it does not clutter the build output
cls = Task.task_type_from_func('vnum', func=exec_vnum_link, ext_in='.bin', color='CYAN')
cls.quiet = 1
# ============ the --as-needed flag should added during the configuration, not at runtime =========

@conftest
def add_as_needed(conf):
    """With the gnu toolchain on elf platforms, drop unused shared library dependencies."""
    if conf.env.DEST_BINFMT == 'elf':
        if 'gcc' in (conf.env.CXX_NAME, conf.env.CC_NAME):
            conf.env.append_unique('LINKFLAGS', '--as-needed')

66
tools/wafadmin/Tools/compiler_cc.py

@ -1,66 +0,0 @@
#!/usr/bin/env python
# encoding: utf-8
# Matthias Jahn jahn dôt matthias ât freenet dôt de, 2007 (pmarat)
import os, sys, imp, types, ccroot
import optparse
import Utils, Configure, Options
from Logs import debug
# candidate C compiler tools to probe per platform, in order of preference
c_compiler = {
    'win32': ['gcc'],
    'cygwin': ['gcc'],
    'darwin': ['gcc'],
    'aix': ['xlc', 'gcc'],
    'linux': ['gcc', 'icc', 'suncc'],
    'sunos': ['gcc', 'suncc'],
    'irix': ['gcc'],
    'hpux': ['gcc'],
    'default': ['gcc']
}
def __list_possible_compiler(platform):
    """Return the ordered C compiler candidates for *platform* (or the default list)."""
    return c_compiler.get(platform, c_compiler["default"])
def detect(conf):
    """
    for each compiler for the platform, try to configure the compiler
    in theory the tools should raise a configuration error if the compiler
    pretends to be something it is not (setting CC=icc and trying to configure gcc)
    """
    try: test_for_compiler = Options.options.check_c_compiler
    except AttributeError: conf.fatal("Add set_options(opt): opt.tool_options('compiler_cc')")

    orig = conf.env
    for compiler in test_for_compiler.split():
        # probe each candidate against a scratch copy of the environment
        conf.env = orig.copy()
        try:
            conf.check_tool(compiler)
        except Configure.ConfigurationError, e:
            debug('compiler_cc: %r' % e)
        else:
            if conf.env['CC']:
                # success: merge the probe results back into the real env
                orig.table = conf.env.get_merged_dict()
                conf.env = orig
                conf.check_message(compiler, '', True)
                conf.env['COMPILER_CC'] = compiler
                break
            conf.check_message(compiler, '', False)
            break
    else:
        # the loop ran out of candidates without breaking
        conf.fatal('could not configure a c compiler!')
def set_options(opt):
    """Register --check-c-compiler and the options of each candidate compiler tool."""
    build_platform = Utils.unversioned_sys_platform()
    candidates = ' '.join(__list_possible_compiler(build_platform))
    group = opt.add_option_group("C Compiler Options")
    group.add_option('--check-c-compiler', default="%s" % candidates,
        help='On this platform (%s) the following C-Compiler will be checked by default: "%s"' % (build_platform, candidates),
        dest="check_c_compiler")
    for name in candidates.split():
        opt.tool_options('%s' % name, option_group=group)

61
tools/wafadmin/Tools/compiler_cxx.py

@ -1,61 +0,0 @@
#!/usr/bin/env python
# encoding: utf-8
# Matthias Jahn jahn dôt matthias ât freenet dôt de 2007 (pmarat)
import os, sys, imp, types, ccroot
import optparse
import Utils, Configure, Options
from Logs import debug
# candidate C++ compiler tools to probe per platform, in order of preference
cxx_compiler = {
    'win32': ['g++'],
    'cygwin': ['g++'],
    'darwin': ['g++'],
    'aix': ['xlc++', 'g++'],
    'linux': ['g++', 'icpc', 'sunc++'],
    'sunos': ['g++', 'sunc++'],
    'irix': ['g++'],
    'hpux': ['g++'],
    'default': ['g++']
}
def __list_possible_compiler(platform):
    """Return the ordered C++ compiler candidates for *platform* (or the default list)."""
    return cxx_compiler.get(platform, cxx_compiler["default"])
def detect(conf):
    # try each candidate c++ compiler until one configures successfully
    try: test_for_compiler = Options.options.check_cxx_compiler
    except AttributeError: raise Configure.ConfigurationError("Add set_options(opt): opt.tool_options('compiler_cxx')")

    orig = conf.env
    for compiler in test_for_compiler.split():
        try:
            # probe against a scratch copy of the environment
            conf.env = orig.copy()
            conf.check_tool(compiler)
        except Configure.ConfigurationError, e:
            debug('compiler_cxx: %r' % e)
        else:
            if conf.env['CXX']:
                # success: merge the probe results back into the real env
                orig.table = conf.env.get_merged_dict()
                conf.env = orig
                conf.check_message(compiler, '', True)
                conf.env['COMPILER_CXX'] = compiler
                break
            conf.check_message(compiler, '', False)
            break
    else:
        # the loop ran out of candidates without breaking
        conf.fatal('could not configure a cxx compiler!')
def set_options(opt):
    """Register --check-cxx-compiler and the options of each candidate compiler tool."""
    build_platform = Utils.unversioned_sys_platform()
    candidates = ' '.join(__list_possible_compiler(build_platform))
    group = opt.add_option_group('C++ Compiler Options')
    group.add_option('--check-cxx-compiler', default="%s" % candidates,
        help='On this platform (%s) the following C++ Compiler will be checked by default: "%s"' % (build_platform, candidates),
        dest="check_cxx_compiler")
    for name in candidates.split():
        opt.tool_options('%s' % name, option_group=group)

33
tools/wafadmin/Tools/compiler_d.py

@ -1,33 +0,0 @@
#!/usr/bin/env python
# encoding: utf-8
# Carlos Rafael Giani, 2007 (dv)
import os, sys, imp, types
import Utils, Configure, Options
def detect(conf):
    """Configure the first working D compiler (gdc/dmd, order set by --check-dmd-first)."""
    if getattr(Options.options, 'check_dmd_first', None):
        test_for_compiler = ['dmd', 'gdc']
    else:
        test_for_compiler = ['gdc', 'dmd']

    for d_compiler in test_for_compiler:
        try:
            conf.check_tool(d_compiler)
        except Exception:
            # narrowed from a bare 'except:'; a failed probe is expected,
            # but KeyboardInterrupt/SystemExit must not be swallowed
            pass
        else:
            break
    else:
        conf.fatal('no suitable d compiler was found')
def set_options(opt):
    """Register --check-dmd-first and the options of both D compiler tools."""
    group = opt.add_option_group('D Compiler Options')
    group.add_option('--check-dmd-first', action='store_true',
        help='checks for the gdc compiler before dmd (default is the other way round)',
        dest='check_dmd_first',
        default=False)
    for name in ['gdc', 'dmd']:
        opt.tool_options('%s' % name, option_group=group)

729
tools/wafadmin/Tools/config_c.py

@ -1,729 +0,0 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2005-2008 (ita)
"""
c/c++ configuration routines
"""
import os, imp, sys, shlex, shutil
from Utils import md5
import Build, Utils, Configure, Task, Options, Logs, TaskGen
from Constants import *
from Configure import conf, conftest
# check_cfg keyword -> comparison operator, used only for the progress message
cfg_ver = {
    'atleast-version': '>=',
    'exact-version': '==',
    'max-version': '<=',
}

# test fragment: the named function must be declared
SNIP1 = '''
int main() {
void *p;
p=(void*)(%s);
return 0;
}
'''

# test fragment: the named type must exist and have a size
SNIP2 = '''
int main() {
if ((%(type_name)s *) 0) return 0;
if (sizeof (%(type_name)s)) return 0;
}
'''

# test fragment: minimal program (header/flag checks)
SNIP3 = '''
int main() {
return 0;
}
'''
def parse_flags(line, uselib, env):
    """pkg-config still has bugs on some platforms, and there are many -config programs, parsing flags is necessary :-/"""
    tokens = shlex.split(line)
    while tokens:
        flag = tokens.pop(0)
        prefix = flag[:2]
        rest = flag[2:]

        # two-letter prefixes may carry their argument attached or as the next token
        if prefix == '-I' or prefix == '/I':
            if not rest: rest = tokens.pop(0)
            env.append_unique('CPPPATH_' + uselib, rest)
        elif prefix == '-D':
            if not rest: rest = tokens.pop(0)
            env.append_unique('CXXDEFINES_' + uselib, rest)
            env.append_unique('CCDEFINES_' + uselib, rest)
        elif prefix == '-l':
            if not rest: rest = tokens.pop(0)
            env.append_unique('LIB_' + uselib, rest)
        elif prefix == '-L':
            if not rest: rest = tokens.pop(0)
            env.append_unique('LIBPATH_' + uselib, rest)
        elif flag == '-pthread' or flag.startswith('+'):
            env.append_unique('CCFLAGS_' + uselib, flag)
            env.append_unique('CXXFLAGS_' + uselib, flag)
            env.append_unique('LINKFLAGS_' + uselib, flag)
        elif flag == '-framework':
            env.append_unique('FRAMEWORK_' + uselib, tokens.pop(0))
        elif flag.startswith('-F'):
            env.append_unique('FRAMEWORKPATH_' + uselib, flag[2:])
        elif flag.startswith('-std'):
            env.append_unique('CCFLAGS_' + uselib, flag)
            env.append_unique('LINKFLAGS_' + uselib, flag)
        elif flag.startswith('-Wl'):
            env.append_unique('LINKFLAGS_' + uselib, flag)
        elif flag.startswith('-m') or flag.startswith('-f'):
            env.append_unique('CCFLAGS_' + uselib, flag)
            env.append_unique('CXXFLAGS_' + uselib, flag)
@conf
def ret_msg(self, f, kw):
    """execute a function, when provided"""
    # plain strings pass through; callables get the keyword dict
    return f if isinstance(f, str) else f(kw)
@conf
def validate_cfg(self, kw):
    """Fill in the default command and messages for a check_cfg call."""
    if not 'path' in kw:
        kw['path'] = 'pkg-config --errors-to-stdout --print-errors'

    # pkg-config version
    if 'atleast_pkgconfig_version' in kw:
        if not 'msg' in kw:
            kw['msg'] = 'Checking for pkg-config version >= %s' % kw['atleast_pkgconfig_version']
        return

    # pkg-config --modversion
    if 'modversion' in kw:
        return

    if 'variables' in kw:
        if not 'msg' in kw:
            kw['msg'] = 'Checking for %s variables' % kw['package']
        return

    # checking for the version of a module, for the moment, one thing at a time
    for x in cfg_ver.keys():
        y = x.replace('-', '_')
        if y in kw:
            if not 'package' in kw:
                raise ValueError('%s requires a package' % x)
            if not 'msg' in kw:
                kw['msg'] = 'Checking for %s %s %s' % (kw['package'], cfg_ver[x], kw[y])
            return

    # plain package/tool lookup
    if not 'msg' in kw:
        kw['msg'] = 'Checking for %s' % (kw['package'] or kw['path'])
    if not 'okmsg' in kw:
        kw['okmsg'] = 'yes'
    if not 'errmsg' in kw:
        kw['errmsg'] = 'not found'
@conf
def cmd_and_log(self, cmd, kw):
    """Run a shell command, log its output, return stdout; conf.fatal on failure."""
    Logs.debug('runner: %s\n' % cmd)
    if self.log:
        self.log.write('%s\n' % cmd)

    try:
        p = Utils.pproc.Popen(cmd, stdout=Utils.pproc.PIPE, stderr=Utils.pproc.PIPE, shell=True)
        (out, err) = p.communicate()
    except OSError, e:
        # NOTE(review): self.log is written unconditionally here although it is
        # guarded elsewhere in this function - verify self.log is always set
        self.log.write('error %r' % e)
        self.fatal(str(e))

    # PY3K-style normalization of the captured streams
    out = str(out)
    err = str(err)

    if self.log:
        self.log.write(out)
        self.log.write(err)

    if p.returncode:
        # provide a default error message for the caller's report
        if not kw.get('errmsg', ''):
            if kw.get('mandatory', False):
                kw['errmsg'] = out.strip()
            else:
                kw['errmsg'] = 'no'
        self.fatal('fail')
    return out
@conf
def exec_cfg(self, kw):
    """Execute the -config/pkg-config command prepared by validate_cfg."""
    # pkg-config version
    if 'atleast_pkgconfig_version' in kw:
        cmd = '%s --atleast-pkgconfig-version=%s' % (kw['path'], kw['atleast_pkgconfig_version'])
        self.cmd_and_log(cmd, kw)
        if not 'okmsg' in kw:
            kw['okmsg'] = 'yes'
        return

    # checking for the version of a module
    for x in cfg_ver:
        y = x.replace('-', '_')
        if y in kw:
            self.cmd_and_log('%s --%s=%s %s' % (kw['path'], x, kw[y], kw['package']), kw)
            if not 'okmsg' in kw:
                kw['okmsg'] = 'yes'
            self.define(self.have_define(kw.get('uselib_store', kw['package'])), 1, 0)
            break

    # retrieving the version of a module
    if 'modversion' in kw:
        version = self.cmd_and_log('%s --modversion %s' % (kw['path'], kw['modversion']), kw).strip()
        self.define('%s_VERSION' % Utils.quote_define_name(kw.get('uselib_store', kw['modversion'])), version)
        return version

    # retrieving variables of a module
    if 'variables' in kw:
        env = kw.get('env', self.env)
        uselib = kw.get('uselib_store', kw['package'].upper())
        vars = Utils.to_list(kw['variables'])
        for v in vars:
            val = self.cmd_and_log('%s --variable=%s %s' % (kw['path'], v, kw['package']), kw).strip()
            var = '%s_%s' % (uselib, v)
            env[var] = val
        if not 'okmsg' in kw:
            kw['okmsg'] = 'yes'
        return

    # build the final command line: path [--define-variable=..]* args package
    lst = [kw['path']]
    for key, val in kw.get('define_variable', {}).iteritems():
        lst.append('--define-variable=%s=%s' % (key, val))
    lst.append(kw.get('args', ''))
    lst.append(kw['package'])

    # so we assume the command-line will output flags to be parsed afterwards
    cmd = ' '.join(lst)
    ret = self.cmd_and_log(cmd, kw)
    if not 'okmsg' in kw:
        kw['okmsg'] = 'yes'

    self.define(self.have_define(kw.get('uselib_store', kw['package'])), 1, 0)
    parse_flags(ret, kw.get('uselib_store', kw['package'].upper()), kw.get('env', self.env))
    return ret
@conf
def check_cfg(self, *k, **kw):
    """
    for pkg-config mostly, but also all the -config tools
    conf.check_cfg(path='mpicc', args='--showme:compile --showme:link', package='', uselib_store='OPEN_MPI')
    conf.check_cfg(package='dbus-1', variables='system_bus_default_address session_bus_services_dir')
    """
    self.validate_cfg(kw)
    if 'msg' in kw:
        self.check_message_1(kw['msg'])
    ret = None
    try:
        ret = self.exec_cfg(kw)
    except Configure.ConfigurationError, e:
        # report the failure; fatal only for mandatory checks
        if 'errmsg' in kw:
            self.check_message_2(kw['errmsg'], 'YELLOW')
        if 'mandatory' in kw and kw['mandatory']:
            if Logs.verbose > 1:
                raise
            else:
                self.fatal('the configuration failed (see %r)' % self.log.name)
    else:
        kw['success'] = ret
        if 'okmsg' in kw:
            self.check_message_2(self.ret_msg(kw['okmsg'], kw))

    return ret
# the idea is the following: now that we are certain
# that all the code here is only for c or c++, it is
# easy to put all the logic in one function
#
# this should prevent code duplication (ita)

# env: an optional environment (modified -> provide a copy)
# compiler: cc or cxx - it tries to guess what is best
# type: cprogram, cshlib, cstaticlib
# code: a c code to execute
# uselib_store: where to add the variables
# uselib: parameters to use for building
# define: define to set, like FOO in #define FOO, if not set, add /* #undef FOO */
# execute: True or False - will return the result of the execution

@conf
def validate_c(self, kw):
    """validate the parameters for the test method"""

    if not 'env' in kw:
        kw['env'] = self.env.copy()

    env = kw['env']
    if not 'compiler' in kw:
        # guess: prefer c++ when a c++ compiler was configured
        kw['compiler'] = 'cc'
        if env['CXX_NAME'] and Task.TaskBase.classes.get('cxx', None):
            kw['compiler'] = 'cxx'
            if not self.env['CXX']:
                self.fatal('a c++ compiler is required')
        else:
            if not self.env['CC']:
                self.fatal('a c compiler is required')

    if not 'type' in kw:
        kw['type'] = 'cprogram'

    assert not(kw['type'] != 'cprogram' and kw.get('execute', 0)), 'can only execute programs'

    #if kw['type'] != 'program' and kw.get('execute', 0):
    #	raise ValueError, 'can only execute programs'

    def to_header(dct):
        # build the '#include <...>' preamble from header_name
        if 'header_name' in dct:
            dct = Utils.to_list(dct['header_name'])
            return ''.join(['#include <%s>\n' % x for x in dct])
        return ''

    # set the file name
    if not 'compile_mode' in kw:
        kw['compile_mode'] = (kw['compiler'] == 'cxx') and 'cxx' or 'cc'

    if not 'compile_filename' in kw:
        kw['compile_filename'] = 'test.c' + ((kw['compile_mode'] == 'cxx') and 'pp' or '')

    #OSX
    if 'framework_name' in kw:
        try: TaskGen.task_gen.create_task_macapp
        except AttributeError: self.fatal('frameworks require the osx tool')

        fwkname = kw['framework_name']
        if not 'uselib_store' in kw:
            kw['uselib_store'] = fwkname.upper()

        if not kw.get('no_header', False):
            if not 'header_name' in kw:
                kw['header_name'] = []
            fwk = '%s/%s.h' % (fwkname, fwkname)
            if kw.get('remove_dot_h', None):
                fwk = fwk[:-2]
            kw['header_name'] = Utils.to_list(kw['header_name']) + [fwk]

        kw['msg'] = 'Checking for framework %s' % fwkname
        kw['framework'] = fwkname
        #kw['frameworkpath'] = set it yourself

    # build the test code fragment from the kind of check requested
    if 'function_name' in kw:
        fu = kw['function_name']
        if not 'msg' in kw:
            kw['msg'] = 'Checking for function %s' % fu
        kw['code'] = to_header(kw) + SNIP1 % fu
        if not 'uselib_store' in kw:
            kw['uselib_store'] = fu.upper()
        if not 'define_name' in kw:
            kw['define_name'] = self.have_define(fu)

    elif 'type_name' in kw:
        tu = kw['type_name']
        if not 'msg' in kw:
            kw['msg'] = 'Checking for type %s' % tu
        if not 'header_name' in kw:
            kw['header_name'] = 'stdint.h'
        kw['code'] = to_header(kw) + SNIP2 % {'type_name' : tu}
        if not 'define_name' in kw:
            kw['define_name'] = self.have_define(tu.upper())

    elif 'header_name' in kw:
        if not 'msg' in kw:
            kw['msg'] = 'Checking for header %s' % kw['header_name']

        l = Utils.to_list(kw['header_name'])
        assert len(l)>0, 'list of headers in header_name is empty'

        kw['code'] = to_header(kw) + SNIP3

        if not 'uselib_store' in kw:
            kw['uselib_store'] = l[0].upper()

        if not 'define_name' in kw:
            kw['define_name'] = self.have_define(l[0])

    if 'lib' in kw:
        if not 'msg' in kw:
            kw['msg'] = 'Checking for library %s' % kw['lib']
        if not 'uselib_store' in kw:
            kw['uselib_store'] = kw['lib'].upper()

    if 'staticlib' in kw:
        if not 'msg' in kw:
            kw['msg'] = 'Checking for static library %s' % kw['staticlib']
        if not 'uselib_store' in kw:
            kw['uselib_store'] = kw['staticlib'].upper()

    if 'fragment' in kw:
        # an additional code fragment may be provided to replace the predefined code
        # in custom headers
        kw['code'] = kw['fragment']
        if not 'msg' in kw:
            kw['msg'] = 'Checking for custom code'
        if not 'errmsg' in kw:
            kw['errmsg'] = 'no'

    for (flagsname,flagstype) in [('cxxflags','compiler'), ('cflags','compiler'), ('linkflags','linker')]:
        if flagsname in kw:
            if not 'msg' in kw:
                kw['msg'] = 'Checking for %s flags %s' % (flagstype, kw[flagsname])
            if not 'errmsg' in kw:
                kw['errmsg'] = 'no'

    # final defaults
    if not 'execute' in kw:
        kw['execute'] = False

    if not 'errmsg' in kw:
        kw['errmsg'] = 'not found'

    if not 'okmsg' in kw:
        kw['okmsg'] = 'yes'

    if not 'code' in kw:
        kw['code'] = SNIP3

    if not kw.get('success'): kw['success'] = None

    assert 'msg' in kw, 'invalid parameters, read http://freehackers.org/~tnagy/wafbook/single.html#config_helpers_c'
@conf
def post_check(self, *k, **kw):
    "set the variables after a test was run successfully"

    # success means: program ran (execute) or compiled with status 0
    is_success = False
    if kw['execute']:
        if kw['success']:
            is_success = True
    else:
        is_success = (kw['success'] == 0)

    if 'define_name' in kw:
        if 'header_name' in kw or 'function_name' in kw or 'type_name' in kw or 'fragment' in kw:
            if kw['execute']:
                key = kw['success']
                if isinstance(key, str):
                    if key:
                        # program output becomes the define value
                        self.define(kw['define_name'], key, quote=kw.get('quote', 1))
                    else:
                        self.define_cond(kw['define_name'], True)
                else:
                    self.define_cond(kw['define_name'], False)
            else:
                self.define_cond(kw['define_name'], is_success)

    if is_success and 'uselib_store' in kw:
        import cc, cxx
        # propagate the kw values (lib, includes, ...) into VAR_uselibstore
        for k in set(cc.g_cc_flag_vars).union(cxx.g_cxx_flag_vars):
            lk = k.lower()
            # inconsistency: includes -> CPPPATH
            if k == 'CPPPATH': lk = 'includes'
            if k == 'CXXDEFINES': lk = 'defines'
            if k == 'CCDEFINES': lk = 'defines'
            if lk in kw:
                val = kw[lk]
                # remove trailing slash
                if isinstance(val, str):
                    val = val.rstrip(os.path.sep)
                self.env.append_unique(k + '_' + kw['uselib_store'], val)
@conf
def check(self, *k, **kw):
    # so this will be the generic function
    # it will be safer to use check_cxx or check_cc
    self.validate_c(kw)
    self.check_message_1(kw['msg'])
    ret = None
    try:
        ret = self.run_c_code(*k, **kw)
    except Configure.ConfigurationError, e:
        # report the failure; fatal only for mandatory checks
        self.check_message_2(kw['errmsg'], 'YELLOW')
        if 'mandatory' in kw and kw['mandatory']:
            if Logs.verbose > 1:
                raise
            else:
                self.fatal('the configuration failed (see %r)' % self.log.name)
    else:
        kw['success'] = ret
        self.check_message_2(self.ret_msg(kw['okmsg'], kw))

    self.post_check(*k, **kw)
    # compile-only checks report a boolean; execute checks report the output
    if not kw.get('execute', False):
        return ret == 0
    return ret
@conf
def run_c_code(self, *k, **kw):
    """Compile (and optionally run) the configuration test prepared by validate_c.

    Builds kw['code'] in a throwaway folder under the build dir using a
    temporary BuildContext; returns 0 on successful compilation, or the
    program's stdout when kw['execute'] is set. Failures go through self.fatal.
    """
    test_f_name = kw['compile_filename']

    # note: 'k' is deliberately reused as a counter, shadowing the *k parameter
    k = 0
    while k < 10000:
        # make certain to use a fresh folder - necessary for win32
        dir = os.path.join(self.blddir, '.conf_check_%d' % k)

        # if the folder already exists, remove it
        try:
            shutil.rmtree(dir)
        except OSError:
            pass

        try:
            os.stat(dir)
        except OSError:
            break

        k += 1

    try:
        os.makedirs(dir)
    except Exception:
        # narrowed from a bare 'except:' so KeyboardInterrupt propagates
        self.fatal('cannot create a configuration test folder %r' % dir)

    try:
        os.stat(dir)
    except Exception:
        # narrowed from a bare 'except:' so KeyboardInterrupt propagates
        self.fatal('cannot use the configuration test folder %r' % dir)

    bdir = os.path.join(dir, 'testbuild')

    if not os.path.exists(bdir):
        os.makedirs(bdir)

    env = kw['env']

    # write the test source file
    dest = open(os.path.join(dir, test_f_name), 'w')
    dest.write(kw['code'])
    dest.close()

    back = os.path.abspath('.')

    # a throwaway build context for the single test target
    bld = Build.BuildContext()
    bld.log = self.log
    bld.all_envs.update(self.all_envs)
    bld.all_envs['Release'] = env
    bld.lst_variants = bld.all_envs.keys()
    bld.load_dirs(dir, bdir)

    os.chdir(dir)

    bld.rescan(bld.srcnode)

    if not 'features' in kw:
        # conf.check(features='cc cprogram pyext', ...)
        kw['features'] = [kw['compile_mode'], kw['type']] # "cprogram cc"

    o = bld(features=kw['features'], source=test_f_name, target='testprog')

    for k, v in kw.iteritems():
        setattr(o, k, v)

    self.log.write("==>\n%s\n<==\n" % kw['code'])

    # compile the program
    try:
        bld.compile()
    except Utils.WafError:
        ret = Utils.ex_stack()
    else:
        ret = 0

    # chdir before returning
    os.chdir(back)

    if ret:
        self.log.write('command returned %r' % ret)
        self.fatal(str(ret))

    # if we need to run the program, try to get its result
    # keep the name of the program to execute
    if kw['execute']:
        lastprog = o.link_task.outputs[0].abspath(env)

        args = Utils.to_list(kw.get('exec_args', []))
        proc = Utils.pproc.Popen([lastprog] + args, stdout=Utils.pproc.PIPE, stderr=Utils.pproc.PIPE)
        (out, err) = proc.communicate()
        w = self.log.write
        w(str(out))
        w('\n')
        w(str(err))
        w('\n')
        w('returncode %r' % proc.returncode)
        w('\n')
        if proc.returncode:
            self.fatal(Utils.ex_stack())
        ret = out

    return ret
@conf
def check_cxx(self, *k, **kw):
    """Convenience wrapper around check() with the compiler preset to C++."""
    kw.update(compiler='cxx')
    return self.check(*k, **kw)
@conf
def check_cc(self, *k, **kw):
    """Convenience wrapper around check() with the compiler preset to C."""
    kw.update(compiler='cc')
    return self.check(*k, **kw)
@conf
def define(self, define, value, quote=1):
    """store a single define and its state into an internal list for later
    writing to a config header file. Value can only be
    a string or int; other types not supported. String
    values will appear properly quoted in the generated
    header file."""
    assert define and isinstance(define, str)

    # ordered_dict is for writing the configuration header in order
    tbl = self.env[DEFINES] or Utils.ordered_dict()

    # the user forgot to tell if the value is quoted or not
    if isinstance(value, str):
        if quote:
            # repr() escapes backslashes/control chars; the prepended '"'
            # and the [2:-1] slice strip repr's own quoting, then embedded
            # double quotes are escaped for the C header
            tbl[define] = '"%s"' % repr('"'+value)[2:-1].replace('"', '\\"')
        else:
            tbl[define] = value
    elif isinstance(value, int):
        tbl[define] = value
    else:
        raise TypeError('define %r -> %r must be a string or an int' % (define, value))

    # add later to make reconfiguring faster
    self.env[DEFINES] = tbl
    self.env[define] = value # <- not certain this is necessary
@conf
def undefine(self, define):
    """Record *define* as explicitly undefined, so the generated config
    header later emits a commented-out '#undef' for it."""
    assert define and isinstance(define, str)
    table = self.env[DEFINES] or Utils.ordered_dict()
    table[define] = UNDEFINED
    # store back to make reconfiguring faster
    self.env[DEFINES] = table
    self.env[define] = UNDEFINED
@conf
def define_cond(self, name, value):
    """Conditionally define a name.
    Formally equivalent to: if value: define(name, 1) else: undefine(name)"""
    if not value:
        self.undefine(name)
    else:
        self.define(name, 1)
@conf
def is_defined(self, key):
    """Return True when *key* was stored with a real value (i.e. defined,
    not marked UNDEFINED)."""
    defines = self.env[DEFINES]
    if not defines:
        return False
    if key not in defines:
        return False
    return defines[key] != UNDEFINED
@conf
def get_define(self, define):
    "return the value previously stored for *define*, or None when absent"
    defines = self.env[DEFINES]
    try:
        return defines[define]
    except KeyError:
        return None
@conf
def have_define(self, name):
    "prefix the define (HAVE_ unless a HAVE_PAT override is set) and sanitize its characters"
    pattern = self.__dict__.get('HAVE_PAT', 'HAVE_%s')
    return pattern % Utils.quote_define_name(name)
@conf
def write_config_header(self, configfile='', env='', guard='', top=False):
    "save the defines into a file"
    if not configfile: configfile = WAF_CONFIG_H
    waf_guard = guard or '_%s_WAF' % Utils.quote_define_name(configfile)

    # configfile -> absolute path
    # there is a good reason to concatenate first and to split afterwards
    if not env: env = self.env
    if top:
        diff = ''
    else:
        # path of the current wscript relative to the source root
        diff = Utils.diff_path(self.srcdir, self.curdir)
    full = os.sep.join([self.blddir, env.variant(), diff, configfile])
    full = os.path.normpath(full)
    (dir, base) = os.path.split(full)

    try: os.makedirs(dir)
    except: pass  # best effort: the folder may already exist

    dest = open(full, 'w')
    dest.write('/* Configuration header created by Waf - do not edit */\n')
    dest.write('#ifndef %s\n#define %s\n\n' % (waf_guard, waf_guard))
    dest.write(self.get_config_header())

    # config files are not removed on "waf clean"
    env.append_unique(CFG_FILES, os.path.join(diff, configfile))

    dest.write('\n#endif /* %s */\n' % waf_guard)
    dest.close()
@conf
def get_config_header(self):
    """Fill-in the contents of the config header. Override when you need to write your own config header."""
    tbl = self.env[DEFINES] or Utils.ordered_dict()

    def render(key):
        # one line per stored define, in insertion order
        value = tbl[key]
        if value is None:
            return '#define %s' % key
        if value is UNDEFINED:
            return '/* #undef %s */' % key
        return '#define %s %s' % (key, value)

    return "\n".join([render(key) for key in tbl.allkeys])
@conftest
def find_cpp(conf):
    """Locate the C preprocessor: env CPP, then the OS environment, then
    the PATH, finally falling back to the C or C++ compiler."""
    v = conf.env
    cpp = v['CPP'] or conf.environ.get('CPP')
    if not cpp:
        cpp = conf.find_program('cpp', var='CPP')
    if not cpp:
        cpp = v['CC']
    if not cpp:
        cpp = v['CXX']
    v['CPP'] = cpp
@conftest
def cc_add_flags(conf):
    """Import CFLAGS and CPPFLAGS from the OS environment into the C build variables."""
    conf.add_os_flags('CFLAGS', 'CCFLAGS')
    conf.add_os_flags('CPPFLAGS')
@conftest
def cxx_add_flags(conf):
    """Import CXXFLAGS and CPPFLAGS from the OS environment."""
    conf.add_os_flags('CXXFLAGS')
    conf.add_os_flags('CPPFLAGS')
@conftest
def link_add_flags(conf):
    """Import LINKFLAGS and LDFLAGS from the OS environment into LINKFLAGS."""
    conf.add_os_flags('LINKFLAGS')
    conf.add_os_flags('LDFLAGS', 'LINKFLAGS')
@conftest
def cc_load_tools(conf):
    """Load the generic C tool (task classes and feature methods)."""
    conf.check_tool('cc')
@conftest
def cxx_load_tools(conf):
    """Load the generic C++ tool (task classes and feature methods)."""
    conf.check_tool('cxx')

104
tools/wafadmin/Tools/cxx.py

@@ -1,104 +0,0 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2005 (ita)
"Base for c++ programs and libraries"
import TaskGen, Task, Utils
from Logs import debug
import ccroot # <- do not remove
from TaskGen import feature, before, extension, after
# environment variables read by the c++ feature methods (apply_lib_vars etc.)
g_cxx_flag_vars = [
    'CXXDEPS', 'FRAMEWORK', 'FRAMEWORKPATH',
    'STATICLIB', 'LIB', 'LIBPATH', 'LINKFLAGS', 'RPATH',
    'CXXFLAGS', 'CCFLAGS', 'CPPPATH', 'CPPFLAGS', 'CXXDEFINES']
"main cpp variables"

# file extensions dispatched to cxx_hook below
EXT_CXX = ['.cpp', '.cc', '.cxx', '.C', '.c++']

g_cxx_type_vars=['CXXFLAGS', 'LINKFLAGS']

# TODO remove in waf 1.6
class cxx_taskgen(ccroot.ccroot_abstract):
    # compatibility shim: the 'cxx' feature does all the work
    pass
@feature('cxx')
@before('apply_type_vars')
@after('default_cc')
def init_cxx(self):
    """Register the C++ flag/type variables and map .c files to the C++
    compiler when the 'cc' feature is not also active."""
    if not 'cc' in self.features:
        self.mappings['.c'] = TaskGen.task_gen.mappings['.cxx']

    self.p_flag_vars = set(self.p_flag_vars).union(g_cxx_flag_vars)
    self.p_type_vars = set(self.p_type_vars).union(g_cxx_type_vars)

    if not self.env['CXX_NAME']:
        raise Utils.WafError("At least one compiler (g++, ..) must be selected")
@feature('cxx')
@after('apply_incpaths')
def apply_obj_vars_cxx(self):
    """after apply_incpaths for INC_PATHS"""
    env = self.env
    app = env.append_unique
    cxxpath_st = env['CPPPATH_ST']

    # local flags come first
    # set the user-defined includes paths
    for i in env['INC_PATHS']:
        # each project include is added for both the build and source trees
        app('_CXXINCFLAGS', cxxpath_st % i.bldpath(env))
        app('_CXXINCFLAGS', cxxpath_st % i.srcpath(env))

    # set the library include paths
    for i in env['CPPPATH']:
        app('_CXXINCFLAGS', cxxpath_st % i)
@feature('cxx')
@after('apply_lib_vars')
def apply_defines_cxx(self):
    """after uselib is set for CXXDEFINES"""
    self.defines = getattr(self, 'defines', [])
    lst = self.to_list(self.defines) + self.to_list(self.env['CXXDEFINES'])
    milst = []

    # now process the local defines
    for defi in lst:
        if not defi in milst:
            milst.append(defi)

    # CXXDEFINES_USELIB
    libs = self.to_list(self.uselib)
    for l in libs:
        val = self.env['CXXDEFINES_'+l]
        if val: milst += self.to_list(val)

    # DEFLINES stores "NAME value" pairs; _CXXDEFFLAGS the actual -D flags
    self.env['DEFLINES'] = ["%s %s" % (x[0], Utils.trimquotes('='.join(x[1:]))) for x in [y.split('=') for y in milst]]
    y = self.env['CXXDEFINES_ST']
    self.env['_CXXDEFFLAGS'] = [y%x for x in milst]
@extension(EXT_CXX)
def cxx_hook(self, node):
    # create the compilation task: cpp or cc
    """Create one 'cxx' compilation task for a C++ source node."""
    if getattr(self, 'obj_ext', None):
        obj_ext = self.obj_ext
    else:
        # suffix with the generator index so that the same source compiled
        # by several task generators does not clash
        obj_ext = '_%d.o' % self.idx

    task = self.create_task('cxx', node, node.change_ext(obj_ext))
    try:
        self.compiled_tasks.append(task)
    except AttributeError:
        raise Utils.WafError('Have you forgotten to set the feature "cxx" on %s?' % str(self))
    return task
# command-line templates and task classes for c++ compilation and linking
cxx_str = '${CXX} ${CXXFLAGS} ${CPPFLAGS} ${_CXXINCFLAGS} ${_CXXDEFFLAGS} ${CXX_SRC_F}${SRC} ${CXX_TGT_F}${TGT}'
cls = Task.simple_task_type('cxx', cxx_str, color='GREEN', ext_out='.o', ext_in='.cxx', shell=False)
cls.scan = ccroot.scan
cls.vars.append('CXXDEPS')

link_str = '${LINK_CXX} ${CXXLNK_SRC_F}${SRC} ${CXXLNK_TGT_F}${TGT[0].abspath(env)} ${LINKFLAGS}'
cls = Task.simple_task_type('cxx_link', link_str, color='YELLOW', ext_in='.o', ext_out='.bin', shell=False)
cls.maxjobs = 1 # link one binary at a time
cls.install = Utils.nada

540
tools/wafadmin/Tools/d.py

@@ -1,540 +0,0 @@
#!/usr/bin/env python
# encoding: utf-8
# Carlos Rafael Giani, 2007 (dv)
# Thomas Nagy, 2007-2008 (ita)
import os, sys, re, optparse
import ccroot # <- leave this
import TaskGen, Utils, Task, Configure, Logs, Build
from Logs import debug, error
from TaskGen import taskgen, feature, after, before, extension
from Configure import conftest
# file extensions dispatched to d_hook below
EXT_D = ['.d', '.di', '.D']
D_METHS = ['apply_core', 'apply_vnum', 'apply_objdeps'] # additional d methods
def filter_comments(filename):
    """Read the D source file *filename* and return the list of its
    characters with comments skipped; string and character literals are
    traversed (their contents are not appended to the buffer)."""
    txt = Utils.readf(filename)
    buf = []

    i = 0
    max = len(txt)
    while i < max:
        c = txt[i]
        # skip a string
        if c == '"':
            i += 1
            c = ''
            while i < max:
                p = c
                c = txt[i]
                i += 1
                if i == max: return buf
                if c == '"':
                    # count the preceding backslashes: an even number means
                    # the quote is not escaped and the string ends here
                    cnt = 0
                    while i < cnt and i < max:
                        #print "cntcnt = ", str(cnt), self.txt[self.i-2-cnt]
                        if txt[i-2-cnt] == '\\': cnt+=1
                        else: break
                    #print "cnt is ", str(cnt)
                    if (cnt%2)==0: break
                i += 1
        # skip a char
        elif c == "'":
            i += 1
            if i == max: return buf
            c = txt[i]
            if c == '\\':
                i += 1
                if i == max: return buf
                c = txt[i]
                if c == 'x':
                    i += 2 # skip two chars
                elif c == 'u':
                    i += 4 # skip unicode chars
            i += 1
            if i == max: return buf
            c = txt[i]
            if c != '\'': error("uh-oh, invalid character")

        # skip a comment
        elif c == '/':
            if i == max: break
            c = txt[i+1]

            # eat /+ +/ comments
            if c == '+':
                i += 1
                nesting = 1
                prev = 0
                while i < max:
                    c = txt[i]
                    if c == '+':
                        prev = 1
                    elif c == '/':
                        if prev:
                            # closing '+/' found, pop one nesting level
                            nesting -= 1
                            if nesting == 0: break
                        else:
                            if i < max:
                                i += 1
                                c = txt[i]
                                if c == '+':
                                    # a new '/+' opens a nested comment
                                    nesting += 1
                            else:
                                return buf
                    else:
                        prev = 0
                    i += 1

            # eat /* */ comments
            elif c == '*':
                i += 1
                while i < max:
                    c = txt[i]
                    if c == '*':
                        prev = 1
                    elif c == '/':
                        if prev: break
                    else:
                        prev = 0
                    i += 1

            # eat // comments
            elif c == '/':
                i += 1
                c = txt[i]
                while i < max and c != '\n':
                    i += 1
                    c = txt[i]

        # a valid char, add it to the buffer
        else:
            buf.append(c)
        i += 1
    return buf
class d_parser(object):
    """Scan D source code for 'module' and 'import' declarations in order to
    build the dependency graph: imports resolved on the include paths become
    nodes, unresolved ones are kept as bare names."""

    def __init__(self, env, incpaths):
        #self.code = ''
        #self.module = ''
        #self.imports = []
        self.allnames = []

        # one regex per syntactic element of a D import statement
        self.re_module = re.compile("module\s+([^;]+)")
        self.re_import = re.compile("import\s+([^;]+)")
        self.re_import_bindings = re.compile("([^:]+):(.*)")
        self.re_import_alias = re.compile("[^=]+=(.+)")

        self.env = env

        self.nodes = []
        self.names = []

        self.incpaths = incpaths

    def tryfind(self, filename):
        """Resolve a module name to a .d file on the include paths; record a
        node when found, a bare name otherwise."""
        rel = filename.replace('.', '/') + '.d'
        node = None
        for p in self.incpaths:
            node = p.find_resource(rel)
            if node:
                self.nodes.append(node)
                self.waiting.append(node)
                break
        if not node:
            if filename not in self.names:
                self.names.append(filename)

    def get_strings(self, code):
        """Return the module names imported by *code*; as a side effect set
        self.module to the declared module name ('' when absent)."""
        #self.imports = []
        self.module = ''
        imports = []

        # module declaration, if present
        mod = self.re_module.search(code)
        if mod:
            self.module = re.sub('\s+', '', mod.group(1)) # strip all whitespaces

        # every "import ... ;" statement in the code
        for im in self.re_import.finditer(code):
            stmt = re.sub('\s+', '', im.group(1)) # strip all whitespaces

            # an import-bindings clause terminates the list of modules;
            # keep only the module part before the ":"
            bind = self.re_import_bindings.match(stmt)
            if bind:
                stmt = bind.group(1)

            # comma-separated module names, each possibly aliased ("alias=module")
            for name in stmt.split(','):
                alias = self.re_import_alias.match(name)
                if alias:
                    name = alias.group(1)
                imports.append(name)
        return imports

    def start(self, node):
        # breadth-first walk over the discovered dependencies
        self.waiting = [node]
        while self.waiting:
            nd = self.waiting.pop(0)
            self.iter(nd)

    def iter(self, node):
        path = node.abspath(self.env) # obtain the absolute path
        code = "".join(filter_comments(path)) # read the file and filter the comments
        for name in self.get_strings(code): # obtain the import strings
            if name in self.allnames:
                continue # already seen
            self.allnames.append(name)
            # for each name, see if it is like a node or not
            self.tryfind(name)
def scan(self):
    "look for .d/.di the .d source need"
    # attached later as the scanner of the 'd' task class (cls.scan = scan)
    env = self.env
    gruik = d_parser(env, env['INC_PATHS'])
    gruik.start(self.inputs[0])

    if Logs.verbose:
        debug('deps: nodes found for %s: %s %s' % (str(self.inputs[0]), str(gruik.nodes), str(gruik.names)))
        #debug("deps found for %s: %s" % (str(node), str(gruik.deps)), 'deps')
    return (gruik.nodes, gruik.names)
def get_target_name(self):
    """Map the task generator's target name to its platform file name using
    the D_program/D_shlib/D_staticlib pattern from the environment."""
    kind = 'program'
    for feat in self.features:
        if feat in ('dshlib', 'dstaticlib'):
            kind = feat.lstrip('d')
    return self.env['D_%s_PATTERN' % kind] % self.target
# default attribute values installed on 'd' task generators (see init_d)
d_params = {
    'dflags': '',
    'importpaths':'',
    'libs':'',
    'libpaths':'',
    'generate_headers':False,
}
@feature('d')
@before('apply_type_vars')
def init_d(self):
    """Install the d_params defaults on the task generator.
    NOTE: shadowed by the second init_d definition later in this file."""
    for x in d_params:
        setattr(self, x, getattr(self, x, d_params[x]))
class d_taskgen(TaskGen.task_gen):
    def __init__(self, *k, **kw):
        TaskGen.task_gen.__init__(self, *k, **kw)

        # COMPAT
        # old API: bld.new_task_gen('d', 'shlib') -> feature 'dshlib'
        if len(k) > 1:
            self.features.append('d' + k[1])

# okay, we borrow a few methods from ccroot
TaskGen.bind_feature('d', D_METHS)
@feature('d')
@before('apply_d_libs')
def init_d(self):
    """Give the task generator its default attributes.
    NOTE: this redefines the init_d above; being defined later, it wins."""
    Utils.def_attrs(self,
        dflags='',
        importpaths='',
        libs='',
        libpaths='',
        uselib='',
        uselib_local='',
        generate_headers=False, # set to true if you want .di files as well as .o
        compiled_tasks=[],
        add_objects=[],
        link_task=None)
@feature('d')
@after('apply_d_link', 'init_d')
@before('apply_vnum')
def apply_d_libs(self):
    """after apply_link because of 'link_task'
    after default_cc because of the attribute 'uselib'"""
    env = self.env

    # 1. the case of the libs defined in the project (visit ancestors first)
    # the ancestors external libraries (uselib) will be prepended
    self.uselib = self.to_list(self.uselib)
    names = self.to_list(self.uselib_local)

    seen = set([])
    tmp = Utils.deque(names) # consume a copy of the list of names
    while tmp:
        lib_name = tmp.popleft()
        # visit dependencies only once
        if lib_name in seen:
            continue

        y = self.name_to_obj(lib_name)
        if not y:
            raise Utils.WafError('object %r was not found in uselib_local (required by %r)' % (lib_name, self.name))
        y.post()
        seen.add(lib_name)

        # object has ancestors to process (shared libraries): add them to the end of the list
        if getattr(y, 'uselib_local', None):
            lst = y.to_list(y.uselib_local)
            if 'dshlib' in y.features or 'dprogram' in y.features:
                # static libs are not transitive through shared libs/programs
                lst = [x for x in lst if not 'dstaticlib' in self.name_to_obj(x).features]
            tmp.extend(lst)

        # link task and flags
        if getattr(y, 'link_task', None):

            link_name = y.target[y.target.rfind(os.sep) + 1:]
            if 'dstaticlib' in y.features or 'dshlib' in y.features:
                env.append_unique('DLINKFLAGS', env.DLIB_ST % link_name)
                env.append_unique('DLINKFLAGS', env.DLIBPATH_ST % y.link_task.outputs[0].parent.bldpath(env))

            # the order
            self.link_task.set_run_after(y.link_task)

            # for the recompilation
            dep_nodes = getattr(self.link_task, 'dep_nodes', [])
            self.link_task.dep_nodes = dep_nodes + y.link_task.outputs

        # add ancestors uselib too - but only propagate those that have no staticlib
        for v in self.to_list(y.uselib):
            if not v in self.uselib:
                self.uselib.insert(0, v)

        # if the library task generator provides 'export_incdirs', add to the include path
        # the export_incdirs must be a list of paths relative to the other library
        if getattr(y, 'export_incdirs', None):
            for x in self.to_list(y.export_incdirs):
                node = y.path.find_dir(x)
                if not node:
                    raise Utils.WafError('object %r: invalid folder %r in export_incdirs' % (y.target, x))
                self.env.append_unique('INC_PATHS', node)
@feature('dprogram', 'dshlib', 'dstaticlib')
@after('apply_core')
def apply_d_link(self):
    """Create the link task combining all compiled objects into the target."""
    link = getattr(self, 'link', None)
    if not link:
        # static libraries are archived, everything else goes through the d linker
        if 'dstaticlib' in self.features: link = 'static_link'
        else: link = 'd_link'

    outputs = [t.outputs[0] for t in self.compiled_tasks]
    self.link_task = self.create_task(link, outputs, self.path.find_or_declare(get_target_name(self)))
@feature('d')
@after('apply_core')
def apply_d_vars(self):
    """Translate the task generator attributes (importpaths, libs, libpaths,
    dflags, uselib variables) into the command-line variables DFLAGS,
    _DIMPORTFLAGS and DLINKFLAGS."""
    env = self.env
    dpath_st = env['DPATH_ST']
    lib_st = env['DLIB_ST']
    libpath_st = env['DLIBPATH_ST']

    importpaths = self.to_list(self.importpaths)
    libpaths = []
    libs = []
    uselib = self.to_list(self.uselib)

    for i in uselib:
        if env['DFLAGS_' + i]:
            env.append_unique('DFLAGS', env['DFLAGS_' + i])

    for x in self.features:
        if not x in ['dprogram', 'dstaticlib', 'dshlib']:
            continue
        # BUGFIX: the result of lstrip was previously discarded, so the
        # lookup used e.g. 'D_dshlib_DFLAGS' instead of 'D_shlib_DFLAGS'
        # (the key the compiler tools actually define) and the
        # feature-specific flags were never applied
        x = x.lstrip('d')
        d_shlib_dflags = env['D_' + x + '_DFLAGS']
        if d_shlib_dflags:
            env.append_unique('DFLAGS', d_shlib_dflags)

    # add import paths
    for i in uselib:
        if env['DPATH_' + i]:
            for entry in self.to_list(env['DPATH_' + i]):
                if not entry in importpaths:
                    importpaths.append(entry)

    # now process the import paths
    for path in importpaths:
        if os.path.isabs(path):
            env.append_unique('_DIMPORTFLAGS', dpath_st % path)
        else:
            # relative paths are resolved against the wscript folder and
            # added for both the source and build trees
            node = self.path.find_dir(path)
            self.env.append_unique('INC_PATHS', node)
            env.append_unique('_DIMPORTFLAGS', dpath_st % node.srcpath(env))
            env.append_unique('_DIMPORTFLAGS', dpath_st % node.bldpath(env))

    # add library paths
    for i in uselib:
        if env['LIBPATH_' + i]:
            for entry in self.to_list(env['LIBPATH_' + i]):
                if not entry in libpaths:
                    libpaths.append(entry)
    libpaths = self.to_list(self.libpaths) + libpaths

    # now process the library paths
    # apply same path manipulation as used with import paths
    for path in libpaths:
        if not os.path.isabs(path):
            node = self.path.find_resource(path)
            if not node:
                raise Utils.WafError('could not find libpath %r from %r' % (path, self))
            path = node.abspath(self.env)

        env.append_unique('DLINKFLAGS', libpath_st % path)

    # add libraries
    for i in uselib:
        if env['LIB_' + i]:
            for entry in self.to_list(env['LIB_' + i]):
                if not entry in libs:
                    libs.append(entry)
    libs.extend(self.to_list(self.libs))

    # process user flags
    for flag in self.to_list(self.dflags):
        env.append_unique('DFLAGS', flag)

    # now process the libraries
    for lib in libs:
        env.append_unique('DLINKFLAGS', lib_st % lib)

    # add linker flags
    for i in uselib:
        dlinkflags = env['DLINKFLAGS_' + i]
        if dlinkflags:
            for linkflag in dlinkflags:
                env.append_unique('DLINKFLAGS', linkflag)
@feature('dshlib')
@after('apply_d_vars')
def add_shlib_d_flags(self):
    """Append the platform shared-library link flags (D_shlib_LINKFLAGS) to DLINKFLAGS."""
    env = self.env
    for flag in env['D_shlib_LINKFLAGS']:
        env.append_unique('DLINKFLAGS', flag)
@extension(EXT_D)
def d_hook(self, node):
    # create the compilation task: cpp or cc
    """Create the compilation task for one D source file; when header
    generation is enabled, the .di file is a second output of the task."""
    task = self.create_task(self.generate_headers and 'd_with_header' or 'd')
    try: obj_ext = self.obj_ext
    except AttributeError: obj_ext = '_%d.o' % self.idx

    task.inputs = [node]
    task.outputs = [node.change_ext(obj_ext)]
    self.compiled_tasks.append(task)

    if self.generate_headers:
        header_node = node.change_ext(self.env['DHEADER_ext'])
        task.outputs += [header_node]
# command-line templates for the d compilation and link task classes below
d_str = '${D_COMPILER} ${DFLAGS} ${_DIMPORTFLAGS} ${D_SRC_F}${SRC} ${D_TGT_F}${TGT}'
d_with_header_str = '${D_COMPILER} ${DFLAGS} ${_DIMPORTFLAGS} \
${D_HDR_F}${TGT[1].bldpath(env)} \
${D_SRC_F}${SRC} \
${D_TGT_F}${TGT[0].bldpath(env)}'
link_str = '${D_LINKER} ${DLNK_SRC_F}${SRC} ${DLNK_TGT_F}${TGT} ${DLINKFLAGS}'
def override_exec(cls):
    """stupid dmd wants -of stuck to the file name

    Patch cls.exec_command so that a standalone '-of' argument is glued to
    the argument that follows it ('-of<file>' as a single token)."""
    original = cls.exec_command
    def exec_command(self, *k, **kw):
        if isinstance(k[0], list):
            argv = k[0]
            for pos in xrange(len(argv)):
                if argv[pos] == '-of':
                    # drop the flag and prepend it to the next argument
                    del argv[pos]
                    argv[pos] = '-of' + argv[pos]
                    break
        return original(self, *k, **kw)
    cls.exec_command = exec_command
# register the d task classes; each one gets the '-of' argument fix
cls = Task.simple_task_type('d', d_str, 'GREEN', before='static_link d_link', shell=False)
cls.scan = scan
override_exec(cls)

cls = Task.simple_task_type('d_with_header', d_with_header_str, 'GREEN', before='static_link d_link', shell=False)
override_exec(cls)

cls = Task.simple_task_type('d_link', link_str, color='YELLOW', shell=False)
override_exec(cls)
# for feature request #104
@taskgen
def generate_header(self, filename, install_path):
    """Queue *filename* for .di interface-header generation; the headers are
    produced later by process_header."""
    if not hasattr(self, 'header_lst'):
        self.header_lst = []
    self.meths.append('process_header')
    self.header_lst.append([filename, install_path])
@before('apply_core')
def process_header(self):
    """Create one 'd_header' task per file queued by generate_header."""
    env = self.env
    for i in getattr(self, 'header_lst', []):
        node = self.path.find_resource(i[0])

        if not node:
            raise Utils.WafError('file not found on d obj '+i[0])

        task = self.create_task('d_header')
        task.set_inputs(node)
        task.set_outputs(node.change_ext('.di'))
# task class generating .di interface headers
d_header_str = '${D_COMPILER} ${D_HEADER} ${SRC}'
Task.simple_task_type('d_header', d_header_str, color='BLUE', shell=False)
@conftest
def d_platform_flags(conf):
    """Set the file-name patterns for D programs and libraries according to
    the destination binary format (PE vs. ELF-style)."""
    v = conf.env
    binfmt = v.DEST_BINFMT or Utils.unversioned_sys_platform_to_binary_format(
        v.DEST_OS or Utils.unversioned_sys_platform())
    if binfmt == 'pe':
        patterns = {
            'D_program_PATTERN': '%s.exe',
            'D_shlib_PATTERN': 'lib%s.dll',
            'D_staticlib_PATTERN': 'lib%s.a',
        }
    else:
        patterns = {
            'D_program_PATTERN': '%s',
            'D_shlib_PATTERN': 'lib%s.so',
            'D_staticlib_PATTERN': 'lib%s.a',
        }
    for key, pattern in patterns.items():
        v[key] = pattern
# quick test #
# manual smoke test: strip the comments of the file given on the command line
if __name__ == "__main__":
    #Logs.verbose = 2
    try: arg = sys.argv[1]
    except IndexError: arg = "file.d"

    print("".join(filter_comments(arg)))
    # TODO
    paths = ['.']

    #gruik = filter()
    #gruik.start(arg)

    #code = "".join(gruik.buf)

    #print "we have found the following code"
    #print code

    #print "now parsing"
    #print "-------------------------------------------"
    """
    parser_ = d_parser()
    parser_.start(arg)

    print "module: %s" % parser_.module
    print "imports: ",
    for imp in parser_.imports:
        print imp + " ",
    print
    """

64
tools/wafadmin/Tools/dmd.py

@@ -1,64 +0,0 @@
#!/usr/bin/env python
# encoding: utf-8
# Carlos Rafael Giani, 2007 (dv)
# Thomas Nagy, 2008 (ita)
import sys
import Utils, ar
from Configure import conftest
@conftest
def find_dmd(conf):
    """Locate dmd (or the ldc drop-in) and store it as D_COMPILER; failure is fatal."""
    conf.find_program(['dmd', 'ldc'], var='D_COMPILER', mandatory=True)
@conftest
def common_flags_ldc(conf):
    """Overrides applied on top of the dmd flags when the compiler is ldc."""
    v = conf.env
    v['DFLAGS'] = ['-d-version=Posix']
    v['DLINKFLAGS'] = []
    v['D_shlib_DFLAGS'] = ['-relocation-model=pic']
@conftest
def common_flags_dmd(conf):
    """Default command-line templates and flags for the dmd compiler."""
    v = conf.env

    # _DFLAGS _DIMPORTFLAGS

    # Compiler is dmd so 'gdc' part will be ignored, just
    # ensure key is there, so wscript can append flags to it
    v['DFLAGS'] = ['-version=Posix']

    v['D_SRC_F'] = ''
    v['D_TGT_F'] = ['-c', '-of']
    v['DPATH_ST'] = '-I%s' # template for adding import paths

    # linker
    v['D_LINKER'] = v['D_COMPILER']
    v['DLNK_SRC_F'] = ''
    v['DLNK_TGT_F'] = '-of'

    v['DLIB_ST'] = '-L-l%s' # template for adding libs
    v['DLIBPATH_ST'] = '-L-L%s' # template for adding libpaths

    # linker debug levels
    v['DFLAGS_OPTIMIZED'] = ['-O']
    v['DFLAGS_DEBUG'] = ['-g', '-debug']
    v['DFLAGS_ULTRADEBUG'] = ['-g', '-debug']
    v['DLINKFLAGS'] = ['-quiet']

    v['D_shlib_DFLAGS'] = ['-fPIC']
    v['D_shlib_LINKFLAGS'] = ['-L-shared']

    v['DHEADER_ext'] = '.di'
    v['D_HDR_F'] = ['-H', '-Hf']
def detect(conf):
    """Configure the D toolchain around dmd/ldc."""
    conf.find_dmd()
    conf.check_tool('ar')
    conf.check_tool('d')
    conf.common_flags_dmd()
    conf.d_platform_flags()

    # ldc understands most dmd flags, but a few need to be replaced
    if 'ldc' in conf.env.D_COMPILER:
        conf.common_flags_ldc()

38
tools/wafadmin/Tools/gas.py

@@ -1,38 +0,0 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2008 (ita)
"as and gas"
import os, sys
import Task
from TaskGen import extension, taskgen, after, before
# assembler source extensions dispatched to asm_hook below
EXT_ASM = ['.s', '.S', '.asm', '.ASM', '.spp', '.SPP']

as_str = '${AS} ${ASFLAGS} ${_ASINCFLAGS} ${SRC} -o ${TGT}'
Task.simple_task_type('asm', as_str, 'PINK', ext_out='.o', shell=False)
@extension(EXT_ASM)
def asm_hook(self, node):
    # create the compilation task: cpp or cc
    """Create one 'asm' task per assembler source and schedule asm_incflags."""
    try: obj_ext = self.obj_ext
    except AttributeError: obj_ext = '_%d.o' % self.idx

    task = self.create_task('asm', node, node.change_ext(obj_ext))
    self.compiled_tasks.append(task)
    self.meths.append('asm_incflags')
@after('apply_obj_vars_cc')
@after('apply_obj_vars_cxx')
@before('apply_link')
def asm_incflags(self):
    """Propagate ASINCFLAGS plus the C or C++ include flags to the assembler."""
    env = self.env
    env.append_value('_ASINCFLAGS', env.ASINCFLAGS)
    if 'cxx' in self.features:
        lang = 'CXX'
    else:
        lang = 'CC'
    env.append_value('_ASINCFLAGS', env['_%sINCFLAGS' % lang])
def detect(conf):
    """Find an assembler ('gas' or 'as'); fall back to the C compiler driver."""
    conf.find_program(['gas', 'as'], var='AS')
    if not conf.env.AS: conf.env.AS = conf.env.CC
    #conf.env.ASFLAGS = ['-c'] <- may be necessary for .S files

137
tools/wafadmin/Tools/gcc.py

@@ -1,137 +0,0 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2006-2008 (ita)
# Ralf Habacker, 2006 (rh)
# Yinon Ehrlich, 2009
import os, sys
import Configure, Options, Utils
import ccroot, ar
from Configure import conftest
@conftest
def find_gcc(conf):
    """Find gcc (or 'cc'), detect its version, and store CC/CC_NAME."""
    cc = conf.find_program(['gcc', 'cc'], var='CC', mandatory=True)
    cc = conf.cmd_to_list(cc)
    ccroot.get_cc_version(conf, cc, gcc=True)
    conf.env.CC_NAME = 'gcc'
    conf.env.CC = cc
@conftest
def gcc_common_flags(conf):
    """Flags and naming patterns shared by every gcc-driven platform."""
    v = conf.env

    # CPPFLAGS CCDEFINES _CCINCFLAGS _CCDEFFLAGS

    v['CCFLAGS_DEBUG'] = ['-g']

    v['CCFLAGS_RELEASE'] = ['-O2']

    v['CC_SRC_F'] = ''
    v['CC_TGT_F'] = ['-c', '-o', ''] # shell hack for -MD
    v['CPPPATH_ST'] = '-I%s' # template for adding include paths

    # linker
    if not v['LINK_CC']: v['LINK_CC'] = v['CC']
    v['CCLNK_SRC_F'] = ''
    v['CCLNK_TGT_F'] = ['-o', ''] # shell hack for -MD

    v['LIB_ST'] = '-l%s' # template for adding libs
    v['LIBPATH_ST'] = '-L%s' # template for adding libpaths
    v['STATICLIB_ST'] = '-l%s'
    v['STATICLIBPATH_ST'] = '-L%s'
    v['RPATH_ST'] = '-Wl,-rpath,%s'
    v['CCDEFINES_ST'] = '-D%s'

    v['SONAME_ST'] = '-Wl,-h,%s'
    v['SHLIB_MARKER'] = '-Wl,-Bdynamic'
    v['STATICLIB_MARKER'] = '-Wl,-Bstatic'
    v['FULLSTATIC_MARKER'] = '-static'

    # program
    v['program_PATTERN'] = '%s'

    # shared library
    v['shlib_CCFLAGS'] = ['-fPIC', '-DPIC'] # avoid using -DPIC, -fPIC aleady defines the __PIC__ macro
    v['shlib_LINKFLAGS'] = ['-shared']
    v['shlib_PATTERN'] = 'lib%s.so'

    # static lib
    v['staticlib_LINKFLAGS'] = ['-Wl,-Bstatic']
    v['staticlib_PATTERN'] = 'lib%s.a'

    # osx stuff
    v['LINKFLAGS_MACBUNDLE'] = ['-bundle', '-undefined', 'dynamic_lookup']
    v['CCFLAGS_MACBUNDLE'] = ['-fPIC']
    v['macbundle_PATTERN'] = '%s.bundle'
@conftest
def gcc_modifier_win32(conf):
    """win32/mingw tweaks: .exe/.dll patterns, import libraries, auto-import."""
    v = conf.env
    v['program_PATTERN'] = '%s.exe'

    v['shlib_PATTERN'] = '%s.dll'
    v['implib_PATTERN'] = 'lib%s.dll.a'
    v['IMPLIB_ST'] = '-Wl,--out-implib,%s'

    dest_arch = v['DEST_CPU']
    if dest_arch == 'x86':
        # On 32-bit x86, gcc emits a message telling the -fPIC option is ignored on this arch, so we remove that flag.
        v['shlib_CCFLAGS'] = ['-DPIC'] # TODO this is a wrong define, we don't use -fPIC!

    v.append_value('shlib_CCFLAGS', '-DDLL_EXPORT') # TODO adding nonstandard defines like this DLL_EXPORT is not a good idea

    # Auto-import is enabled by default even without this option,
    # but enabling it explicitly has the nice effect of suppressing the rather boring, debug-level messages
    # that the linker emits otherwise.
    v.append_value('LINKFLAGS', '-Wl,--enable-auto-import')
@conftest
def gcc_modifier_cygwin(conf):
    """Cygwin: same as win32, but DLLs are prefixed 'cyg' and the linker
    picks an automatic image base."""
    gcc_modifier_win32(conf)
    env = conf.env
    env['shlib_PATTERN'] = 'cyg%s.dll'
    env.append_value('shlib_LINKFLAGS', '-Wl,--enable-auto-image-base')
@conftest
def gcc_modifier_darwin(conf):
    """macOS tweaks: dylib naming/flags, no -Bstatic/-Bdynamic markers."""
    v = conf.env
    v['shlib_CCFLAGS'] = ['-fPIC', '-compatibility_version', '1', '-current_version', '1']
    v['shlib_LINKFLAGS'] = ['-dynamiclib']
    v['shlib_PATTERN'] = 'lib%s.dylib'

    v['staticlib_LINKFLAGS'] = []

    v['SHLIB_MARKER'] = ''
    v['STATICLIB_MARKER'] = ''
    v['SONAME_ST'] = ''
@conftest
def gcc_modifier_aix(conf):
    """AIX tweaks: runtime linking flags for programs and shared libs."""
    v = conf.env
    v['program_LINKFLAGS'] = ['-Wl,-brtl']

    v['shlib_LINKFLAGS'] = ['-shared','-Wl,-brtl,-bexpfull']

    v['SHLIB_MARKER'] = ''
@conftest
def gcc_modifier_platform(conf):
    # * set configurations specific for a platform.
    # * the destination platform is detected automatically by looking at the macros the compiler predefines,
    #   and if it's not recognised, it fallbacks to sys.platform.
    dest_os = conf.env['DEST_OS'] or Utils.unversioned_sys_platform()
    modifier = globals().get('gcc_modifier_%s' % dest_os)
    if modifier:
        modifier(conf)
def detect(conf):
    """Standard gcc configuration sequence: find the tools, set the common
    flags, apply the platform tweaks, import the OS environment flags."""
    conf.find_gcc()
    conf.find_cpp()
    conf.find_ar()
    conf.gcc_common_flags()
    conf.gcc_modifier_platform()
    conf.cc_load_tools()
    conf.cc_add_flags()
    conf.link_add_flags()

52
tools/wafadmin/Tools/gdc.py

@@ -1,52 +0,0 @@
#!/usr/bin/env python
# encoding: utf-8
# Carlos Rafael Giani, 2007 (dv)
import sys
import Utils, ar
from Configure import conftest
@conftest
def find_gdc(conf):
    """Locate the gdc compiler and store it as D_COMPILER; failure is fatal."""
    conf.find_program('gdc', var='D_COMPILER', mandatory=True)
@conftest
def common_flags_gdc(conf):
    """Default command-line templates and flags for the gdc compiler."""
    v = conf.env

    # _DFLAGS _DIMPORTFLAGS

    # for mory info about the meaning of this dict see dmd.py
    v['DFLAGS'] = []

    v['D_SRC_F'] = ''
    v['D_TGT_F'] = ['-c', '-o', '']
    v['DPATH_ST'] = '-I%s' # template for adding import paths

    # linker
    v['D_LINKER'] = v['D_COMPILER']
    v['DLNK_SRC_F'] = ''
    v['DLNK_TGT_F'] = ['-o', '']

    v['DLIB_ST'] = '-l%s' # template for adding libs
    v['DLIBPATH_ST'] = '-L%s' # template for adding libpaths

    # debug levels
    v['DLINKFLAGS'] = []
    v['DFLAGS_OPTIMIZED'] = ['-O3']
    v['DFLAGS_DEBUG'] = ['-O0']
    v['DFLAGS_ULTRADEBUG'] = ['-O0']

    v['D_shlib_DFLAGS'] = []
    v['D_shlib_LINKFLAGS'] = ['-shared']

    v['DHEADER_ext'] = '.di'
    v['D_HDR_F'] = '-fintfc -fintfc-file='
def detect(conf):
    """Configure the D toolchain around gdc."""
    conf.find_gdc()
    conf.check_tool('ar')
    conf.check_tool('d')
    conf.common_flags_gdc()
    conf.d_platform_flags()

111
tools/wafadmin/Tools/gnu_dirs.py

@@ -1,111 +0,0 @@
#!/usr/bin/env python
# encoding: utf-8
# Ali Sabil, 2007
"""
To use this module do not forget to call
opt.tool_options('gnu_dirs')
AND
conf.check_tool('gnu_dirs')
Add options for the standard GNU directories, this tool will add the options
found in autotools, and will update the environment with the following
installation variables:
* PREFIX : architecture-independent files [/usr/local]
* EXEC_PREFIX : architecture-dependent files [PREFIX]
* BINDIR : user executables [EXEC_PREFIX/bin]
* SBINDIR : user executables [EXEC_PREFIX/sbin]
* LIBEXECDIR : program executables [EXEC_PREFIX/libexec]
* SYSCONFDIR : read-only single-machine data [PREFIX/etc]
* SHAREDSTATEDIR : modifiable architecture-independent data [PREFIX/com]
* LOCALSTATEDIR : modifiable single-machine data [PREFIX/var]
* LIBDIR : object code libraries [EXEC_PREFIX/lib]
* INCLUDEDIR : C header files [PREFIX/include]
* OLDINCLUDEDIR : C header files for non-gcc [/usr/include]
* DATAROOTDIR : read-only arch.-independent data root [PREFIX/share]
* DATADIR : read-only architecture-independent data [DATAROOTDIR]
* INFODIR : info documentation [DATAROOTDIR/info]
* LOCALEDIR : locale-dependent data [DATAROOTDIR/locale]
* MANDIR : man documentation [DATAROOTDIR/man]
* DOCDIR : documentation root [DATAROOTDIR/doc/telepathy-glib]
* HTMLDIR : html documentation [DOCDIR]
* DVIDIR : dvi documentation [DOCDIR]
* PDFDIR : pdf documentation [DOCDIR]
* PSDIR : ps documentation [DOCDIR]
"""
import Utils, Options
_options = [x.split(', ') for x in '''
bindir, user executables, ${EXEC_PREFIX}/bin
sbindir, system admin executables, ${EXEC_PREFIX}/sbin
libexecdir, program executables, ${EXEC_PREFIX}/libexec
sysconfdir, read-only single-machine data, ${PREFIX}/etc
sharedstatedir, modifiable architecture-independent data, ${PREFIX}/com
localstatedir, modifiable single-machine data, ${PREFIX}/var
libdir, object code libraries, ${EXEC_PREFIX}/lib
includedir, C header files, ${PREFIX}/include
oldincludedir, C header files for non-gcc, /usr/include
datarootdir, read-only arch.-independent data root, ${PREFIX}/share
datadir, read-only architecture-independent data, ${DATAROOTDIR}
infodir, info documentation, ${DATAROOTDIR}/info
localedir, locale-dependent data, ${DATAROOTDIR}/locale
mandir, man documentation, ${DATAROOTDIR}/man
docdir, documentation root, ${DATAROOTDIR}/doc/${PACKAGE}
htmldir, html documentation, ${DOCDIR}
dvidir, dvi documentation, ${DOCDIR}
pdfdir, pdf documentation, ${DOCDIR}
psdir, ps documentation, ${DOCDIR}
'''.split('\n') if x]
def detect(conf):
    """Populate conf.env with the GNU installation directories.

    Defaults may reference each other (e.g. ${DATAROOTDIR}/info), so the
    substitution loop iterates until a fixed point is reached.
    """
    def get_param(varname, default):
        # Command-line option wins over the built-in default.
        return getattr(Options.options, varname, '') or default
    env = conf.env
    env['EXEC_PREFIX'] = get_param('EXEC_PREFIX', env['PREFIX'])
    env['PACKAGE'] = Utils.g_module.APPNAME
    complete = False
    iter = 0
    # At most len(_options)+1 passes: each pass resolves at least one variable
    # or the configuration is unresolvable.
    while not complete and iter < len(_options) + 1:
        iter += 1
        complete = True
        for name, help, default in _options:
            name = name.upper()
            if not env[name]:
                try:
                    env[name] = Utils.subst_vars(get_param(name, default), env)
                except TypeError:
                    # A referenced variable is not defined yet; retry next pass.
                    complete = False
    if not complete:
        lst = [name for name, _, _ in _options if not env[name.upper()]]
        raise Utils.WafError('Variable substitution failure %r' % lst)
def set_options(opt):
    """Register --prefix/--destdir in their own group, plus one
    command-line option per GNU installation directory."""
    inst_dir = opt.add_option_group('Installation directories',
'By default, "waf install" will put the files in\
"/usr/local/bin", "/usr/local/lib" etc. An installation prefix other\
than "/usr/local" can be given using "--prefix", for example "--prefix=$HOME"')
    # Relocate the generic --prefix/--destdir options into this group.
    for flag in ('--prefix', '--destdir'):
        existing = opt.parser.get_option(flag)
        if existing:
            opt.parser.remove_option(flag)
            inst_dir.add_option(existing)
    inst_dir.add_option('--exec-prefix',
                        help = 'installation prefix [Default: ${PREFIX}]',
                        default = '',
                        dest = 'EXEC_PREFIX')
    dirs_options = opt.add_option_group('Pre-defined installation directories', '')
    # One option per entry of the _options table, e.g. --bindir -> BINDIR.
    for name, desc, default in _options:
        dirs_options.add_option('--' + name,
                                help = '%s [Default: %s]' % (desc, default),
                                default = '',
                                dest = name.upper())

18
tools/wafadmin/Tools/gob2.py

@ -1,18 +0,0 @@
#!/usr/bin/env python
# encoding: utf-8
# Ali Sabil, 2007
import TaskGen
# Teach waf to turn .gob sources into .c files via the gob2 code generator.
TaskGen.declare_chain(
    name = 'gob2',
    rule = '${GOB2} -o ${TGT[0].bld_dir(env)} ${GOB2FLAGS} ${SRC}',
    ext_in = '.gob',
    ext_out = '.c'
)
def detect(conf):
    """Locate the gob2 code generator and initialise its flags."""
    program = conf.find_program('gob2', var='GOB2', mandatory=True)
    env = conf.env
    env['GOB2'] = program
    env['GOB2FLAGS'] = ''

134
tools/wafadmin/Tools/gxx.py

@ -1,134 +0,0 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2006 (ita)
# Ralf Habacker, 2006 (rh)
# Yinon Ehrlich, 2009
import os, sys
import Configure, Options, Utils
import ccroot, ar
from Configure import conftest
@conftest
def find_gxx(conf):
    """Locate a C++ compiler (g++, falling back to c++) and record its
    version and name in the configuration environment."""
    compiler = conf.find_program(['g++', 'c++'], var='CXX', mandatory=True)
    compiler = conf.cmd_to_list(compiler)
    ccroot.get_cc_version(conf, compiler, gcc=True)
    conf.env.CXX_NAME = 'gcc'
    conf.env.CXX = compiler
@conftest
def gxx_common_flags(conf):
    """Set the gcc-style command-line templates and flag defaults for C++."""
    v = conf.env
    # CPPFLAGS CXXDEFINES _CXXINCFLAGS _CXXDEFFLAGS
    v['CXXFLAGS_DEBUG'] = ['-g']
    v['CXXFLAGS_RELEASE'] = ['-O2']
    v['CXX_SRC_F'] = ''
    v['CXX_TGT_F'] = ['-c', '-o', ''] # shell hack for -MD
    v['CPPPATH_ST'] = '-I%s' # template for adding include paths
    # linker
    if not v['LINK_CXX']: v['LINK_CXX'] = v['CXX']
    v['CXXLNK_SRC_F'] = ''
    v['CXXLNK_TGT_F'] = ['-o', ''] # shell hack for -MD
    v['LIB_ST'] = '-l%s' # template for adding libs
    v['LIBPATH_ST'] = '-L%s' # template for adding libpaths
    v['STATICLIB_ST'] = '-l%s'
    v['STATICLIBPATH_ST'] = '-L%s'
    v['RPATH_ST'] = '-Wl,-rpath,%s'
    v['CXXDEFINES_ST'] = '-D%s'
    v['SONAME_ST'] = '-Wl,-h,%s'
    v['SHLIB_MARKER'] = '-Wl,-Bdynamic'
    v['STATICLIB_MARKER'] = '-Wl,-Bstatic'
    v['FULLSTATIC_MARKER'] = '-static'
    # program
    v['program_PATTERN'] = '%s'
    # shared library
    v['shlib_CXXFLAGS'] = ['-fPIC', '-DPIC'] # avoid using -DPIC, -fPIC aleady defines the __PIC__ macro
    v['shlib_LINKFLAGS'] = ['-shared']
    v['shlib_PATTERN'] = 'lib%s.so'
    # static lib
    v['staticlib_LINKFLAGS'] = ['-Wl,-Bstatic']
    v['staticlib_PATTERN'] = 'lib%s.a'
    # osx stuff
    v['LINKFLAGS_MACBUNDLE'] = ['-bundle', '-undefined', 'dynamic_lookup']
    v['CCFLAGS_MACBUNDLE'] = ['-fPIC']
    v['macbundle_PATTERN'] = '%s.bundle'
@conftest
def gxx_modifier_win32(conf):
    """Adjust patterns and flags for Windows (MinGW-style) targets."""
    v = conf.env
    v['program_PATTERN'] = '%s.exe'
    v['shlib_PATTERN'] = '%s.dll'
    v['implib_PATTERN'] = 'lib%s.dll.a'
    v['IMPLIB_ST'] = '-Wl,--out-implib,%s'
    dest_arch = v['DEST_CPU']
    if dest_arch == 'x86':
        # On 32-bit x86, gcc emits a message telling the -fPIC option is ignored on this arch, so we remove that flag.
        v['shlib_CXXFLAGS'] = ['-DPIC'] # TODO this is a wrong define, we don't use -fPIC!
    v.append_value('shlib_CXXFLAGS', '-DDLL_EXPORT') # TODO adding nonstandard defines like this DLL_EXPORT is not a good idea
    # Auto-import is enabled by default even without this option,
    # but enabling it explicitly has the nice effect of suppressing the rather boring, debug-level messages
    # that the linker emits otherwise.
    v.append_value('LINKFLAGS', '-Wl,--enable-auto-import')
@conftest
def gxx_modifier_cygwin(conf):
    """Cygwin targets: same as win32, except shared libs are named cyg*.dll."""
    gxx_modifier_win32(conf)
    env = conf.env
    env['shlib_PATTERN'] = 'cyg%s.dll'
    env.append_value('shlib_LINKFLAGS', '-Wl,--enable-auto-image-base')
@conftest
def gxx_modifier_darwin(conf):
    """Adjust flags for OS X: dylibs, no -Bstatic/-Bdynamic markers, no soname."""
    v = conf.env
    v['shlib_CXXFLAGS'] = ['-fPIC', '-compatibility_version', '1', '-current_version', '1']
    v['shlib_LINKFLAGS'] = ['-dynamiclib']
    v['shlib_PATTERN'] = 'lib%s.dylib'
    v['staticlib_LINKFLAGS'] = []
    v['SHLIB_MARKER'] = ''
    v['STATICLIB_MARKER'] = ''
    v['SONAME_ST'] = ''
@conftest
def gxx_modifier_aix(conf):
    """Adjust link flags for AIX (runtime linking via -brtl)."""
    v = conf.env
    v['program_LINKFLAGS'] = ['-Wl,-brtl']
    v['shlib_LINKFLAGS'] = ['-shared', '-Wl,-brtl,-bexpfull']
    v['SHLIB_MARKER'] = ''
@conftest
def gxx_modifier_platform(conf):
    """Apply OS-specific settings.

    The destination platform is detected from the macros the compiler
    predefines; if unrecognised, it falls back to sys.platform.
    """
    target_os = conf.env['DEST_OS'] or Utils.unversioned_sys_platform()
    tweak = globals().get('gxx_modifier_' + target_os)
    if tweak:
        tweak(conf)
def detect(conf):
    """Run the full g++ configuration sequence, in order."""
    for step in ('find_gxx', 'find_cpp', 'find_ar',
                 'gxx_common_flags', 'gxx_modifier_platform',
                 'cxx_load_tools', 'cxx_add_flags'):
        getattr(conf, step)()

37
tools/wafadmin/Tools/icc.py

@ -1,37 +0,0 @@
#!/usr/bin/env python
# encoding: utf-8
# Stian Selnes, 2008
# Thomas Nagy 2009
import os, sys
import Configure, Options, Utils
import ccroot, ar, gcc
from Configure import conftest
@conftest
def find_icc(conf):
    """Locate the Intel C compiler, preferring a pre-set CC value or the
    CC environment variable over a PATH search for icc/ICL."""
    if sys.platform == 'cygwin':
        conf.fatal('The Intel compiler does not work on Cygwin')
    v = conf.env
    cc = v['CC'] or None
    if not cc and 'CC' in conf.environ:
        cc = conf.environ['CC']
    for candidate in ('icc', 'ICL'):
        if cc:
            break
        cc = conf.find_program(candidate, var='CC')
    if not cc:
        conf.fatal('Intel C Compiler (icc) was not found')
    cc = conf.cmd_to_list(cc)
    ccroot.get_cc_version(conf, cc, icc=True)
    v['CC'] = cc
    v['CC_NAME'] = 'icc'
# Configuration entry point: waf runs these conftest methods in order.
# icc reuses the gcc flag helpers since its driver is gcc-compatible.
detect = '''
find_icc
find_ar
gcc_common_flags
gcc_modifier_platform
cc_load_tools
cc_add_flags
link_add_flags
'''

34
tools/wafadmin/Tools/icpc.py

@ -1,34 +0,0 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy 2009
import os, sys
import Configure, Options, Utils
import ccroot, ar, gxx
from Configure import conftest
@conftest
def find_icpc(conf):
    """Locate the Intel C++ compiler, preferring a pre-set CXX value or
    the CXX environment variable over a PATH search for icpc."""
    if sys.platform == 'cygwin':
        conf.fatal('The Intel compiler does not work on Cygwin')
    v = conf.env
    cxx = v['CXX'] or None
    if not cxx and 'CXX' in conf.environ:
        cxx = conf.environ['CXX']
    if not cxx:
        cxx = conf.find_program('icpc', var='CXX')
    if not cxx:
        conf.fatal('Intel C++ Compiler (icpc) was not found')
    cxx = conf.cmd_to_list(cxx)
    ccroot.get_cc_version(conf, cxx, icc=True)
    v['CXX'] = cxx
    v['CXX_NAME'] = 'icc'
# Configuration entry point: waf runs these conftest methods in order.
# icpc reuses the g++ flag helpers since its driver is gcc-compatible.
detect = '''
find_icpc
find_ar
gxx_common_flags
gxx_modifier_platform
cxx_load_tools
cxx_add_flags
'''

139
tools/wafadmin/Tools/intltool.py

@ -1,139 +0,0 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2006 (ita)
"intltool support"
import os, re
import Configure, TaskGen, Task, Utils, Runner, Options, Build, config_c
from TaskGen import feature, before, taskgen
from Logs import error
"""
Usage:
bld(features='intltool_in', source='a.po b.po', podir='po', cache='.intlcache', flags='')
"""
class intltool_in_taskgen(TaskGen.task_gen):
    """deprecated: use bld(features='intltool_in', ...) instead"""
    def __init__(self, *k, **kw):
        TaskGen.task_gen.__init__(self, *k, **kw)
@before('apply_core')
@feature('intltool_in')
def iapply_intltool_in_f(self):
    """Create one 'intltool' merge task per source file, replacing apply_core."""
    try: self.meths.remove('apply_core')
    except ValueError: pass
    for i in self.to_list(self.source):
        # NOTE(review): node is not checked for None before create_task below;
        # a missing source would fail there -- confirm whether that is intended.
        node = self.path.find_resource(i)
        podir = getattr(self, 'podir', 'po')
        podirnode = self.path.find_dir(podir)
        if not podirnode:
            error("could not find the podir %r" % podir)
            continue
        cache = getattr(self, 'intlcache', '.intlcache')
        # NOTE(review): these env entries are overwritten on every iteration,
        # so all tasks of this generator share the last values set here.
        self.env['INTLCACHE'] = os.path.join(self.path.bldpath(self.env), podir, cache)
        self.env['INTLPODIR'] = podirnode.srcpath(self.env)
        self.env['INTLFLAGS'] = getattr(self, 'flags', ['-q', '-u', '-c'])
        task = self.create_task('intltool', node, node.change_ext(''))
        task.install_path = self.install_path
class intltool_po_taskgen(TaskGen.task_gen):
    """deprecated: use bld(features='intltool_po', ...) instead"""
    def __init__(self, *k, **kw):
        TaskGen.task_gen.__init__(self, *k, **kw)
@feature('intltool_po')
def apply_intltool_po(self):
    """Compile the .po catalogs listed in podir/LINGUAS into .mo files and
    install them under ${LOCALEDIR}/<lang>/LC_MESSAGES/<appname>.mo."""
    try: self.meths.remove('apply_core')
    except ValueError: pass
    self.default_install_path = '${LOCALEDIR}'
    appname = getattr(self, 'appname', 'set_your_app_name')
    podir = getattr(self, 'podir', '')
    def install_translation(task):
        # Installed path is derived from the catalog's language name.
        out = task.outputs[0]
        filename = out.name
        (langname, ext) = os.path.splitext(filename)
        inst_file = langname + os.sep + 'LC_MESSAGES' + os.sep + appname + '.mo'
        self.bld.install_as(os.path.join(self.install_path, inst_file), out, self.env, self.chmod)
    linguas = self.path.find_resource(os.path.join(podir, 'LINGUAS'))
    if linguas:
        # scan LINGUAS file for locales to process
        # NOTE(review): `file` shadows the builtin of the same name.
        file = open(linguas.abspath())
        langs = []
        for line in file.readlines():
            # ignore lines containing comments
            if not line.startswith('#'):
                langs += line.split()
        file.close()
        re_linguas = re.compile('[-a-zA-Z_@.]+')
        for lang in langs:
            # Make sure that we only process lines which contain locales
            if re_linguas.match(lang):
                # NOTE(review): node may be None if the .po file is missing;
                # set_inputs(None) would then fail -- confirm.
                node = self.path.find_resource(os.path.join(podir, re_linguas.match(lang).group() + '.po'))
                task = self.create_task('po')
                task.set_inputs(node)
                task.set_outputs(node.change_ext('.mo'))
                if self.bld.is_install: task.install = install_translation
    else:
        Utils.pprint('RED', "Error no LINGUAS file found in po directory")
# Task classes: 'po' compiles a catalog with msgfmt; 'intltool' merges
# translations into the target file after the link steps.
Task.simple_task_type('po', '${POCOM} -o ${TGT} ${SRC}', color='BLUE', shell=False)
Task.simple_task_type('intltool',
    '${INTLTOOL} ${INTLFLAGS} ${INTLCACHE} ${INTLPODIR} ${SRC} ${TGT}',
    color='BLUE', after="cc_link cxx_link", shell=False)
def detect(conf):
    """Find msgfmt and intltool-merge, then define LOCALEDIR/DATADIR.

    On win32, intltool-merge is run through perl, so both are located.
    """
    pocom = conf.find_program('msgfmt')
    if not pocom:
        # if msgfmt should not be mandatory, catch the thrown exception in your wscript
        conf.fatal('The program msgfmt (gettext) is mandatory!')
    conf.env['POCOM'] = pocom
    # NOTE: it is possible to set INTLTOOL in the environment, but it must not have spaces in it
    intltool = conf.find_program('intltool-merge', var='INTLTOOL')
    if not intltool:
        # if intltool-merge should not be mandatory, catch the thrown exception in your wscript
        if Options.platform == 'win32':
            perl = conf.find_program('perl', var='PERL')
            if not perl:
                conf.fatal('The program perl (required by intltool) could not be found')
            intltooldir = Configure.find_file('intltool-merge', os.environ['PATH'].split(os.pathsep))
            if not intltooldir:
                conf.fatal('The program intltool-merge (intltool, gettext-devel) is mandatory!')
            # Invoke the script through perl on win32.
            conf.env['INTLTOOL'] = Utils.to_list(conf.env['PERL']) + [intltooldir + os.sep + 'intltool-merge']
            conf.check_message('intltool', '', True, ' '.join(conf.env['INTLTOOL']))
        else:
            conf.fatal('The program intltool-merge (intltool, gettext-devel) is mandatory!')
    def getstr(varname):
        return getattr(Options.options, varname, '')
    prefix = conf.env['PREFIX']
    datadir = getstr('datadir')
    if not datadir: datadir = os.path.join(prefix,'share')
    conf.define('LOCALEDIR', os.path.join(datadir, 'locale'))
    conf.define('DATADIR', datadir)
    if conf.env['CC'] or conf.env['CXX']:
        # Define to 1 if <locale.h> is present
        conf.check(header_name='locale.h')
def set_options(opt):
    """Register the intltool-related command-line options."""
    opt.add_option('--want-rpath',
                   type='int', default=1, dest='want_rpath',
                   help='set rpath to 1 or 0 [Default 1]')
    opt.add_option('--datadir',
                   type='string', default='', dest='datadir',
                   help='read-only application data')

330
tools/wafadmin/Tools/libtool.py

@ -1,330 +0,0 @@
#!/usr/bin/env python
# encoding: utf-8
# Matthias Jahn, 2008, jahn matthias ath freenet punto de
# Thomas Nagy, 2008 (ita)
import sys, re, os, optparse
import TaskGen, Task, Utils, preproc
from Logs import error, debug, warn
from TaskGen import taskgen, after, before, feature
REVISION="0.1.3"
"""
if you want to use the code here, you must use something like this:
obj = obj.create(...)
obj.features.append("libtool")
obj.vnum = "1.2.3" # optional, but versioned libraries are common
"""
# fake libtool files
fakelibtool_vardeps = ['CXX', 'PREFIX']
def fakelibtool_build(task):
    """Write a libtool .la descriptor for the task's first input library.

    Returns 0 on success (waf task convention).  Fix: the output file is
    now written inside a `with` block so the handle is closed even if a
    write fails (the original leaked it on error).
    """
    env = task.env
    sname = task.inputs[0].name
    with open(task.outputs[0].abspath(env), 'w') as dest:
        fu = dest.write
        fu("# Generated by ltmain.sh - GNU libtool 1.5.18 - (pwn3d by BKsys II code name WAF)\n")
        if env['vnum']:
            # Versioned library: emit lib.so.X as dlname plus the name triple.
            nums = env['vnum'].split('.')
            libname = task.inputs[0].name
            name3 = libname+'.'+env['vnum']
            name2 = libname+'.'+nums[0]
            name1 = libname
            fu("dlname='%s'\n" % name2)
            strn = " ".join([name3, name2, name1])
            fu("library_names='%s'\n" % (strn) )
        else:
            fu("dlname='%s'\n" % sname)
            fu("library_names='%s %s %s'\n" % (sname, sname, sname) )
        fu("old_library=''\n")
        vars = ' '.join(env['libtoolvars']+env['LINKFLAGS'])
        fu("dependency_libs='%s'\n" % vars)
        fu("current=0\n")
        fu("age=0\nrevision=0\ninstalled=yes\nshouldnotlink=no\n")
        fu("dlopen=''\ndlpreopen=''\n")
        fu("libdir='%s/lib'\n" % env['PREFIX'])
    return 0
def read_la_file(path):
    """Parse a libtool .la file into a dict of variable -> value.

    Only lines of the form  name='value'  are kept; anything else
    (comments, blank lines, garbage) is silently skipped, matching the
    original behaviour.  Fixes: raw string for the regex (the original
    relied on `\\'` not being a recognised escape) and the file handle is
    closed via `with` even if parsing raises.
    """
    pattern = re.compile(r"^([^=]+)='(.*)'$")
    dc = {}
    with open(path, "r") as handle:
        for line in handle:
            m = pattern.match(line.strip())
            if m:
                dc[m.group(1)] = m.group(2)
    return dc
@feature("libtool")
@after('apply_link')
def apply_link_libtool(self):
    """After linking a library, schedule generation of its .la file and,
    when installing, copy the library itself to ${PREFIX}/lib."""
    # Programs get no .la descriptor; only libraries do.
    if self.type != 'program':
        linktask = self.link_task
        self.latask = self.create_task('fakelibtool', linktask.outputs, linktask.outputs[0].change_ext('.la'))
        if self.bld.is_install:
            # NOTE(review): destination is hard-coded to ${PREFIX}/lib.
            self.bld.install_files('${PREFIX}/lib', linktask.outputs[0], self.env)
@feature("libtool")
@before('apply_core')
def apply_libtool(self):
    """Expand -l flags through their .la descriptors, recursively folding
    each library's dependency_libs into LINKFLAGS."""
    self.env['vnum']=self.vnum
    paths=[]
    libs=[]
    libtool_files=[]
    libtool_vars=[]
    # Split existing LINKFLAGS into search paths (-L) and libraries (-l).
    for l in self.env['LINKFLAGS']:
        if l[:2]=='-L':
            paths.append(l[2:])
        elif l[:2]=='-l':
            libs.append(l[2:])
    for l in libs:
        for p in paths:
            # NOTE(review): only the first path is tried (break below), and
            # read_la_file raises IOError when the .la file is absent -- confirm.
            # Also `dict` shadows the builtin of the same name.
            dict = read_la_file(p+'/lib'+l+'.la')
            linkflags2 = dict.get('dependency_libs', '')
            for v in linkflags2.split():
                if v.endswith('.la'):
                    # Nested .la references are queued for the pass below.
                    libtool_files.append(v)
                    libtool_vars.append(v)
                    continue
                self.env.append_unique('LINKFLAGS', v)
            break
    self.env['libtoolvars']=libtool_vars
    # Transitively resolve queued .la files; `file` shadows the builtin.
    while libtool_files:
        file = libtool_files.pop()
        dict = read_la_file(file)
        for v in dict['dependency_libs'].split():
            if v[-3:] == '.la':
                libtool_files.append(v)
                continue
            self.env.append_unique('LINKFLAGS', v)
# Register the .la generator task; it must run after the link tasks it describes.
Task.task_type_from_func('fakelibtool', vars=fakelibtool_vardeps, func=fakelibtool_build, color='BLUE', after="cc_link cxx_link static_link")
class libtool_la_file:
    """Parse a libtool .la descriptor file into attributes.

    Each  name=value  line becomes an attribute; 'yes'/'no' map to
    True/False, integers are converted, quoted strings are unquoted.
    Fixes: `raise "string"` (invalid since Python 2.6 -- raising a str
    is a TypeError) replaced by Utils.WafError; the file handle in
    __parse() is now closed via `with`.
    """
    def __init__ (self, la_filename):
        self.__la_filename = la_filename
        # linkname: file name without directory, '.la' suffix and 'lib' prefix.
        self.linkname = str(os.path.split(la_filename)[-1])[:-3]
        if self.linkname.startswith("lib"):
            self.linkname = self.linkname[3:]
        # The name that we can dlopen(3).
        self.dlname = None
        # Names of this library
        self.library_names = None
        # The name of the static archive.
        self.old_library = None
        # Libraries that this one depends upon.
        self.dependency_libs = None
        # Version information.
        self.current = None
        self.age = None
        self.revision = None
        # Is this an already installed library?
        self.installed = None
        # Should we warn about portability when linking against -modules?
        self.shouldnotlink = None
        # Files to dlopen/dlpreopen
        self.dlopen = None
        self.dlpreopen = None
        # Directory that this library needs to be installed in:
        self.libdir = '/usr/lib'
        if not self.__parse():
            # was: raise "file %s not found!!" -- string exceptions are invalid
            raise Utils.WafError("file %s not found!!" % (la_filename,))
    def __parse(self):
        "Retrieve the variables from a file"
        if not os.path.isfile(self.__la_filename): return 0
        with open(self.__la_filename, 'r') as la_file:
            for line in la_file:
                ln = line.strip()
                if not ln: continue
                if ln[0]=='#': continue
                (key, value) = str(ln).split('=', 1)
                key = key.strip()
                value = value.strip()
                if value == "no": value = False
                elif value == "yes": value = True
                else:
                    try: value = int(value)
                    except ValueError: value = value.strip("'")
                setattr(self, key, value)
        return 1
    def get_libs(self):
        """Return linker flags for this lib: -L<libdir> -l<name> plus deps."""
        libs = []
        if self.dependency_libs:
            libs = str(self.dependency_libs).strip().split()
        # (the original also had a dead `if libs == None` check here)
        # add la lib and libdir
        libs.insert(0, "-l%s" % self.linkname.strip())
        libs.insert(0, "-L%s" % self.libdir.strip())
        return libs
    def __str__(self):
        return '''\
dlname = "%(dlname)s"
library_names = "%(library_names)s"
old_library = "%(old_library)s"
dependency_libs = "%(dependency_libs)s"
version = %(current)s.%(age)s.%(revision)s
installed = "%(installed)s"
shouldnotlink = "%(shouldnotlink)s"
dlopen = "%(dlopen)s"
dlpreopen = "%(dlpreopen)s"
libdir = "%(libdir)s"''' % self.__dict__
class libtool_config:
    """Resolve a .la file and its transitive .la dependencies into flat
    linker flags.  NOTE(review): __cmp__/cmp are Python-2-only."""
    def __init__ (self, la_filename):
        self.__libtool_la_file = libtool_la_file(la_filename)
        tmp = self.__libtool_la_file
        # [current, age, revision] as integers, for version comparisons.
        self.__version = [int(tmp.current), int(tmp.age), int(tmp.revision)]
        self.__sub_la_files = []
        self.__sub_la_files.append(la_filename)
        # Cache for get_libs(); None until first computed.
        self.__libs = None
    def __cmp__(self, other):
        """make it compareable with X.Y.Z versions (Y and Z are optional)"""
        if not other:
            return 1
        othervers = [int(s) for s in str(other).split(".")]
        selfvers = self.__version
        return cmp(selfvers, othervers)
    def __str__(self):
        return "\n".join([
            str(self.__libtool_la_file),
            ' '.join(self.__libtool_la_file.get_libs()),
            '* New getlibs:',
            ' '.join(self.get_libs())
        ])
    def __get_la_libs(self, la_filename):
        # Flags contributed by one referenced .la file.
        return libtool_la_file(la_filename).get_libs()
    def get_libs(self):
        """return the complete uniqe linkflags that do not
        contain .la files anymore"""
        libs_list = list(self.__libtool_la_file.get_libs())
        # Used as a set; order of the result is therefore unspecified.
        libs_map = {}
        while len(libs_list) > 0:
            entry = libs_list.pop(0)
            if entry:
                if str(entry).endswith(".la"):
                    ## prevents duplicate .la checks
                    if entry not in self.__sub_la_files:
                        self.__sub_la_files.append(entry)
                        libs_list.extend(self.__get_la_libs(entry))
                else:
                    libs_map[entry]=1
        self.__libs = libs_map.keys()
        return self.__libs
    def get_libs_only_L(self):
        if not self.__libs: self.get_libs()
        libs = self.__libs
        libs = [s for s in libs if str(s).startswith('-L')]
        return libs
    def get_libs_only_l(self):
        if not self.__libs: self.get_libs()
        libs = self.__libs
        libs = [s for s in libs if str(s).startswith('-l')]
        return libs
    def get_libs_only_other(self):
        # Everything that is neither a -L path nor a -l library.
        if not self.__libs: self.get_libs()
        libs = self.__libs
        libs = [s for s in libs if not(str(s).startswith('-L')or str(s).startswith('-l'))]
        return libs
def useCmdLine():
    """parse cmdline args and control build

    Standalone libtool-config-style driver: queries a .la file for its
    flags or compares its version against a requested one (exit code 0/1).
    """
    usage = '''Usage: %prog [options] PathToFile.la
example: %prog --atleast-version=2.0.0 /usr/lib/libIlmImf.la
nor: %prog --libs /usr/lib/libamarok.la'''
    parser = optparse.OptionParser(usage)
    a = parser.add_option
    a("--version", dest = "versionNumber",
        action = "store_true", default = False,
        help = "output version of libtool-config"
        )
    a("--debug", dest = "debug",
        action = "store_true", default = False,
        help = "enable debug"
        )
    a("--libs", dest = "libs",
        action = "store_true", default = False,
        help = "output all linker flags"
        )
    a("--libs-only-l", dest = "libs_only_l",
        action = "store_true", default = False,
        help = "output -l flags"
        )
    a("--libs-only-L", dest = "libs_only_L",
        action = "store_true", default = False,
        help = "output -L flags"
        )
    a("--libs-only-other", dest = "libs_only_other",
        action = "store_true", default = False,
        help = "output other libs (e.g. -pthread)"
        )
    a("--atleast-version", dest = "atleast_version",
        default=None,
        help = "return 0 if the module is at least version ATLEAST_VERSION"
        )
    a("--exact-version", dest = "exact_version",
        default=None,
        help = "return 0 if the module is exactly version EXACT_VERSION"
        )
    a("--max-version", dest = "max_version",
        default=None,
        help = "return 0 if the module is at no newer than version MAX_VERSION"
        )
    (options, args) = parser.parse_args()
    if len(args) != 1 and not options.versionNumber:
        parser.error("incorrect number of arguments")
    if options.versionNumber:
        print("libtool-config version %s" % REVISION)
        return 0
    ltf = libtool_config(args[0])
    if options.debug:
        print(ltf)
    # Version checks rely on libtool_config.__cmp__ against "X.Y.Z" strings.
    if options.atleast_version:
        if ltf >= options.atleast_version: return 0
        sys.exit(1)
    if options.exact_version:
        if ltf == options.exact_version: return 0
        sys.exit(1)
    if options.max_version:
        if ltf <= options.max_version: return 0
        sys.exit(1)
    def p(x):
        print(" ".join(x))
    if options.libs: p(ltf.get_libs())
    elif options.libs_only_l: p(ltf.get_libs_only_l())
    elif options.libs_only_L: p(ltf.get_libs_only_L())
    elif options.libs_only_other: p(ltf.get_libs_only_other())
    return 0
# Allow this module to be run as a standalone libtool-config command.
if __name__ == '__main__':
    useCmdLine()

430
tools/wafadmin/Tools/misc.py

@ -1,430 +0,0 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2006 (ita)
"""
Custom objects:
- execute a function everytime
- copy a file somewhere else
"""
import shutil, re, os
import TaskGen, Node, Task, Utils, Build, Constants
from TaskGen import feature, taskgen, after, before
from Logs import debug
def copy_func(tsk):
    """Copy the task's single input file to its single output file,
    preserving metadata; apply tsk.chmod afterwards when set.
    Returns 0 on success, 1 on copy failure (waf task convention)."""
    env = tsk.env
    src = tsk.inputs[0].abspath(env)
    dst = tsk.outputs[0].abspath(env)
    try:
        shutil.copy2(src, dst)
    except (OSError, IOError):
        return 1
    if tsk.chmod:
        os.chmod(dst, tsk.chmod)
    return 0
def action_process_file_func(tsk):
    """Delegate processing of the task to its attached callable and
    return whatever that callable returns."""
    if not tsk.fun:
        raise Utils.WafError('task must have a function attached to it for copy_func to work!')
    return tsk.fun(tsk)
class cmd_taskgen(TaskGen.task_gen):
    """Task generator for the 'cmd' feature (run a function every build)."""
    def __init__(self, *k, **kw):
        TaskGen.task_gen.__init__(self, *k, **kw)
@feature('cmd')
def apply_cmd(self):
    "call a command everytime"
    if not self.fun: raise Utils.WafError('cmdobj needs a function!')
    # A bare TaskBase: no inputs/outputs, so it runs on every build.
    tsk = Task.TaskBase()
    tsk.fun = self.fun
    tsk.env = self.env
    self.tasks.append(tsk)
    tsk.install_path = self.install_path
class copy_taskgen(TaskGen.task_gen):
    "By default, make a file copy, if fun is provided, fun will make the copy (or call a compiler, etc)"
    def __init__(self, *k, **kw):
        TaskGen.task_gen.__init__(self, *k, **kw)
@feature('copy')
@before('apply_core')
def apply_copy(self):
    """Create one 'copy' task per source file, replacing apply_core.
    The copy function defaults to copy_func but may be overridden via fun."""
    Utils.def_attrs(self, fun=copy_func)
    self.default_install_path = 0
    lst = self.to_list(self.source)
    self.meths.remove('apply_core')
    for filename in lst:
        node = self.path.find_resource(filename)
        if not node: raise Utils.WafError('cannot find input file %s for processing' % filename)
        target = self.target
        # With several sources, the explicit target name cannot apply to all.
        if not target or len(lst)>1: target = node.name
        # TODO the file path may be incorrect
        newnode = self.path.find_or_declare(target)
        tsk = self.create_task('copy', node, newnode)
        tsk.fun = self.fun
        tsk.chmod = self.chmod
        tsk.install_path = self.install_path
        if not tsk.env:
            tsk.debug()
            raise Utils.WafError('task without an environment')
def subst_func(tsk):
    """Substitute @VAR@ placeholders in a .in file and write the result.

    Values come from tsk.dict when provided, otherwise from the task
    environment (exact name, then upper-cased).  Fixes: raw string for
    the regex (the original '@(\\w+)@' relied on '\\w' not being a
    recognised escape) and the output file is closed via `with` even if
    the string formatting raises.
    """
    m4_re = re.compile(r'@(\w+)@', re.M)
    env = tsk.env
    infile = tsk.inputs[0].abspath(env)
    outfile = tsk.outputs[0].abspath(env)
    code = Utils.readf(infile)
    # replace all % by %% to prevent errors by % signs in the input file while string formatting
    code = code.replace('%', '%%')
    s = m4_re.sub(r'%(\1)s', code)
    di = tsk.dict or {}
    if not di:
        names = m4_re.findall(code)
        for i in names:
            di[i] = env.get_flat(i) or env.get_flat(i.upper())
    with open(outfile, 'w') as out:
        out.write(s % di)
    if tsk.chmod: os.chmod(outfile, tsk.chmod)
class subst_taskgen(TaskGen.task_gen):
    """Task generator for the 'subst' feature (@VAR@ substitution in .in files)."""
    def __init__(self, *k, **kw):
        TaskGen.task_gen.__init__(self, *k, **kw)
@feature('subst')
@before('apply_core')
def apply_subst(self):
    """Create one substitution ('copy' with fun=subst_func) task per source,
    hashing the substitution dict into the env so changes trigger rebuilds."""
    Utils.def_attrs(self, fun=subst_func)
    self.default_install_path = 0
    lst = self.to_list(self.source)
    self.meths.remove('apply_core')
    self.dict = getattr(self, 'dict', {})
    for filename in lst:
        node = self.path.find_resource(filename)
        if not node: raise Utils.WafError('cannot find input file %s for processing' % filename)
        if self.target:
            newnode = self.path.find_or_declare(self.target)
        else:
            # Default output: same name with the trailing extension dropped.
            newnode = node.change_ext('')
        try:
            # Environment-like dicts are flattened to a plain mapping.
            self.dict = self.dict.get_merged_dict()
        except AttributeError:
            pass
        if self.dict and not self.env['DICT_HASH']:
            self.env = self.env.copy()
            keys = list(self.dict.keys())
            keys.sort()
            # NOTE(review): this rebinds `lst` while the for-loop above is
            # iterating it; iteration is unaffected (the iterator was taken
            # from the original list), but the name reuse is confusing.
            lst = [self.dict[x] for x in keys]
            self.env['DICT_HASH'] = str(Utils.h_list(lst))
        tsk = self.create_task('copy', node, newnode)
        tsk.fun = self.fun
        tsk.dict = self.dict
        tsk.dep_vars = ['DICT_HASH']
        tsk.install_path = self.install_path
        tsk.chmod = self.chmod
        if not tsk.env:
            tsk.debug()
            raise Utils.WafError('task without an environment')
####################
## command-output ####
####################
class cmd_arg(object):
    """Base class for command-output arguments that name a file or folder.

    `template` is a %-format applied to the resolved path; `node` stays
    None until find_node() is called by a subclass.
    """
    def __init__(self, name, template='%s'):
        self.node = None
        self.name = name
        self.template = template
class input_file(cmd_arg):
    """A command argument denoting an existing input file."""
    def find_node(self, base_path):
        """Resolve the name against base_path; raise WafError if absent."""
        assert isinstance(base_path, Node.Node)
        self.node = base_path.find_resource(self.name)
        if self.node is None:
            # Fixed: the original format string had one %s but two
            # arguments, which raised TypeError instead of the real error.
            raise Utils.WafError("Input file %s not found in %s" % (self.name, base_path))
    def get_path(self, env, absolute):
        """Return the templated path, absolute or source-relative."""
        if absolute:
            return self.template % self.node.abspath(env)
        else:
            return self.template % self.node.srcpath(env)
class output_file(cmd_arg):
    """A command argument denoting a file produced by the command."""
    def find_node(self, base_path):
        """Declare the name under base_path; raise WafError if that fails."""
        assert isinstance(base_path, Node.Node)
        self.node = base_path.find_or_declare(self.name)
        if self.node is None:
            # Fixed: the original format string had one %s but two
            # arguments, which raised TypeError instead of the real error.
            raise Utils.WafError("Output file %s not found in %s" % (self.name, base_path))
    def get_path(self, env, absolute):
        """Return the templated path, absolute or build-relative."""
        if absolute:
            return self.template % self.node.abspath(env)
        else:
            return self.template % self.node.bldpath(env)
class cmd_dir_arg(cmd_arg):
    """A command argument denoting a directory (base for input/output dirs)."""
    def find_node(self, base_path):
        """Resolve the directory under base_path; raise WafError if absent."""
        assert isinstance(base_path, Node.Node)
        self.node = base_path.find_dir(self.name)
        if self.node is None:
            # Fixed: the original format string had one %s but two
            # arguments, which raised TypeError instead of the real error.
            raise Utils.WafError("Directory %s not found in %s" % (self.name, base_path))
class input_dir(cmd_dir_arg):
    """A directory argument resolved in the source tree (env is ignored)."""
    def get_path(self, dummy_env, dummy_absolute):
        return self.template % self.node.abspath()
class output_dir(cmd_dir_arg):
    """A directory argument resolved in the build tree for the given env."""
    def get_path(self, env, dummy_absolute):
        return self.template % self.node.abspath(env)
class command_output(Task.Task):
    """Task that runs an arbitrary command with redirected stdin/stdout/stderr.

    Paths are rendered build-relative when no cwd is set, absolute otherwise.
    """
    color = "BLUE"
    def __init__(self, env, command, command_node, command_args, stdin, stdout, cwd, os_env, stderr):
        Task.Task.__init__(self, env, normal=1)
        assert isinstance(command, (str, Node.Node))
        self.command = command
        self.command_args = command_args
        self.stdin = stdin
        self.stdout = stdout
        self.cwd = cwd
        self.os_env = os_env
        self.stderr = stderr
        # Re-run when the command script itself changes.
        if command_node is not None: self.dep_nodes = [command_node]
        self.dep_vars = [] # additional environment variables to look
    def run(self):
        task = self
        #assert len(task.inputs) > 0
        def input_path(node, template):
            # Relative to the build dir unless a cwd was given.
            if task.cwd is None:
                return template % node.bldpath(task.env)
            else:
                return template % node.abspath()
        def output_path(node, template):
            fun = node.abspath
            if task.cwd is None: fun = node.bldpath
            return template % fun(task.env)
        if isinstance(task.command, Node.Node):
            argv = [input_path(task.command, '%s')]
        else:
            argv = [task.command]
        for arg in task.command_args:
            if isinstance(arg, str):
                argv.append(arg)
            else:
                assert isinstance(arg, cmd_arg)
                argv.append(arg.get_path(task.env, (task.cwd is not None)))
        # NOTE(review): these file handles are never closed after Popen
        # finishes; they are reclaimed only by garbage collection.
        if task.stdin:
            stdin = open(input_path(task.stdin, '%s'))
        else:
            stdin = None
        if task.stdout:
            stdout = open(output_path(task.stdout, '%s'), "w")
        else:
            stdout = None
        if task.stderr:
            stderr = open(output_path(task.stderr, '%s'), "w")
        else:
            stderr = None
        if task.cwd is None:
            cwd = ('None (actually %r)' % os.getcwd())
        else:
            cwd = repr(task.cwd)
        debug("command-output: cwd=%s, stdin=%r, stdout=%r, argv=%r" %
              (cwd, stdin, stdout, argv))
        if task.os_env is None:
            os_env = os.environ
        else:
            os_env = task.os_env
        command = Utils.pproc.Popen(argv, stdin=stdin, stdout=stdout, stderr=stderr, cwd=task.cwd, env=os_env)
        # Exit status of the command becomes the task's return code.
        return command.wait()
class cmd_output_taskgen(TaskGen.task_gen):
    """Task generator for the 'command-output' feature."""
    def __init__(self, *k, **kw):
        TaskGen.task_gen.__init__(self, *k, **kw)
@feature('command-output')
def init_cmd_output(self):
    """Set the default attributes consumed later by apply_cmd_output.

    NOTE(review): the list defaults (argv, dependencies, dep_vars,
    hidden_inputs, hidden_outputs) may be shared between generators if
    def_attrs assigns these objects directly -- verify before mutating.
    """
    Utils.def_attrs(self,
        stdin = None,
        stdout = None,
        stderr = None,
        # the command to execute
        command = None,
        # whether it is an external command; otherwise it is assumed
        # to be an executable binary or script that lives in the
        # source or build tree.
        command_is_external = False,
        # extra parameters (argv) to pass to the command (excluding
        # the command itself)
        argv = [],
        # dependencies to other objects -> this is probably not what you want (ita)
        # values must be 'task_gen' instances (not names!)
        dependencies = [],
        # dependencies on env variable contents
        dep_vars = [],
        # input files that are implicit, i.e. they are not
        # stdin, nor are they mentioned explicitly in argv
        hidden_inputs = [],
        # output files that are implicit, i.e. they are not
        # stdout, nor are they mentioned explicitly in argv
        hidden_outputs = [],
        # change the subprocess to this cwd (must use obj.input_dir() or output_dir() here)
        cwd = None,
        # OS environment variables to pass to the subprocess
        # if None, use the default environment variables unchanged
        os_env = None)
@feature('command-output')
@after('init_cmd_output')
def apply_cmd_output(self):
    """Build a command_output task from the attributes set by init_cmd_output,
    collecting the task's inputs/outputs from argv, std streams and the
    hidden_* lists."""
    if self.command is None:
        raise Utils.WafError("command-output missing command")
    if self.command_is_external:
        cmd = self.command
        cmd_node = None
    else:
        cmd_node = self.path.find_resource(self.command)
        assert cmd_node is not None, ('''Could not find command '%s' in source tree.
Hint: if this is an external command,
use command_is_external=True''') % (self.command,)
        cmd = cmd_node
    if self.cwd is None:
        cwd = None
    else:
        # Fixed: the original asserted on the undefined names `cwd` and
        # `CmdDirArg` (NameError) and never bound `cwd` on this branch
        # before it was used below.  We resolve the directory node and
        # pass its rendered path so Popen(cwd=...) receives a string.
        # NOTE(review): assumes self.cwd is an input_dir/output_dir -- confirm.
        assert isinstance(self.cwd, cmd_dir_arg)
        self.cwd.find_node(self.path)
        cwd = self.cwd.get_path(self.env, True)
    args = []
    inputs = []
    outputs = []
    # File-valued argv entries contribute to the task's inputs/outputs.
    for arg in self.argv:
        if isinstance(arg, cmd_arg):
            arg.find_node(self.path)
            if isinstance(arg, input_file):
                inputs.append(arg.node)
            if isinstance(arg, output_file):
                outputs.append(arg.node)
    if self.stdout is None:
        stdout = None
    else:
        assert isinstance(self.stdout, str)
        stdout = self.path.find_or_declare(self.stdout)
        if stdout is None:
            raise Utils.WafError("File %s not found" % (self.stdout,))
        outputs.append(stdout)
    if self.stderr is None:
        stderr = None
    else:
        assert isinstance(self.stderr, str)
        stderr = self.path.find_or_declare(self.stderr)
        if stderr is None:
            raise Utils.WafError("File %s not found" % (self.stderr,))
        outputs.append(stderr)
    if self.stdin is None:
        stdin = None
    else:
        assert isinstance(self.stdin, str)
        stdin = self.path.find_resource(self.stdin)
        if stdin is None:
            raise Utils.WafError("File %s not found" % (self.stdin,))
        inputs.append(stdin)
    for hidden_input in self.to_list(self.hidden_inputs):
        node = self.path.find_resource(hidden_input)
        if node is None:
            raise Utils.WafError("File %s not found in dir %s" % (hidden_input, self.path))
        inputs.append(node)
    for hidden_output in self.to_list(self.hidden_outputs):
        node = self.path.find_or_declare(hidden_output)
        if node is None:
            raise Utils.WafError("File %s not found in dir %s" % (hidden_output, self.path))
        outputs.append(node)
    if not (inputs or getattr(self, 'no_inputs', None)):
        raise Utils.WafError('command-output objects must have at least one input file or give self.no_inputs')
    if not (outputs or getattr(self, 'no_outputs', None)):
        raise Utils.WafError('command-output objects must have at least one output file or give self.no_outputs')
    task = command_output(self.env, cmd, cmd_node, self.argv, stdin, stdout, cwd, self.os_env, stderr)
    Utils.copy_attrs(self, task, 'before after ext_in ext_out', only_if_set=True)
    self.tasks.append(task)
    task.inputs = inputs
    task.outputs = outputs
    task.dep_vars = self.to_list(self.dep_vars)
    for dep in self.dependencies:
        assert dep is not self
        dep.post()
        for dep_task in dep.tasks:
            task.set_run_after(dep_task)
    if not task.inputs:
        # the case for svnversion, always run, and update the output nodes
        task.runnable_status = type(Task.TaskBase.run)(runnable_status, task, task.__class__) # always run
        task.post_run = type(Task.TaskBase.run)(post_run, task, task.__class__)
    # TODO the case with no outputs?
def post_run(self):
	# Re-hash every output file and store the signature in the build's
	# node signature table, so dependent tasks see the files produced by
	# this always-run task (there are no inputs to derive a signature from).
	for x in self.outputs:
		h = Utils.h_file(x.abspath(self.env))
		self.generator.bld.node_sigs[self.env.variant()][x.id] = h
def runnable_status(self):
	# Unconditionally schedule the task (used for input-less tasks such as
	# svnversion-style generators that must run on every build).
	return Constants.RUN_ME
# Register the 'copy' task type and expose the command-output task generator
# under its user-facing name.
Task.task_type_from_func('copy', vars=[], func=action_process_file_func)
TaskGen.task_gen.classes['command-output'] = cmd_output_taskgen

49
tools/wafadmin/Tools/nasm.py

@ -1,49 +0,0 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2008
"""
Nasm processing
"""
import os
import TaskGen, Task, Utils
from TaskGen import taskgen, before, extension
# Command template and source extensions handled by the nasm/yasm tool.
nasm_str = '${NASM} ${NASM_FLAGS} ${NASM_INCLUDES} ${SRC} -o ${TGT}'
EXT_NASM = ['.s', '.S', '.asm', '.ASM', '.spp', '.SPP']

@before('apply_link')
def apply_nasm_vars(self):
	"""Copy per-task-generator nasm flags and include dirs into the env."""
	# flags
	if hasattr(self, 'nasm_flags'):
		for flag in self.to_list(self.nasm_flags):
			self.env.append_value('NASM_FLAGS', flag)

	# includes - well, if we suppose it works with c processing
	if hasattr(self, 'includes'):
		for inc in self.to_list(self.includes):
			node = self.path.find_dir(inc)
			if not node:
				raise Utils.WafError('cannot find the dir' + inc)
			# add both the source and the build variants of the directory
			self.env.append_value('NASM_INCLUDES', '-I%s' % node.srcpath(self.env))
			self.env.append_value('NASM_INCLUDES', '-I%s' % node.bldpath(self.env))
@extension(EXT_NASM)
def nasm_file(self, node):
	"""Create an assembly task turning *node* into an object file."""
	try: obj_ext = self.obj_ext
	except AttributeError: obj_ext = '_%d.o' % self.idx
	task = self.create_task('nasm', node, node.change_ext(obj_ext))
	self.compiled_tasks.append(task)
	# make sure the flag/include propagation method runs for this generator
	self.meths.append('apply_nasm_vars')
# create our action here
# shell=False: the command template is split and executed directly.
Task.simple_task_type('nasm', nasm_str, color='BLUE', ext_out='.o', shell=False)
def detect(conf):
	"""Configure step: locate a nasm-compatible assembler (nasm or yasm).

	find_program stores the result in conf.env['NASM'] and aborts the
	configuration when neither program is found (mandatory=True); the
	previous local binding of the return value was unused and is dropped.
	"""
	conf.find_program(['nasm', 'yasm'], var='NASM', mandatory=True)

86
tools/wafadmin/Tools/node_addon.py

@ -1,86 +0,0 @@
import os
import TaskGen, Utils, Runner, Options, Build
from TaskGen import extension, taskgen, before, after, feature
from Configure import conf, conftest
@taskgen
@before('apply_incpaths', 'apply_lib_vars', 'apply_type_vars')
@feature('node_addon')
@before('apply_bundle')
def init_node_addon(self):
	"""Prepare a 'node_addon' target: install into NODE_PATH, link against
	the NODE uselib variables and build as a mac bundle on OSX."""
	self.default_install_path = self.env['NODE_PATH']
	self.uselib = self.to_list(getattr(self, 'uselib', ''))
	if not 'NODE' in self.uselib: self.uselib.append('NODE')
	self.env['MACBUNDLE'] = True
@taskgen
@before('apply_link', 'apply_lib_vars', 'apply_type_vars')
@after('apply_bundle')
@feature('node_addon')
def node_addon_shlib_ext(self):
	# node addons are shared objects named "<target>.node"
	self.env['shlib_PATTERN'] = "%s.node"
def detect(conf):
	"""Configure step: record node's prefix, include/lib paths, compiler
	defines and the addon install path in conf.env."""
	join = os.path.join

	conf.env['PREFIX_NODE'] = get_prefix()
	prefix = conf.env['PREFIX_NODE']
	lib = join(prefix, 'lib')
	nodebin = join(prefix, 'bin', 'node')

	conf.env['LIBPATH_NODE'] = lib
	conf.env['CPPPATH_NODE'] = join(prefix, 'include', 'node')

	conf.env.append_value('CPPFLAGS_NODE', '-D_GNU_SOURCE')

	# large-file support for both C and C++ compilations
	conf.env.append_value('CCFLAGS_NODE', '-D_LARGEFILE_SOURCE')
	conf.env.append_value('CCFLAGS_NODE', '-D_FILE_OFFSET_BITS=64')

	conf.env.append_value('CXXFLAGS_NODE', '-D_LARGEFILE_SOURCE')
	conf.env.append_value('CXXFLAGS_NODE', '-D_FILE_OFFSET_BITS=64')

	# with symbols
	conf.env.append_value('CCFLAGS', ['-g'])
	conf.env.append_value('CXXFLAGS', ['-g'])

	# install path
	conf.env['NODE_PATH'] = get_node_path()

	# this changes the install path of cxx task_gen
	conf.env['LIBDIR'] = conf.env['NODE_PATH']

	found = os.path.exists(conf.env['NODE_PATH'])
	conf.check_message('node path', '', found, conf.env['NODE_PATH'])

	found = os.path.exists(nodebin)
	conf.check_message('node prefix', '', found, prefix)

	## On Cygwin we need to link to the generated symbol definitions
	if Options.platform.startswith('cygwin'): conf.env['LIB_NODE'] = 'node'

	## On Mac OSX we need to use mac bundles
	if Options.platform == 'darwin':
		if 'i386' in Utils.cmd_output(['file', nodebin]):
			# 32-bit node binary: force i386 so the addon matches
			conf.env.append_value('CPPFLAGS_NODE', ['-arch', 'i386'])
			conf.env.append_value('CXXFLAGS_NODE', ['-arch', 'i386'])
			conf.env.append_value('LINKFLAGS', ['-arch', 'i386'])
			conf.env['DEST_CPU'] = 'i386'
		conf.check_tool('osx')
def get_node_path():
	"""Return the node module install directory.

	Resolution order: $NODE_PATH when set, else ~/.node_libraries when
	$HOME is available, else <prefix>/lib/node.
	"""
	join = os.path.join
	nodePath = None
	# membership test instead of the Python-2-only dict.has_key()
	if 'NODE_PATH' not in os.environ:
		if 'HOME' not in os.environ:
			nodePath = join(get_prefix(), 'lib', 'node')
		else:
			nodePath = join(os.environ['HOME'], '.node_libraries')
	else:
		nodePath = os.environ['NODE_PATH']
	return nodePath
def get_prefix():
	"""Return the node installation prefix.

	$PREFIX_NODE wins when set; otherwise use the directory four levels up
	from this file (tools/wafadmin/Tools -> source root).
	"""
	# membership test instead of the Python-2-only dict.has_key()
	if 'PREFIX_NODE' not in os.environ:
		prefix = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..', '..', '..'))
	else:
		prefix = os.environ['PREFIX_NODE']
	return prefix

187
tools/wafadmin/Tools/osx.py

@ -1,187 +0,0 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy 2008
"""MacOSX related tools
To compile an executable into a Mac application bundle (a .app), set its 'mac_app' attribute
obj.mac_app = True
To make a bundled shared library (a .bundle), set the 'mac_bundle' attribute:
obj.mac_bundle = True
"""
import os, shutil, sys, platform
import TaskGen, Task, Build, Options, Utils
from TaskGen import taskgen, feature, after, before
from Logs import error, debug
# plist template
app_info = '''
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist SYSTEM "file://localhost/System/Library/DTDs/PropertyList.dtd">
<plist version="0.9">
<dict>
<key>CFBundlePackageType</key>
<string>APPL</string>
<key>CFBundleGetInfoString</key>
<string>Created by Waf</string>
<key>CFBundleSignature</key>
<string>????</string>
<key>NOTE</key>
<string>THIS IS A GENERATED FILE, DO NOT MODIFY</string>
<key>CFBundleExecutable</key>
<string>%s</string>
</dict>
</plist>
'''
# see WAF issue 285
# and also http://trac.macports.org/ticket/17059
@feature('cc', 'cxx')
@before('apply_lib_vars')
def set_macosx_deployment_target(self):
	"""Export MACOSX_DEPLOYMENT_TARGET from the build env to os.environ;
	when unset everywhere, default to the running system's major.minor
	version (darwin only)."""
	if self.env['MACOSX_DEPLOYMENT_TARGET']:
		os.environ['MACOSX_DEPLOYMENT_TARGET'] = self.env['MACOSX_DEPLOYMENT_TARGET']
	elif 'MACOSX_DEPLOYMENT_TARGET' not in os.environ:
		if sys.platform == 'darwin':
			os.environ['MACOSX_DEPLOYMENT_TARGET'] = '.'.join(platform.mac_ver()[0].split('.')[:2])
@feature('cc', 'cxx')
@after('apply_lib_vars')
def apply_framework(self):
	"""Turn FRAMEWORKPATH entries into -F flags and FRAMEWORK entries into
	'-framework <name>' link flags."""
	for x in self.to_list(self.env['FRAMEWORKPATH']):
		frameworkpath_st = '-F%s'
		# framework search paths affect compilation and linking alike
		self.env.append_unique('CXXFLAGS', frameworkpath_st % x)
		self.env.append_unique('CCFLAGS', frameworkpath_st % x)
		self.env.append_unique('LINKFLAGS', frameworkpath_st % x)

	for x in self.to_list(self.env['FRAMEWORK']):
		self.env.append_value('LINKFLAGS', ['-framework', x])
@taskgen
def create_bundle_dirs(self, name, out):
	"""Find or create the <name>/Contents/MacOS directory nodes next to the
	output node *out*; return the bundle root node."""
	bld = self.bld
	dir = out.parent.get_dir(name)

	if not dir:
		# build the node hierarchy by hand and let the build context rescan it
		dir = out.__class__(name, out.parent, 1)
		bld.rescan(dir)
		contents = out.__class__('Contents', dir, 1)
		bld.rescan(contents)
		macos = out.__class__('MacOS', contents, 1)
		bld.rescan(macos)
	return dir
def bundle_name_for_output(out):
	"""Derive the .app bundle name from an output node: the node's file
	name with its last extension (if any) replaced by '.app'."""
	stem, dot, _ext = out.name.rpartition('.')
	base = stem if dot else out.name
	return base + '.app'
@taskgen
@after('apply_link')
@feature('cprogram')
def create_task_macapp(self):
	"""Use env['MACAPP'] to force *all* executables to be transformed into Mac applications
	or use obj.mac_app = True to build specific targets as Mac apps"""
	if self.env['MACAPP'] or getattr(self, 'mac_app', False):
		apptask = self.create_task('macapp')
		apptask.set_inputs(self.link_task.outputs)

		out = self.link_task.outputs[0]

		name = bundle_name_for_output(out)
		dir = self.create_bundle_dirs(name, out)

		# final executable lives at <name>.app/Contents/MacOS/<exe>
		n1 = dir.find_or_declare(['Contents', 'MacOS', out.name])

		apptask.set_outputs([n1])
		apptask.chmod = 0755
		apptask.install_path = os.path.join(self.install_path, name, 'Contents', 'MacOS')
		self.apptask = apptask
@after('apply_link')
@feature('cprogram')
def create_task_macplist(self):
	"""Use env['MACAPP'] to force *all* executables to be transformed into Mac applications
	or use obj.mac_app = True to build specific targets as Mac apps"""
	if self.env['MACAPP'] or getattr(self, 'mac_app', False):
		# check if the user specified a plist before using our template
		if not getattr(self, 'mac_plist', False):
			self.mac_plist = app_info

		plisttask = self.create_task('macplist')
		plisttask.set_inputs(self.link_task.outputs)

		out = self.link_task.outputs[0]
		# fill the template's %s with the executable name
		self.mac_plist = self.mac_plist % (out.name)

		name = bundle_name_for_output(out)
		dir = self.create_bundle_dirs(name, out)

		n1 = dir.find_or_declare(['Contents', 'Info.plist'])

		plisttask.set_outputs([n1])
		plisttask.mac_plist = self.mac_plist
		plisttask.install_path = os.path.join(self.install_path, name, 'Contents')
		self.plisttask = plisttask
@after('apply_link')
@feature('cshlib')
def apply_link_osx(self):
	"""Record the installed location inside the dylib via
	'-install_name <path>' when linking with -dynamiclib."""
	name = self.link_task.outputs[0].name
	if not self.install_path:
		return
	if getattr(self, 'vnum', None):
		name = name.replace('.dylib', '.%s.dylib' % self.vnum)

	path = os.path.join(Utils.subst_vars(self.install_path, self.env), name)
	if '-dynamiclib' in self.env['LINKFLAGS']:
		self.env.append_value('LINKFLAGS', '-install_name')
		self.env.append_value('LINKFLAGS', path)
@before('apply_link', 'apply_lib_vars')
@feature('cc', 'cxx')
def apply_bundle(self):
	"""use env['MACBUNDLE'] to force all shlibs into mac bundles
	or use obj.mac_bundle = True for specific targets only"""
	# only meaningful for shared-library targets
	if not ('cshlib' in self.features or 'shlib' in self.features): return

	if self.env['MACBUNDLE'] or getattr(self, 'mac_bundle', False):
		# switch the output naming pattern and pull in the MACBUNDLE flags
		self.env['shlib_PATTERN'] = self.env['macbundle_PATTERN']
		uselib = self.uselib = self.to_list(self.uselib)
		if not 'MACBUNDLE' in uselib: uselib.append('MACBUNDLE')
@after('apply_link')
@feature('cshlib')
def apply_bundle_remove_dynamiclib(self):
	# Mac bundles are linked with -bundle, so -dynamiclib must be dropped
	# (unless a version number 'vnum' forces the dylib code path).
	if self.env['MACBUNDLE'] or getattr(self, 'mac_bundle', False):
		if not getattr(self, 'vnum', None):
			try:
				self.env['LINKFLAGS'].remove('-dynamiclib')
			except ValueError:
				pass
# TODO REMOVE IN 1.6 (global variable)
app_dirs = ['Contents', 'Contents/MacOS', 'Contents/Resources']

def app_build(task):
	"""Copy the linked executable into the bundle's MacOS directory,
	preserving file metadata."""
	source = task.inputs[0].srcpath(task.env)
	destination = task.outputs[0].abspath(task.env)
	shutil.copy2(source, destination)
	return 0
def plist_build(task):
	"""Write the task's prepared Info.plist content into the bundle."""
	destination = task.outputs[0].abspath(task.env)
	handle = open(destination, "w")
	try:
		handle.write(task.mac_plist)
	finally:
		handle.close()
	return 0
# Bundle assembly runs after the link tasks have produced the binaries.
Task.task_type_from_func('macapp', vars=[], func=app_build, after="cxx_link cc_link static_link")
Task.task_type_from_func('macplist', vars=[], func=plist_build, after="cxx_link cc_link static_link")

813
tools/wafadmin/Tools/preproc.py

@ -1,813 +0,0 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2006-2009 (ita)
"""
C/C++ preprocessor for finding dependencies
Reasons for using the Waf preprocessor by default
1. Some c/c++ extensions (Qt) require a custom preprocessor for obtaining the dependencies (.moc files)
2. Not all compilers provide .d files for obtaining the dependencies (portability)
3. A naive file scanner will not catch the constructs such as "#include foo()"
4. A naive file scanner will catch unnecessary dependencies (change an unused header -> recompile everything)
Regarding the speed concerns:
a. the preprocessing is performed only when files must be compiled
b. the macros are evaluated only for #if/#elif/#include
c. the time penalty is about 10%
d. system headers are not scanned
Now if you do not want the Waf preprocessor, the tool "gccdeps" uses the .d files produced
during the compilation to track the dependencies (useful when used with the boost libraries).
It only works with gcc though, and it cannot be used with Qt builds. A dumb
file scanner will be added in the future, so we will have most bahaviours.
"""
# TODO: more varargs, pragma once
# TODO: dumb file scanner tracking all includes
import re, sys, os, string
import Logs, Build, Utils
from Logs import debug, error
import traceback
class PreprocError(Utils.WafError):
	"""Error raised while lexing or evaluating preprocessor directives."""
	pass
# Sentinel pushed on the line queue to mark the end of an included file.
POPFILE = '-'

recursion_limit = 100
"do not loop too much on header inclusion"

go_absolute = 0
"set to 1 to track headers on files in /usr/include - else absolute paths are ignored"

standard_includes = ['/usr/include']
if sys.platform == "win32":
	standard_includes = []

use_trigraphs = 0
'apply the trigraph rules first'

strict_quotes = 0
"Keep <> for system includes (do not search for those includes)"

g_optrans = {
'not':'!',
'and':'&&',
'bitand':'&',
'and_eq':'&=',
'or':'||',
'bitor':'|',
'or_eq':'|=',
'xor':'^',
'xor_eq':'^=',
'compl':'~',
}
"these ops are for c++, to reset, set an empty dict"

# ignore #warning and #error
re_lines = re.compile(\
	'^[ \t]*(#|%:)[ \t]*(ifdef|ifndef|if|else|elif|endif|include|import|define|undef|pragma)[ \t]*(.*)\r*$',
	re.IGNORECASE | re.MULTILINE)

re_mac = re.compile("^[a-zA-Z_]\w*")
re_fun = re.compile('^[a-zA-Z_][a-zA-Z0-9_]*[(]')
re_pragma_once = re.compile('^\s*once\s*', re.IGNORECASE)
# backslash-newline continuations are folded away before parsing
re_nl = re.compile('\\\\\r*\n', re.MULTILINE)
# matches comments, strings/chars and plain code (see repl below)
re_cpp = re.compile(\
	r"""(/\*[^*]*\*+([^/*][^*]*\*+)*/)|//[^\n]*|("(\\.|[^"\\])*"|'(\\.|[^'\\])*'|.[^/"'\\]*)""",
	re.MULTILINE)
trig_def = [('??'+a, b) for a, b in zip("=-/!'()<>", r'#~\|^[]{}')]

chr_esc = {'0':0, 'a':7, 'b':8, 't':9, 'n':10, 'f':11, 'v':12, 'r':13, '\\':92, "'":39}

# Token type tags used throughout the lexer/evaluator.
NUM   = 'i'
OP    = 'O'
IDENT = 'T'
STR   = 's'
CHAR  = 'c'

tok_types = [NUM, STR, IDENT, OP]
exp_types = [
	r"""0[xX](?P<hex>[a-fA-F0-9]+)(?P<qual1>[uUlL]*)|L*?'(?P<char>(\\.|[^\\'])+)'|(?P<n1>\d+)[Ee](?P<exp0>[+-]*?\d+)(?P<float0>[fFlL]*)|(?P<n2>\d*\.\d+)([Ee](?P<exp1>[+-]*?\d+))?(?P<float1>[fFlL]*)|(?P<n4>\d+\.\d*)([Ee](?P<exp2>[+-]*?\d+))?(?P<float2>[fFlL]*)|(?P<oct>0*)(?P<n0>\d+)(?P<qual2>[uUlL]*)""",
	r'L?"([^"\\]|\\.)*"',
	r'[a-zA-Z_]\w*',
	r'%:%:|<<=|>>=|\.\.\.|<<|<%|<:|<=|>>|>=|\+\+|\+=|--|->|-=|\*=|/=|%:|%=|%>|==|&&|&=|\|\||\|=|\^=|:>|!=|##|[\(\)\{\}\[\]<>\?\|\^\*\+&=:!#;,%/\-\?\~\.]',
]
re_clexer = re.compile('|'.join(["(?P<%s>%s)" % (name, part) for name, part in zip(tok_types, exp_types)]), re.M)

# #if branch states used on the c_parser state stack.
accepted  = 'a'
ignored   = 'i'
undefined = 'u'
skipped   = 's'
def repl(m):
	"""re_cpp substitution callback: block comments collapse to a single
	space, line comments vanish, everything else is kept verbatim."""
	if m.group(1) is not None:
		return ' '
	kept = m.group(3)
	if kept is None:
		return ''
	return kept
def filter_comments(filename):
	"""Read *filename*, strip comments/continuations and return the
	preprocessor directives it contains."""
	# return a list of tuples : keyword, line
	code = Utils.readf(filename)
	if use_trigraphs:
		for (a, b) in trig_def: code = code.split(a).join(b)
	code = re_nl.sub('', code)
	code = re_cpp.sub(repl, code)
	return [(m.group(2), m.group(3)) for m in re.finditer(re_lines, code)]
prec = {}
# op -> number, needed for such expressions: #if 1 && 2 != 0
# Lower number = higher binding strength; operators on the same line tie.
ops = ['* / %', '+ -', '<< >>', '< <= >= >', '== !=', '& | ^', '&& ||', ',']
for x, syms in enumerate(ops):
	for u in syms.split():
		prec[u] = x
def reduce_nums(val_1, val_2, val_op):
	"""apply arithmetic rules and try to return an integer result"""
	#print val_1, val_2, val_op

	# now perform the operation, make certain a and b are numeric
	# (strings coming from the lexer are converted through int())
	try: a = 0 + val_1
	except TypeError: a = int(val_1)
	try: b = 0 + val_2
	except TypeError: b = int(val_2)

	d = val_op
	if d == '%': c = a%b
	elif d=='+': c = a+b
	elif d=='-': c = a-b
	elif d=='*': c = a*b
	elif d=='/': c = a/b
	elif d=='^': c = a^b
	elif d=='|': c = a|b
	elif d=='||': c = int(a or b)
	elif d=='&': c = a&b
	elif d=='&&': c = int(a and b)
	elif d=='==': c = int(a == b)
	elif d=='!=': c = int(a != b)
	elif d=='<=': c = int(a <= b)
	elif d=='<': c = int(a < b)
	elif d=='>': c = int(a > b)
	elif d=='>=': c = int(a >= b)
	# NOTE: a second, unreachable "elif d=='^'" branch was removed here;
	# '^' is already handled above.
	elif d=='<<': c = a<<b
	elif d=='>>': c = a>>b
	else: c = 0
	return c
def get_num(lst):
	"""Parse one primary expression (number, identifier, unary op or a
	parenthesised sub-expression) from the token list.

	Returns (value, remaining_tokens). Raises PreprocError on malformed
	input.
	"""
	if not lst: raise PreprocError("empty list for get_num")
	(p, v) = lst[0]
	if p == OP:
		if v == '(':
			# find the matching closing parenthesis
			count_par = 1
			i = 1
			while i < len(lst):
				(p, v) = lst[i]
				if p == OP:
					if v == ')':
						count_par -= 1
						if count_par == 0:
							break
					elif v == '(':
						count_par += 1
				i += 1
			else:
				raise PreprocError("rparen expected %r" % lst)

			(num, _) = get_term(lst[1:i])
			return (num, lst[i+1:])

		elif v == '+':
			return get_num(lst[1:])
		elif v == '-':
			num, lst = get_num(lst[1:])
			return (reduce_nums('-1', num, '*'), lst)
		elif v == '!':
			num, lst = get_num(lst[1:])
			return (int(not int(num)), lst)
		elif v == '~':
			# bugfix: evaluate the operand first — 'num' was previously
			# referenced before assignment, raising NameError on '~x'
			num, lst = get_num(lst[1:])
			return (~ int(num), lst)
		else:
			raise PreprocError("invalid op token %r for get_num" % lst)
	elif p == NUM:
		return v, lst[1:]
	elif p == IDENT:
		# all macros should have been replaced, remaining identifiers eval to 0
		return 0, lst[1:]
	else:
		raise PreprocError("invalid token %r for get_num" % lst)
def get_term(lst):
	"""Evaluate a token list to a single value, handling binary operators
	with precedence, the ',' sequence operator and the '?:' ternary.

	Returns (value, remaining_tokens).
	"""
	if not lst: raise PreprocError("empty list for get_term")
	num, lst = get_num(lst)
	if not lst:
		return (num, [])
	(p, v) = lst[0]
	if p == OP:
		if v == '&&' and not num:
			# short-circuit: false && ... is false
			return (num, [])
		elif v == '||' and num:
			# short-circuit: true || ... is true
			return (num, [])
		elif v == ',':
			# skip
			return get_term(lst[1:])
		elif v == '?':
			# locate the ':' of this ternary (skipping parenthesised groups)
			count_par = 0
			i = 1
			while i < len(lst):
				(p, v) = lst[i]
				if p == OP:
					if v == ')':
						count_par -= 1
					elif v == '(':
						count_par += 1
					elif v == ':':
						if count_par == 0:
							break
				i += 1
			else:
				raise PreprocError("rparen expected %r" % lst)

			if int(num):
				return get_term(lst[1:i])
			else:
				return get_term(lst[i+1:])

		else:
			num2, lst = get_num(lst[1:])
			if not lst:
				# no more tokens to process
				num2 = reduce_nums(num, num2, v)
				return get_term([(NUM, num2)] + lst)

			# operator precedence
			p2, v2 = lst[0]
			if p2 != OP:
				raise PreprocError("op expected %r" % lst)

			if prec[v2] >= prec[v]:
				# current operator binds at least as tightly: fold now
				num2 = reduce_nums(num, num2, v)
				return get_term([(NUM, num2)] + lst)
			else:
				# next operator binds tighter: fold the right side first
				num3, lst = get_num(lst[1:])
				num3 = reduce_nums(num2, num3, v2)
				return get_term([(NUM, num), (p, v), (NUM, num3)] + lst)

	raise PreprocError("cannot reduce %r" % lst)
def reduce_eval(lst):
	"""take a list of tokens and output true or false (#if/#elif conditions)"""
	num, lst = get_term(lst)
	# wrap the result back into a NUM token
	return (NUM, num)
def stringize(lst):
	"""use for converting a list of tokens to a string"""
	return "".join([str(tok_val) for (_tok_type, tok_val) in lst])
def paste_tokens(t1, t2):
	"""
	here is what we can paste:
	 a ## b  ->  ab
	 > ## =  ->  >=
	 a ## 2  ->  a2
	"""
	kind = None
	if t1[0] == OP and t2[0] == OP:
		kind = OP
	elif t1[0] == IDENT and t2[0] in (IDENT, NUM):
		kind = IDENT
	elif t1[0] == NUM and t2[0] == NUM:
		kind = NUM
	if not kind:
		raise PreprocError('tokens do not make a valid paste %r and %r' % (t1, t2))
	return (kind, t1[1] + t2[1])
def reduce_tokens(lst, defs, ban=[]):
	"""replace the tokens in lst, using the macros provided in defs, and a list of macros that cannot be re-applied"""
	# NOTE(review): the mutable default 'ban=[]' is never mutated here
	# (recursive calls pass 'ban+[v]', a new list), so it is harmless.
	i = 0

	while i < len(lst):
		(p, v) = lst[i]

		if p == IDENT and v == "defined":
			# defined(X) / defined X -> 1 or 0
			del lst[i]
			if i < len(lst):
				(p2, v2) = lst[i]
				if p2 == IDENT:
					if v2 in defs:
						lst[i] = (NUM, 1)
					else:
						lst[i] = (NUM, 0)
				elif p2 == OP and v2 == '(':
					del lst[i]
					(p2, v2) = lst[i]
					del lst[i] # remove the ident, and change the ) for the value
					if v2 in defs:
						lst[i] = (NUM, 1)
					else:
						lst[i] = (NUM, 0)
				else:
					raise PreprocError("invalid define expression %r" % lst)

		elif p == IDENT and v in defs:

			# lazily parse string macro definitions into (params, tokens)
			if isinstance(defs[v], str):
				a, b = extract_macro(defs[v])
				defs[v] = b
			macro_def = defs[v]
			to_add = macro_def[1]

			if isinstance(macro_def[0], list):
				# macro without arguments
				del lst[i]
				for x in xrange(len(to_add)):
					lst.insert(i, to_add[x])
					i += 1
			else:
				# collect the arguments for the funcall
				args = []
				del lst[i]

				if i >= len(lst):
					raise PreprocError("expected '(' after %r (got nothing)" % v)

				(p2, v2) = lst[i]
				if p2 != OP or v2 != '(':
					raise PreprocError("expected '(' after %r" % v)

				del lst[i]

				one_param = []
				count_paren = 0
				while i < len(lst):
					p2, v2 = lst[i]

					del lst[i]
					if p2 == OP and count_paren == 0:
						if v2 == '(':
							one_param.append((p2, v2))
							count_paren += 1
						elif v2 == ')':
							if one_param: args.append(one_param)
							break
						elif v2 == ',':
							if not one_param: raise PreprocError("empty param in funcall %s" % p)
							args.append(one_param)
							one_param = []
						else:
							one_param.append((p2, v2))
					else:
						one_param.append((p2, v2))
						if v2 == '(': count_paren += 1
						elif v2 == ')': count_paren -= 1
				else:
					raise PreprocError('malformed macro')

				# substitute the arguments within the define expression
				accu = []
				arg_table = macro_def[0]
				j = 0
				while j < len(to_add):
					(p2, v2) = to_add[j]

					if p2 == OP and v2 == '#':
						# stringize is for arguments only
						if j+1 < len(to_add) and to_add[j+1][0] == IDENT and to_add[j+1][1] in arg_table:
							toks = args[arg_table[to_add[j+1][1]]]
							accu.append((STR, stringize(toks)))
							j += 1
						else:
							accu.append((p2, v2))
					elif p2 == OP and v2 == '##':
						# token pasting, how can man invent such a complicated system?
						if accu and j+1 < len(to_add):
							# we have at least two tokens

							t1 = accu[-1]

							if to_add[j+1][0] == IDENT and to_add[j+1][1] in arg_table:
								toks = args[arg_table[to_add[j+1][1]]]

								if toks:
									accu[-1] = paste_tokens(t1, toks[0]) #(IDENT, accu[-1][1] + toks[0][1])
									accu.extend(toks[1:])
								else:
									# error, case "a##"
									accu.append((p2, v2))
									accu.extend(toks)
							elif to_add[j+1][0] == IDENT and to_add[j+1][1] == '__VA_ARGS__':
								# TODO not sure
								# first collect the tokens
								va_toks = []
								st = len(macro_def[0])
								pt = len(args)
								for x in args[pt-st+1:]:
									va_toks.extend(x)
									va_toks.append((OP, ','))
								if va_toks: va_toks.pop() # extra comma
								if len(accu)>1:
									(p3, v3) = accu[-1]
									(p4, v4) = accu[-2]
									if v3 == '##':
										# remove the token paste
										accu.pop()
										if v4 == ',' and pt < st:
											# remove the comma
											accu.pop()
								accu += va_toks
							else:
								accu[-1] = paste_tokens(t1, to_add[j+1])

							j += 1
						else:
							# invalid paste, case "##a" or "b##"
							accu.append((p2, v2))

					elif p2 == IDENT and v2 in arg_table:
						# plain argument reference: expand it recursively first
						toks = args[arg_table[v2]]
						reduce_tokens(toks, defs, ban+[v])
						accu.extend(toks)
					else:
						accu.append((p2, v2))

					j += 1

				reduce_tokens(accu, defs, ban+[v])

				for x in xrange(len(accu)-1, -1, -1):
					lst.insert(i, accu[x])

		i += 1
def eval_macro(lst, adefs):
	"""reduce the tokens from the list lst, and try to return a 0/1 result"""
	reduce_tokens(lst, adefs, [])
	if not lst: raise PreprocError("missing tokens to evaluate")
	(p, v) = reduce_eval(lst)
	return int(v) != 0
def extract_macro(txt):
	"""process a macro definition from "#define f(x, y) x * y" into a function or a simple macro without arguments"""
	t = tokenize(txt)
	if re_fun.search(txt):
		# function-like macro: t[0] is the name, t[1] must be '('
		p, name = t[0]

		p, v = t[1]
		if p != OP: raise PreprocError("expected open parenthesis")

		# small state machine over the parameter list; 'prev' tracks the
		# previous token kind to validate the "ident , ident , ..." shape
		i = 1
		pindex = 0
		params = {}
		prev = '('

		while 1:
			i += 1
			p, v = t[i]

			if prev == '(':
				if p == IDENT:
					params[v] = pindex
					pindex += 1
					prev = p
				elif p == OP and v == ')':
					break
				else:
					raise PreprocError("unexpected token (3)")
			elif prev == IDENT:
				if p == OP and v == ',':
					prev = v
				elif p == OP and v == ')':
					break
				else:
					raise PreprocError("comma or ... expected")
			elif prev == ',':
				if p == IDENT:
					params[v] = pindex
					pindex += 1
					prev = p
				elif p == OP and v == '...':
					raise PreprocError("not implemented (1)")
				else:
					raise PreprocError("comma or ... expected (2)")
			elif prev == '...':
				raise PreprocError("not implemented (2)")
			else:
				raise PreprocError("unexpected else")

		#~ print (name, [params, t[i+1:]])
		# (name, [param-name -> position, body tokens])
		return (name, [params, t[i+1:]])
	else:
		# object-like macro: empty parameter list, body is the rest
		(p, v) = t[0]
		return (v, [[], t[1:]])
re_include = re.compile(r'^\s*(<(?P<a>.*)>|"(?P<b>.*)")')
def extract_include(txt, defs):
	"""Process a line in the form "#include foo".

	Returns a (kind, path) pair where kind is '<' or '"'. Falls back to
	macro expansion when the argument is not a literal include.
	"""
	m = re_include.search(txt)
	if m:
		if m.group('a'): return '<', m.group('a')
		if m.group('b'): return '"', m.group('b')

	# perform preprocessing and look at the result, it must match an include
	toks = tokenize(txt)
	reduce_tokens(toks, defs, ['waf_include'])

	if not toks:
		raise PreprocError("could not parse include %s" % txt)

	if len(toks) == 1:
		if toks[0][0] == STR:
			return '"', toks[0][1]
	else:
		if toks[0][1] == '<' and toks[-1][1] == '>':
			# bugfix: return the same (kind, path) pair as every other
			# branch; callers unpack two values and a bare string here
			# would be unpacked character-by-character
			return '<', stringize(toks).lstrip('<').rstrip('>')

	raise PreprocError("could not parse include %s." % txt)
def parse_char(txt):
	"""Parse the body of a C character literal (quotes stripped) into its
	numeric value.

	NOTE(review): the multi-digit octal branch returns a (length, value)
	tuple while every other branch returns a plain int — kept as-is since
	callers appear to tolerate it; confirm before changing.
	"""
	if not txt: raise PreprocError("attempted to parse a null char")
	if txt[0] != '\\':
		return ord(txt)
	c = txt[1]
	if c == 'x':
		# hex escape: value of everything after '\x'
		# (a redundant length-4 special case returning the identical
		# expression was removed here as dead code)
		return int(txt[2:], 16)
	elif c.isdigit():
		if c == '0' and len(txt)==2: return 0
		for i in 3, 2, 1:
			if len(txt) > i and txt[1:1+i].isdigit():
				return (1+i, int(txt[1:1+i], 8))
	else:
		try: return chr_esc[c]
		except KeyError: raise PreprocError("could not parse char literal '%s'" % txt)
def tokenize(s):
	"""convert a string into a list of tokens (shlex.split does not apply to c/c++/d)"""
	ret = []
	for match in re_clexer.finditer(s):
		m = match.group
		# find which alternative of the lexer regex matched
		for name in tok_types:
			v = m(name)
			if v:
				if name == IDENT:
					# translate c++ alternative operator spellings (and, or, ...)
					try: v = g_optrans[v]; name = OP
					except KeyError:
						# c++ specific
						if v.lower() == "true":
							v = 1
							name = NUM
						elif v.lower() == "false":
							v = 0
							name = NUM
				elif name == NUM:
					# normalise numeric literals: octal, hex, char or decimal
					if m('oct'): v = int(v, 8)
					elif m('hex'): v = int(m('hex'), 16)
					elif m('n0'): v = m('n0')
					else:
						v = m('char')
						if v: v = parse_char(v)
						else: v = m('n2') or m('n4')
				elif name == OP:
					# digraph spellings
					if v == '%:': v = '#'
					elif v == '%:%:': v = '##'
				elif name == STR:
					# remove the quotes around the string
					v = v[1:-1]
				ret.append((name, v))
				break
	return ret
class c_parser(object):
	"""Stateful C/C++ preprocessor that walks a translation unit and records
	every resolvable #include as a node (self.nodes) and every unresolved
	one as a plain name (self.names)."""

	def __init__(self, nodepaths=None, defines=None):
		#self.lines = txt.split('\n')
		self.lines = []

		if defines is None:
			self.defs = {}
		else:
			self.defs = dict(defines) # make a copy
		self.state = []

		self.env = None # needed for the variant when searching for files
		self.count_files = 0
		self.currentnode_stack = []

		self.nodepaths = nodepaths or []

		self.nodes = []
		self.names = []

		# file added
		self.curfile = ''
		self.ban_includes = []

	def tryfind(self, filename):
		"""Resolve *filename* against the current directory then the include
		paths; record it in self.nodes (found) or self.names (not found)."""
		self.curfile = filename

		# for msvc it should be a for loop on the whole stack
		found = self.currentnode_stack[-1].find_resource(filename)
		for n in self.nodepaths:
			if found:
				break
			found = n.find_resource(filename)

		if not found:
			if not filename in self.names:
				self.names.append(filename)
		else:
			self.nodes.append(found)
			# .moc files are generated later; do not recurse into them
			if filename[-4:] != '.moc':
				self.addlines(found)
		return found

	def addlines(self, node):
		"""Prepend the filtered directive lines of *node* to the processing
		queue, using the per-build parse cache when available."""
		self.currentnode_stack.append(node.parent)
		filepath = node.abspath(self.env)

		self.count_files += 1
		if self.count_files > recursion_limit: raise PreprocError("recursion limit exceeded")
		pc = self.parse_cache
		debug('preproc: reading file %r', filepath)
		try:
			lns = pc[filepath]
		except KeyError:
			pass
		else:
			self.lines = lns + self.lines
			return

		try:
			lines = filter_comments(filepath)
			lines.append((POPFILE, ''))
			pc[filepath] = lines # cache the lines filtered
			self.lines = lines + self.lines
		except IOError:
			raise PreprocError("could not read the file %s" % filepath)
		except Exception:
			# best-effort: log the parse failure but do not abort the scan
			if Logs.verbose > 0:
				error("parsing %s failed" % filepath)
				traceback.print_exc()

	def start(self, node, env):
		"""Scan *node* and everything it includes, filling self.nodes and
		self.names."""
		debug('preproc: scanning %s (in %s)', node.name, node.parent.name)

		self.env = env
		variant = node.variant(env)
		bld = node.__class__.bld
		# the parse cache is shared across scans on the build context
		try:
			self.parse_cache = bld.parse_cache
		except AttributeError:
			bld.parse_cache = {}
			self.parse_cache = bld.parse_cache

		self.addlines(node)
		if env['DEFLINES']:
			# command-line defines are processed before any file content
			self.lines = [('define', x) for x in env['DEFLINES']] + self.lines

		while self.lines:
			(kind, line) = self.lines.pop(0)
			if kind == POPFILE:
				self.currentnode_stack.pop()
				continue
			try:
				self.process_line(kind, line)
			except Exception, e:
				if Logs.verbose:
					debug('preproc: line parsing failed (%s): %s %s', e, line, Utils.ex_stack())

	def process_line(self, token, line):
		"""Dispatch one preprocessor directive, maintaining the #if state
		stack (accepted/ignored/skipped/undefined)."""
		ve = Logs.verbose
		if ve: debug('preproc: line is %s - %s state is %s', token, line, self.state)
		state = self.state

		# make certain we define the state if we are about to enter in an if block
		if token in ['ifdef', 'ifndef', 'if']:
			state.append(undefined)
		elif token == 'endif':
			state.pop()

		# skip lines when in a dead 'if' branch, wait for the endif
		if not token in ['else', 'elif', 'endif']:
			if skipped in self.state or ignored in self.state:
				return

		if token == 'if':
			ret = eval_macro(tokenize(line), self.defs)
			if ret: state[-1] = accepted
			else: state[-1] = ignored
		elif token == 'ifdef':
			m = re_mac.search(line)
			if m and m.group(0) in self.defs: state[-1] = accepted
			else: state[-1] = ignored
		elif token == 'ifndef':
			m = re_mac.search(line)
			if m and m.group(0) in self.defs: state[-1] = ignored
			else: state[-1] = accepted
		elif token == 'include' or token == 'import':
			(kind, inc) = extract_include(line, self.defs)
			if inc in self.ban_includes: return
			# '#import' files are included at most once
			if token == 'import': self.ban_includes.append(inc)
			if ve: debug('preproc: include found %s (%s) ', inc, kind)
			if kind == '"' or not strict_quotes:
				self.tryfind(inc)
		elif token == 'elif':
			if state[-1] == accepted:
				state[-1] = skipped
			elif state[-1] == ignored:
				if eval_macro(tokenize(line), self.defs):
					state[-1] = accepted
		elif token == 'else':
			if state[-1] == accepted: state[-1] = skipped
			elif state[-1] == ignored: state[-1] = accepted
		elif token == 'define':
			m = re_mac.search(line)
			if m:
				name = m.group(0)
				if ve: debug('preproc: define %s %s', name, line)
				# store the raw line; it is parsed lazily by reduce_tokens
				self.defs[name] = line
			else:
				raise PreprocError("invalid define line %s" % line)
		elif token == 'undef':
			m = re_mac.search(line)
			if m and m.group(0) in self.defs:
				self.defs.__delitem__(m.group(0))
				#print "undef %s" % name
		elif token == 'pragma':
			if re_pragma_once.search(line.lower()):
				self.ban_includes.append(self.curfile)
def get_deps(node, env, nodepaths=[]):
	"""
	Get the dependencies using a c/c++ preprocessor, this is required for finding dependencies of the kind
	#include some_macro()
	"""
	parser = c_parser(nodepaths)
	parser.start(node, env)
	return (parser.nodes, parser.names)
#################### dumb dependency scanner

# like re_lines but only matches #include directives
re_inc = re.compile(\
	'^[ \t]*(#|%:)[ \t]*(include)[ \t]*(.*)\r*$',
	re.IGNORECASE | re.MULTILINE)

def lines_includes(filename):
	"""Return the (keyword, argument) pairs of the #include lines of
	*filename*, after stripping comments and line continuations."""
	code = Utils.readf(filename)
	if use_trigraphs:
		for (a, b) in trig_def: code = code.split(a).join(b)
	code = re_nl.sub('', code)
	code = re_cpp.sub(repl, code)
	return [(m.group(2), m.group(3)) for m in re.finditer(re_inc, code)]
def get_deps_simple(node, env, nodepaths=[], defines={}):
	"""
	Get the dependencies by just looking recursively at the #include statements
	"""
	nodes = []
	names = []

	def find_deps(node):
		lst = lines_includes(node.abspath(env))

		for (_, line) in lst:
			(t, filename) = extract_include(line, defines)
			if filename in names:
				continue

			if filename.endswith('.moc'):
				names.append(filename)

			found = None
			for n in nodepaths:
				if found:
					break
				found = n.find_resource(filename)

			if not found:
				if not filename in names:
					names.append(filename)
			elif not found in nodes:
				nodes.append(found)
				# NOTE(review): recursing on 'node' rather than 'found'
				# looks suspicious (it re-scans the same file; the
				# names/nodes checks prevent infinite recursion) — confirm
				# the intent before changing
				find_deps(node)

	find_deps(node)
	return (nodes, names)

401
tools/wafadmin/Tools/python.py

@ -1,401 +0,0 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2007 (ita)
# Gustavo Carneiro (gjc), 2007
"Python support"
import os, sys
import TaskGen, Utils, Utils, Runner, Options, Build
from Logs import debug, warn, info
from TaskGen import extension, taskgen, before, after, feature
from Configure import conf
# Source extensions handled by the python tool.
EXT_PY = ['.py']

# C fragment compiled at configure time to verify that a program can embed
# a python interpreter (Py_Initialize/Py_Finalize must link).
FRAG_2 = '''
#ifdef __cplusplus
extern "C" {
#endif
void Py_Initialize(void);
void Py_Finalize(void);
#ifdef __cplusplus
}
#endif
int main()
{
Py_Initialize();
Py_Finalize();
return 0;
}
'''
@before('apply_incpaths', 'apply_lib_vars', 'apply_type_vars')
@feature('pyext')
@before('apply_bundle')
def init_pyext(self):
	"""Prepare a python extension target: install to PYTHONDIR, use the
	PYEXT build flags and build as a mac bundle on OSX."""
	self.default_install_path = '${PYTHONDIR}'
	self.uselib = self.to_list(getattr(self, 'uselib', ''))
	if not 'PYEXT' in self.uselib:
		self.uselib.append('PYEXT')
	self.env['MACBUNDLE'] = True
@before('apply_link', 'apply_lib_vars', 'apply_type_vars')
@after('apply_bundle')
@feature('pyext')
def pyext_shlib_ext(self):
	# override shlib_PATTERN set by the osx module
	self.env['shlib_PATTERN'] = self.env['pyext_PATTERN']
@before('apply_incpaths', 'apply_lib_vars', 'apply_type_vars')
@feature('pyembed')
def init_pyembed(self):
	"""Pull in the PYEMBED flags for targets embedding a python interpreter."""
	self.uselib = self.to_list(getattr(self, 'uselib', ''))
	if not 'PYEMBED' in self.uselib:
		self.uselib.append('PYEMBED')
@extension(EXT_PY)
def process_py(self, node):
	"""Schedule installation of a .py source file after the build completes."""
	if not (self.bld.is_install and self.install_path):
		return
	def inst_py(ctx):
		install_pyfile(self, node)
	self.bld.add_post_fun(inst_py)
def install_pyfile(self, node):
	"""Install a .py file; byte-compile it (.pyc/.pyo) on install or remove
	the compiled files on uninstall (bld.is_install < 0)."""
	path = self.bld.get_install_path(self.install_path + os.sep + node.name, self.env)

	self.bld.install_files(self.install_path, [node], self.env, self.chmod, postpone=False)
	if self.bld.is_install < 0:
		info("* removing byte compiled python files")
		for x in 'co':
			try:
				os.remove(path + x)
			except OSError:
				pass

	if self.bld.is_install > 0:
		if self.env['PYC'] or self.env['PYO']:
			info("* byte compiling %r" % path)

		if self.env['PYC']:
			# run the target interpreter so the bytecode matches it
			program = ("""
import sys, py_compile
for pyfile in sys.argv[1:]:
	py_compile.compile(pyfile, pyfile + 'c')
""")
			argv = [self.env['PYTHON'], '-c', program, path]
			ret = Utils.pproc.Popen(argv).wait()
			if ret:
				raise Utils.WafError('bytecode compilation failed %r' % path)

		if self.env['PYO']:
			program = ("""
import sys, py_compile
for pyfile in sys.argv[1:]:
	py_compile.compile(pyfile, pyfile + 'o')
""")
			argv = [self.env['PYTHON'], self.env['PYFLAGS_OPT'], '-c', program, path]
			ret = Utils.pproc.Popen(argv).wait()
			if ret:
				raise Utils.WafError('bytecode compilation failed %r' % path)
# COMPAT
class py_taskgen(TaskGen.task_gen):
	"""Backward-compatibility task generator class for 'py' targets."""
	def __init__(self, *k, **kw):
		TaskGen.task_gen.__init__(self, *k, **kw)
@before('apply_core')
@after('vars_target_cprogram', 'vars_target_cstaticlib')
@feature('py')
def init_py(self):
    """Default the install location of plain .py files to ${PYTHONDIR}."""
    self.default_install_path = '${PYTHONDIR}'
def _get_python_variables(python_exe, variables, imports=['import sys']):
    """Run a python interpreter and print some variables

    Spawns *python_exe* with a generated script that repr()-prints each
    expression in *variables*; parses the output back into python values
    (None, quoted strings, integers). Raises RuntimeError when the
    interpreter exits with an error.
    """
    code = list(imports)
    code.append('')
    for expr in variables:
        code.append("print(repr(%s))" % expr)
    clean_env = dict(os.environ)
    try:
        del clean_env['MACOSX_DEPLOYMENT_TARGET']  # see comments in the OSX tool
    except KeyError:
        pass
    proc = Utils.pproc.Popen([python_exe, "-c", '\n'.join(code)],
        stdout=Utils.pproc.PIPE, env=clean_env)
    output = proc.communicate()[0].split("\n")  # do not touch, python3
    if proc.returncode:
        if Options.options.verbose:
            warn("Python program to extract python configuration variables failed:\n%s"
                % '\n'.join(["line %03i: %s" % (lineno+1, line) for lineno, line in enumerate(code)]))
        raise RuntimeError
    values = []
    for raw in output:
        raw = raw.strip()
        if not raw:
            continue
        if raw == 'None':
            values.append(None)
        elif raw[0] == "'" and raw[-1] == "'":
            values.append(raw[1:-1])
        elif raw[0].isdigit():
            values.append(int(raw))
        else:
            # anything unparseable terminates the scan
            break
    return values
@conf
def check_python_headers(conf):
    """Check for headers and libraries necessary to extend or embed python.
    On success the environment variables xxx_PYEXT and xxx_PYEMBED are added for uselib
    PYEXT: for compiling python extensions
    PYEMBED: for embedding a python interpreter"""
    # a C or C++ compiler must be configured before probing python headers
    if not conf.env['CC_NAME'] and not conf.env['CXX_NAME']:
        conf.fatal('load a compiler first (gcc, g++, ..)')
    if not conf.env['PYTHON_VERSION']:
        conf.check_python_version()
    env = conf.env
    python = env['PYTHON']
    if not python:
        conf.fatal('could not find the python executable')
    ## On Mac OSX we need to use mac bundles for python plugins
    if Options.platform == 'darwin':
        conf.check_tool('osx')
    try:
        # Get some python configuration variables using distutils
        v = 'prefix SO SYSLIBS LDFLAGS SHLIBS LIBDIR LIBPL INCLUDEPY Py_ENABLE_SHARED MACOSX_DEPLOYMENT_TARGET'.split()
        (python_prefix, python_SO, python_SYSLIBS, python_LDFLAGS, python_SHLIBS,
         python_LIBDIR, python_LIBPL, INCLUDEPY, Py_ENABLE_SHARED,
         python_MACOSX_DEPLOYMENT_TARGET) = \
            _get_python_variables(python, ["get_config_var('%s')" % x for x in v],
                ['from distutils.sysconfig import get_config_var'])
    except RuntimeError:
        conf.fatal("Python development headers not found (-v for details).")
    conf.log.write("""Configuration returned from %r:
python_prefix = %r
python_SO = %r
python_SYSLIBS = %r
python_LDFLAGS = %r
python_SHLIBS = %r
python_LIBDIR = %r
python_LIBPL = %r
INCLUDEPY = %r
Py_ENABLE_SHARED = %r
MACOSX_DEPLOYMENT_TARGET = %r
""" % (python, python_prefix, python_SO, python_SYSLIBS, python_LDFLAGS, python_SHLIBS,
    python_LIBDIR, python_LIBPL, INCLUDEPY, Py_ENABLE_SHARED, python_MACOSX_DEPLOYMENT_TARGET))
    if python_MACOSX_DEPLOYMENT_TARGET:
        conf.env['MACOSX_DEPLOYMENT_TARGET'] = python_MACOSX_DEPLOYMENT_TARGET
        conf.environ['MACOSX_DEPLOYMENT_TARGET'] = python_MACOSX_DEPLOYMENT_TARGET
    # extension suffix, e.g. '.so' — becomes 'name.so' via the %s pattern
    env['pyext_PATTERN'] = '%s'+python_SO
    # Check for python libraries for embedding
    if python_SYSLIBS is not None:
        for lib in python_SYSLIBS.split():
            if lib.startswith('-l'):
                lib = lib[2:] # strip '-l'
            env.append_value('LIB_PYEMBED', lib)
    if python_SHLIBS is not None:
        for lib in python_SHLIBS.split():
            if lib.startswith('-l'):
                env.append_value('LIB_PYEMBED', lib[2:]) # strip '-l'
            else:
                env.append_value('LINKFLAGS_PYEMBED', lib)
    if Options.platform != 'darwin' and python_LDFLAGS:
        env.append_value('LINKFLAGS_PYEMBED', python_LDFLAGS.split())
    # probe for the python library in several candidate locations, in order
    result = False
    name = 'python' + env['PYTHON_VERSION']
    if python_LIBDIR is not None:
        path = [python_LIBDIR]
        conf.log.write("\n\n# Trying LIBDIR: %r\n" % path)
        result = conf.check(lib=name, uselib='PYEMBED', libpath=path)
    if not result and python_LIBPL is not None:
        conf.log.write("\n\n# try again with -L$python_LIBPL (some systems don't install the python library in $prefix/lib)\n")
        path = [python_LIBPL]
        result = conf.check(lib=name, uselib='PYEMBED', libpath=path)
    if not result:
        conf.log.write("\n\n# try again with -L$prefix/libs, and pythonXY name rather than pythonX.Y (win32)\n")
        path = [os.path.join(python_prefix, "libs")]
        name = 'python' + env['PYTHON_VERSION'].replace('.', '')
        result = conf.check(lib=name, uselib='PYEMBED', libpath=path)
    if result:
        env['LIBPATH_PYEMBED'] = path
        env.append_value('LIB_PYEMBED', name)
    else:
        conf.log.write("\n\n### LIB NOT FOUND\n")
    # under certain conditions, python extensions must link to
    # python libraries, not just python embedding programs.
    if (sys.platform == 'win32' or sys.platform.startswith('os2')
        or sys.platform == 'darwin' or Py_ENABLE_SHARED):
        env['LIBPATH_PYEXT'] = env['LIBPATH_PYEMBED']
        env['LIB_PYEXT'] = env['LIB_PYEMBED']
    # We check that pythonX.Y-config exists, and if it exists we
    # use it to get only the includes, else fall back to distutils.
    python_config = conf.find_program(
        'python%s-config' % ('.'.join(env['PYTHON_VERSION'].split('.')[:2])),
        var='PYTHON_CONFIG')
    if not python_config:
        python_config = conf.find_program(
            'python-config-%s' % ('.'.join(env['PYTHON_VERSION'].split('.')[:2])),
            var='PYTHON_CONFIG')
    includes = []
    if python_config:
        for incstr in Utils.cmd_output("%s %s --includes" % (python, python_config)).strip().split():
            # strip the -I or /I
            if (incstr.startswith('-I')
                or incstr.startswith('/I')):
                incstr = incstr[2:]
            # append include path, unless already given
            if incstr not in includes:
                includes.append(incstr)
        conf.log.write("Include path for Python extensions "
            "(found via python-config --includes): %r\n" % (includes,))
        env['CPPPATH_PYEXT'] = includes
        env['CPPPATH_PYEMBED'] = includes
    else:
        conf.log.write("Include path for Python extensions "
            "(found via distutils module): %r\n" % (INCLUDEPY,))
        env['CPPPATH_PYEXT'] = [INCLUDEPY]
        env['CPPPATH_PYEMBED'] = [INCLUDEPY]
    # Code using the Python API needs to be compiled with -fno-strict-aliasing
    if env['CC_NAME'] == 'gcc':
        env.append_value('CCFLAGS_PYEMBED', '-fno-strict-aliasing')
        env.append_value('CCFLAGS_PYEXT', '-fno-strict-aliasing')
    if env['CXX_NAME'] == 'gcc':
        env.append_value('CXXFLAGS_PYEMBED', '-fno-strict-aliasing')
        env.append_value('CXXFLAGS_PYEXT', '-fno-strict-aliasing')
    # See if it compiles (FRAG_2 is a minimal Python.h program defined above)
    conf.check(header_name='Python.h', define_name='HAVE_PYTHON_H',
        uselib='PYEMBED', fragment=FRAG_2,
        errmsg='Could not find the python development headers', mandatory=1)
@conf
def check_python_version(conf, minver=None):
    """
    Check if the python interpreter is found matching a given minimum version.
    minver should be a tuple, eg. to check for python >= 2.4.2 pass (2,4,2) as minver.
    If successful, PYTHON_VERSION is defined as 'MAJOR.MINOR'
    (eg. '2.4') of the actual python version found, and PYTHONDIR is
    defined, pointing to the site-packages directory appropriate for
    this python version, where modules/packages/extensions should be
    installed.
    """
    assert minver is None or isinstance(minver, tuple)
    python = conf.env['PYTHON']
    if not python:
        conf.fatal('could not find the python executable')
    # Get python version string: prints the 5 components of sys.version_info
    cmd = [python, "-c", "import sys\nfor x in sys.version_info: print(str(x))"]
    debug('python: Running python command %r' % cmd)
    proc = Utils.pproc.Popen(cmd, stdout=Utils.pproc.PIPE)
    lines = proc.communicate()[0].split()
    assert len(lines) == 5, "found %i lines, expected 5: %r" % (len(lines), lines)
    # (major, minor, micro, releaselevel, serial) — tuple comparison works for minver
    pyver_tuple = (int(lines[0]), int(lines[1]), int(lines[2]), lines[3], int(lines[4]))
    # compare python version with the minimum required
    result = (minver is None) or (pyver_tuple >= minver)
    if result:
        # define useful environment variables
        pyver = '.'.join([str(x) for x in pyver_tuple[:2]])
        conf.env['PYTHON_VERSION'] = pyver
        if 'PYTHONDIR' in conf.environ:
            # user override wins
            pydir = conf.environ['PYTHONDIR']
        else:
            if sys.platform == 'win32':
                (python_LIBDEST, pydir) = \
                    _get_python_variables(python,
                        ["get_config_var('LIBDEST')",
                         "get_python_lib(standard_lib=0, prefix=%r)" % conf.env['PREFIX']],
                        ['from distutils.sysconfig import get_config_var, get_python_lib'])
            else:
                python_LIBDEST = None
                (pydir,) = \
                    _get_python_variables(python,
                        ["get_python_lib(standard_lib=0, prefix=%r)" % conf.env['PREFIX']],
                        ['from distutils.sysconfig import get_config_var, get_python_lib'])
            if python_LIBDEST is None:
                # fall back to <LIBDIR or PREFIX/lib>/pythonX.Y
                if conf.env['LIBDIR']:
                    python_LIBDEST = os.path.join(conf.env['LIBDIR'], "python" + pyver)
                else:
                    python_LIBDEST = os.path.join(conf.env['PREFIX'], "lib", "python" + pyver)
        if hasattr(conf, 'define'): # conf.define is added by the C tool, so may not exist
            conf.define('PYTHONDIR', pydir)
        conf.env['PYTHONDIR'] = pydir
    # Feedback
    pyver_full = '.'.join(map(str, pyver_tuple[:3]))
    if minver is None:
        conf.check_message_custom('Python version', '', pyver_full)
    else:
        minver_str = '.'.join(map(str, minver))
        conf.check_message('Python version', ">= %s" % minver_str, result, option=pyver_full)
    if not result:
        conf.fatal('The python version is too old (%r)' % pyver_full)
@conf
def check_python_module(conf, module_name):
    """
    Check if the selected python interpreter can import the given python module.
    """
    cmd = [conf.env['PYTHON'], "-c", "import %s" % module_name]
    proc = Utils.pproc.Popen(cmd, stderr=Utils.pproc.PIPE, stdout=Utils.pproc.PIPE)
    result = not proc.wait()
    conf.check_message('Python module', module_name, result)
    if not result:
        conf.fatal('Could not find the python module %r' % module_name)
def detect(conf):
    """Configure the python tool: locate the interpreter and set the
    byte-compilation defaults."""
    # default to the interpreter running waf before searching PATH
    if not conf.env.PYTHON:
        conf.env.PYTHON = sys.executable
    python = conf.find_program('python', var='PYTHON')
    if not python:
        conf.fatal('Could not find the path of the python executable')
    v = conf.env
    v['PYCMD'] = '"import sys, py_compile;py_compile.compile(sys.argv[1], sys.argv[2])"'
    v['PYFLAGS'] = ''
    v['PYFLAGS_OPT'] = '-O'
    # PYC/PYO come from --nopyc/--nopyo; default to installing both
    v['PYC'] = getattr(Options.options, 'pyc', 1)
    v['PYO'] = getattr(Options.options, 'pyo', 1)
def set_options(opt):
    """Register the --nopyc/--nopyo install options."""
    opt.add_option('--nopyc',
        action='store_false',
        default=1,
        dest='pyc',
        help='Do not install bytecode compiled .pyc files (configuration) [Default:install]')
    opt.add_option('--nopyo',
        action='store_false',
        default=1,
        dest='pyo',
        help='Do not install optimised compiled .pyo files (configuration) [Default:install]')

77
tools/wafadmin/Tools/suncc.py

@ -1,77 +0,0 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2006 (ita)
# Ralf Habacker, 2006 (rh)
import os, optparse
import Utils, Options, Configure
import ccroot, ar
from Configure import conftest
@conftest
def find_scc(conf):
    """Locate the Sun C compiler ('cc') and verify it answers to -flags."""
    v = conf.env
    cc = None
    if v['CC']:
        cc = v['CC']
    elif 'CC' in conf.environ:
        cc = conf.environ['CC']
    if not cc:
        cc = conf.find_program('cc', var='CC')
    if not cc:
        conf.fatal('suncc was not found')
    cc = conf.cmd_to_list(cc)
    # sun cc prints its option summary for -flags; gcc would fail here
    try:
        if not Utils.cmd_output(cc + ['-flags']):
            conf.fatal('suncc %r was not found' % cc)
    except ValueError:
        conf.fatal('suncc -flags could not be executed')
    v['CC'] = cc
    v['CC_NAME'] = 'sun'
@conftest
def scc_common_flags(conf):
    """Populate the env with the flag templates for the Sun C compiler."""
    v = conf.env
    # CPPFLAGS CCDEFINES _CCINCFLAGS _CCDEFFLAGS
    v['CC_SRC_F'] = ''
    v['CC_TGT_F'] = ['-c', '-o', '']
    v['CPPPATH_ST'] = '-I%s' # template for adding include paths
    # linker
    if not v['LINK_CC']: v['LINK_CC'] = v['CC']
    v['CCLNK_SRC_F'] = ''
    v['CCLNK_TGT_F'] = ['-o', ''] # solaris hack, separate the -o from the target
    v['LIB_ST'] = '-l%s' # template for adding libs
    v['LIBPATH_ST'] = '-L%s' # template for adding libpaths
    v['STATICLIB_ST'] = '-l%s'
    v['STATICLIBPATH_ST'] = '-L%s'
    v['CCDEFINES_ST'] = '-D%s'
    v['SONAME_ST'] = '-Wl,-h -Wl,%s'
    v['SHLIB_MARKER'] = '-Bdynamic'
    v['STATICLIB_MARKER'] = '-Bstatic'
    # program
    v['program_PATTERN'] = '%s'
    # shared library
    v['shlib_CCFLAGS'] = ['-Kpic', '-DPIC']
    v['shlib_LINKFLAGS'] = ['-G']
    v['shlib_PATTERN'] = 'lib%s.so'
    # static lib
    v['staticlib_LINKFLAGS'] = ['-Bstatic']
    v['staticlib_PATTERN'] = 'lib%s.a'
# configuration steps executed in order when this tool is loaded
detect = '''
find_scc
find_cpp
find_ar
scc_common_flags
cc_load_tools
cc_add_flags
link_add_flags
'''

75
tools/wafadmin/Tools/suncxx.py

@ -1,75 +0,0 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2006 (ita)
# Ralf Habacker, 2006 (rh)
import os, optparse
import Utils, Options, Configure
import ccroot, ar
from Configure import conftest
@conftest
def find_sxx(conf):
    """Locate the Sun C++ compiler ('c++') and verify it answers to -flags."""
    v = conf.env
    cxx = None
    if v['CXX']:
        cxx = v['CXX']
    elif 'CXX' in conf.environ:
        cxx = conf.environ['CXX']
    if not cxx:
        cxx = conf.find_program('c++', var='CXX')
    if not cxx:
        conf.fatal('sunc++ was not found')
    cxx = conf.cmd_to_list(cxx)
    # the sun compiler prints an option summary for -flags; others would fail
    try:
        if not Utils.cmd_output(cxx + ['-flags']):
            conf.fatal('sunc++ %r was not found' % cxx)
    except ValueError:
        conf.fatal('sunc++ -flags could not be executed')
    v['CXX'] = cxx
    v['CXX_NAME'] = 'sun'
@conftest
def sxx_common_flags(conf):
    """Populate the env with the flag templates for the Sun C++ compiler."""
    v = conf.env
    # CPPFLAGS CXXDEFINES _CXXINCFLAGS _CXXDEFFLAGS
    v['CXX_SRC_F'] = ''
    v['CXX_TGT_F'] = ['-c', '-o', '']
    v['CPPPATH_ST'] = '-I%s' # template for adding include paths
    # linker
    if not v['LINK_CXX']: v['LINK_CXX'] = v['CXX']
    v['CXXLNK_SRC_F'] = ''
    v['CXXLNK_TGT_F'] = ['-o', ''] # solaris hack, separate the -o from the target
    v['LIB_ST'] = '-l%s' # template for adding libs
    v['LIBPATH_ST'] = '-L%s' # template for adding libpaths
    v['STATICLIB_ST'] = '-l%s'
    v['STATICLIBPATH_ST'] = '-L%s'
    v['CXXDEFINES_ST'] = '-D%s'
    v['SONAME_ST'] = '-Wl,-h -Wl,%s'
    v['SHLIB_MARKER'] = '-Bdynamic'
    v['STATICLIB_MARKER'] = '-Bstatic'
    # program
    v['program_PATTERN'] = '%s'
    # shared library
    v['shlib_CXXFLAGS'] = ['-Kpic', '-DPIC']
    v['shlib_LINKFLAGS'] = ['-G']
    v['shlib_PATTERN'] = 'lib%s.so'
    # static lib
    v['staticlib_LINKFLAGS'] = ['-Bstatic']
    v['staticlib_PATTERN'] = 'lib%s.a'
# configuration steps executed in order when this tool is loaded
detect = '''
find_sxx
find_cpp
find_ar
sxx_common_flags
cxx_load_tools
cxx_add_flags
'''

305
tools/wafadmin/Tools/unittestw.py

@ -1,305 +0,0 @@
#!/usr/bin/env python
# encoding: utf-8
# Carlos Rafael Giani, 2006
"""
Unit tests run in the shutdown() method, and for c/c++ programs
One should NOT have to give parameters to programs to execute
In the shutdown method, add the following code:
>>> def shutdown():
... ut = UnitTest.unit_test()
... ut.run()
... ut.print_results()
Each object to use as a unit test must be a program and must have X{obj.unit_test=1}
"""
import os, sys
import Build, TaskGen, Utils, Options, Logs, Task
from TaskGen import before, after, feature
from Constants import *
class unit_test(object):
    """Unit test representation: collects unit-test programs from the build,
    runs them, and pretty-prints a summary. Intended to be driven from a
    wscript shutdown() function."""
    def __init__(self):
        # Unit test returncode considered OK. All returncodes differing from
        # this one will cause the unit test to be marked as "FAILED".
        self.returncode_ok = 0
        # The following variables are filled with data by run().
        # print_results() uses these for printing the unit test summary,
        # but if there is need for direct access to the results,
        # they can be retrieved here, after calling run().
        self.num_tests_ok = 0 # Number of successful unit tests
        self.num_tests_failed = 0 # Number of failed unit tests
        self.num_tests_err = 0 # Tests that have not even run
        self.total_num_tests = 0 # Total amount of unit tests
        self.max_label_length = 0 # Maximum label length (pretty-print the output)
        # Unit test dictionary. Key: the label (unit test filename relative
        # to the build dir), value: unit test filename with absolute path
        self.unit_tests = Utils.ordered_dict()
        # Dictionary containing the unit test results.
        # Key: the label, value: result (true = success false = failure)
        self.unit_test_results = {}
        # Dictionary indicating erroneous unit tests.
        # Key: the label, value: true = unit test has an error false = unit test is ok
        self.unit_test_erroneous = {}
        self.change_to_testfile_dir = False # True if the test file needs to be executed from the same dir
        self.want_to_see_test_output = False # True to see the stdout from the testfile (for example check suites)
        self.want_to_see_test_error = False # True to see the stderr from the testfile (for example check suites)
        self.run_if_waf_does = 'check' # build was the old default
    def run(self):
        "Run the unit tests and gather results (note: no output here)"
        # reset all state so run() can be called repeatedly
        self.num_tests_ok = 0
        self.num_tests_failed = 0
        self.num_tests_err = 0
        self.total_num_tests = 0
        self.max_label_length = 0
        self.unit_tests = Utils.ordered_dict()
        self.unit_test_results = {}
        self.unit_test_erroneous = {}
        ld_library_path = []
        # If waf is not building, don't run anything
        if not Options.commands[self.run_if_waf_does]: return
        # Get the paths for the shared libraries, and obtain the unit tests to execute
        for obj in Build.bld.all_task_gen:
            try:
                link_task = obj.link_task
            except AttributeError:
                pass
            else:
                lib_path = link_task.outputs[0].parent.abspath(obj.env)
                if lib_path not in ld_library_path:
                    ld_library_path.append(lib_path)
            unit_test = getattr(obj, 'unit_test', '')
            if unit_test and 'cprogram' in obj.features:
                try:
                    output = obj.path
                    filename = os.path.join(output.abspath(obj.env), obj.target)
                    srcdir = output.abspath()
                    label = os.path.join(output.bldpath(obj.env), obj.target)
                    self.max_label_length = max(self.max_label_length, len(label))
                    self.unit_tests[label] = (filename, srcdir)
                except KeyError:
                    pass
        self.total_num_tests = len(self.unit_tests)
        # Now run the unit tests
        Utils.pprint('GREEN', 'Running the unit tests')
        count = 0
        result = 1
        for label in self.unit_tests.allkeys:
            file_and_src = self.unit_tests[label]
            filename = file_and_src[0]
            srcdir = file_and_src[1]
            count += 1
            line = Build.bld.progress_line(count, self.total_num_tests, Logs.colors.GREEN, Logs.colors.NORMAL)
            if Options.options.progress_bar and line:
                sys.stderr.write(line)
                sys.stderr.flush()
            try:
                kwargs = {}
                kwargs['env'] = os.environ.copy()
                if self.change_to_testfile_dir:
                    kwargs['cwd'] = srcdir
                if not self.want_to_see_test_output:
                    kwargs['stdout'] = Utils.pproc.PIPE # PIPE for ignoring output
                if not self.want_to_see_test_error:
                    kwargs['stderr'] = Utils.pproc.PIPE # PIPE for ignoring output
                if ld_library_path:
                    v = kwargs['env']
                    def add_path(dct, path, var):
                        # prepend the build library dirs to the existing search path
                        dct[var] = os.pathsep.join(Utils.to_list(path) + [os.environ.get(var, '')])
                    if sys.platform == 'win32':
                        add_path(v, ld_library_path, 'PATH')
                    elif sys.platform == 'darwin':
                        add_path(v, ld_library_path, 'DYLD_LIBRARY_PATH')
                        add_path(v, ld_library_path, 'LD_LIBRARY_PATH')
                    else:
                        add_path(v, ld_library_path, 'LD_LIBRARY_PATH')
                pp = Utils.pproc.Popen(filename, **kwargs)
                pp.wait()
                result = int(pp.returncode == self.returncode_ok)
                if result:
                    self.num_tests_ok += 1
                else:
                    self.num_tests_failed += 1
                self.unit_test_results[label] = result
                self.unit_test_erroneous[label] = 0
            except OSError:
                # the binary could not be executed at all
                self.unit_test_erroneous[label] = 1
                self.num_tests_err += 1
            except KeyboardInterrupt:
                pass
        if Options.options.progress_bar: sys.stdout.write(Logs.colors.cursor_on)
    def print_results(self):
        "Pretty-prints a summary of all unit tests, along with some statistics"
        # If waf is not building, don't output anything
        if not Options.commands[self.run_if_waf_does]: return
        p = Utils.pprint
        # Early quit if no tests were performed
        if self.total_num_tests == 0:
            p('YELLOW', 'No unit tests present')
            return
        for label in self.unit_tests.allkeys:
            filename = self.unit_tests[label]
            err = 0
            result = 0
            try: err = self.unit_test_erroneous[label]
            except KeyError: pass
            try: result = self.unit_test_results[label]
            except KeyError: pass
            # pad with dots so ERROR/OK/FAILED columns line up
            n = self.max_label_length - len(label)
            if err: n += 4
            elif result: n += 7
            else: n += 3
            line = '%s %s' % (label, '.' * n)
            if err: p('RED', '%sERROR' % line)
            elif result: p('GREEN', '%sOK' % line)
            else: p('YELLOW', '%sFAILED' % line)
        percentage_ok = float(self.num_tests_ok) / float(self.total_num_tests) * 100.0
        percentage_failed = float(self.num_tests_failed) / float(self.total_num_tests) * 100.0
        percentage_erroneous = float(self.num_tests_err) / float(self.total_num_tests) * 100.0
        p('NORMAL', '''
Successful tests: %i (%.1f%%)
Failed tests: %i (%.1f%%)
Erroneous tests: %i (%.1f%%)
Total number of tests: %i
''' % (self.num_tests_ok, percentage_ok, self.num_tests_failed, percentage_failed,
    self.num_tests_err, percentage_erroneous, self.total_num_tests))
        p('GREEN', 'Unit tests finished')
############################################################################################
"""
New unit test system
The targets with feature 'test' are executed after they are built
bld(features='cprogram cc test', ...)
To display the results:
import UnitTest
bld.add_post_fun(UnitTest.summary)
"""
import threading
testlock = threading.Lock()
def set_options(opt):
    """Register the --alltests command-line flag."""
    opt.add_option(
        '--alltests',
        dest='all_tests',
        action='store_true',
        default=True,
        help='Exec all unit tests')
@feature('test')
@after('apply_link', 'vars_target_cprogram')
def make_test(self):
    """Attach a 'utest' task to every cprogram task generator marked with the
    'test' feature; the task runs the linked binary after the build."""
    if 'cprogram' not in self.features:
        Logs.error('test cannot be executed %s' % self)
        return
    # test binaries are never installed
    self.default_install_path = None
    self.create_task('utest', self.link_task.outputs)
def exec_test(self):
    """Task body for the 'utest' task type: run one test binary.

    Builds (once per build context, cached as bld.all_test_paths) an
    environment whose library search path includes every link task's output
    directory, runs the binary, and records
    (filename, returncode, stdout, stderr) in bld.utest_results under a lock.
    """
    status = 0  # NOTE(review): never read; kept as-is
    variant = self.env.variant()
    filename = self.inputs[0].abspath(self.env)
    try:
        fu = getattr(self.generator.bld, 'all_test_paths')
    except AttributeError:
        # first test of this build: compute the env once and cache it
        fu = os.environ.copy()
        self.generator.bld.all_test_paths = fu
        lst = []
        for obj in self.generator.bld.all_task_gen:
            link_task = getattr(obj, 'link_task', None)
            if link_task and link_task.env.variant() == variant:
                lst.append(link_task.outputs[0].parent.abspath(obj.env))
        def add_path(dct, path, var):
            # prepend the build library dirs to the existing search path
            dct[var] = os.pathsep.join(Utils.to_list(path) + [os.environ.get(var, '')])
        if sys.platform == 'win32':
            add_path(fu, lst, 'PATH')
        elif sys.platform == 'darwin':
            add_path(fu, lst, 'DYLD_LIBRARY_PATH')
            add_path(fu, lst, 'LD_LIBRARY_PATH')
        else:
            add_path(fu, lst, 'LD_LIBRARY_PATH')
    cwd = getattr(self.generator, 'ut_cwd', '') or self.inputs[0].parent.abspath(self.env)
    proc = Utils.pproc.Popen(filename, cwd=cwd, env=fu, stderr=Utils.pproc.PIPE, stdout=Utils.pproc.PIPE)
    (stdout, stderr) = proc.communicate()
    tup = (filename, proc.returncode, stdout, stderr)
    self.generator.utest_result = tup
    # utest tasks may run in parallel; serialize access to the shared list
    testlock.acquire()
    try:
        bld = self.generator.bld
        Logs.debug("ut: %r", tup)
        try:
            bld.utest_results.append(tup)
        except AttributeError:
            bld.utest_results = [tup]
    finally:
        testlock.release()
# create the 'utest' task type from exec_test, then wrap its runnable_status
# so that tests always run when --alltests is given
cls = Task.task_type_from_func('utest', func=exec_test, color='PINK', ext_in='.bin')
old = cls.runnable_status
def test_status(self):
    """Force execution under --alltests; otherwise defer to the normal
    up-to-date check."""
    if getattr(Options.options, 'all_tests', False):
        return RUN_ME
    return old(self)
cls.runnable_status = test_status
cls.quiet = 1
def summary(bld):
    """Print a pass/fail summary of the unit test results stored on *bld*.

    Does nothing when no results were collected.
    """
    results = getattr(bld, 'utest_results', [])
    if not results:
        return
    Utils.pprint('CYAN', 'execution summary')
    total = len(results)
    tfail = len([t for t in results if t[1]])
    Utils.pprint('CYAN', ' tests that pass %d/%d' % (total-tfail, total))
    for (f, code, out, err) in results:
        if not code:
            Utils.pprint('CYAN', ' %s' % f)
    Utils.pprint('CYAN', ' tests that fail %d/%d' % (tfail, total))
    for (f, code, out, err) in results:
        if code:
            Utils.pprint('CYAN', ' %s' % f)

45
tools/wafadmin/Tools/winres.py

@ -1,45 +0,0 @@
#!/usr/bin/env python
# encoding: utf-8
# Brant Young, 2007
"This hook is called when the class cpp/cc task generator encounters a '.rc' file: X{.rc -> [.res|.rc.o]}"
import os, sys, re
import TaskGen, Task
from Utils import quote_whitespace
from TaskGen import extension
# file extensions handled by this tool and the resource-compiler command line
EXT_WINRC = ['.rc']
winrc_str = '${WINRC} ${_CPPDEFFLAGS} ${_CCDEFFLAGS} ${WINRCFLAGS} ${_CPPINCFLAGS} ${_CCINCFLAGS} ${WINRC_TGT_F} ${TGT} ${WINRC_SRC_F} ${SRC}'
@extension(EXT_WINRC)
def rc_file(self, node):
    """Hook for .rc files: create a winrc task producing .res (MSVC rc) or
    .rc.o (windres) and feed it to the link step."""
    if self.env['WINRC_TGT_F'] == '/fo':
        obj_ext = '.res'
    else:
        obj_ext = '.rc.o'
    rctask = self.create_task('winrc', node, node.change_ext(obj_ext))
    self.compiled_tasks.append(rctask)
# create our action, for use with rc file; must run before compilation
Task.simple_task_type('winrc', winrc_str, color='BLUE', before='cc cxx', shell=False)
def detect(conf):
    """Configure the windows resource compiler (windres for gcc-like
    toolchains, RC for msvc) and its source/target flag templates."""
    v = conf.env
    winrc = v['WINRC']
    v['WINRC_TGT_F'] = '-o'
    v['WINRC_SRC_F'] = '-i'
    # find rc.exe
    if not winrc:
        cc_name = v['CC_NAME']
        if cc_name in ('gcc', 'cc', 'g++', 'c++'):
            winrc = conf.find_program('windres', var='WINRC', path_list=v['PATH'])
        elif cc_name == 'msvc':
            winrc = conf.find_program('RC', var='WINRC', path_list=v['PATH'])
            v['WINRC_TGT_F'] = '/fo'
            v['WINRC_SRC_F'] = ''
    if not winrc:
        conf.fatal('winrc was not found!')
    v['WINRCFLAGS'] = ''

77
tools/wafadmin/Tools/xlc.py

@ -1,77 +0,0 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2006-2008 (ita)
# Ralf Habacker, 2006 (rh)
# Yinon Ehrlich, 2009
# Michael Kuhn, 2009
import os, sys
import Configure, Options, Utils
import ccroot, ar
from Configure import conftest
@conftest
def find_xlc(conf):
    """Locate the IBM xlc C compiler (thread-safe xlc_r preferred)."""
    found = conf.find_program(['xlc_r', 'xlc'], var='CC', mandatory=True)
    conf.env.CC = conf.cmd_to_list(found)
    conf.env.CC_NAME = 'xlc'
@conftest
def find_cpp(conf):
    """Pick a C preprocessor: configured CPP, then $CPP, then the compiler."""
    v = conf.env
    cpp = v['CPP'] or conf.environ.get('CPP') or v['CC']
    v['CPP'] = cpp
@conftest
def xlc_common_flags(conf):
    """Populate the env with the flag templates for IBM xlc on AIX."""
    v = conf.env
    # CPPFLAGS CCDEFINES _CCINCFLAGS _CCDEFFLAGS
    v['CCFLAGS_DEBUG'] = ['-g']
    v['CCFLAGS_RELEASE'] = ['-O2']
    v['CC_SRC_F'] = ''
    v['CC_TGT_F'] = ['-c', '-o', ''] # shell hack for -MD
    v['CPPPATH_ST'] = '-I%s' # template for adding include paths
    # linker
    if not v['LINK_CC']: v['LINK_CC'] = v['CC']
    v['CCLNK_SRC_F'] = ''
    v['CCLNK_TGT_F'] = ['-o', ''] # shell hack for -MD
    v['LIB_ST'] = '-l%s' # template for adding libs
    v['LIBPATH_ST'] = '-L%s' # template for adding libpaths
    v['STATICLIB_ST'] = '-l%s'
    v['STATICLIBPATH_ST'] = '-L%s'
    v['RPATH_ST'] = '-Wl,-rpath,%s'
    v['CCDEFINES_ST'] = '-D%s'
    v['SONAME_ST'] = ''
    v['SHLIB_MARKER'] = ''
    v['STATICLIB_MARKER'] = ''
    v['FULLSTATIC_MARKER'] = '-static'
    # program
    v['program_LINKFLAGS'] = ['-Wl,-brtl']
    v['program_PATTERN'] = '%s'
    # shared library
    v['shlib_CCFLAGS'] = ['-fPIC', '-DPIC'] # avoid using -DPIC, -fPIC aleady defines the __PIC__ macro
    v['shlib_LINKFLAGS'] = ['-G', '-Wl,-brtl,-bexpfull']
    v['shlib_PATTERN'] = 'lib%s.so'
    # static lib
    v['staticlib_LINKFLAGS'] = ''
    v['staticlib_PATTERN'] = 'lib%s.a'
def detect(conf):
    """Configure the xlc C toolchain: locate the tools, then set the flags."""
    for step in (conf.find_xlc, conf.find_cpp, conf.find_ar,
                 conf.xlc_common_flags, conf.cc_load_tools, conf.cc_add_flags):
        step()

77
tools/wafadmin/Tools/xlcxx.py

@ -1,77 +0,0 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2006 (ita)
# Ralf Habacker, 2006 (rh)
# Yinon Ehrlich, 2009
# Michael Kuhn, 2009
import os, sys
import Configure, Options, Utils
import ccroot, ar
from Configure import conftest
@conftest
def find_xlcxx(conf):
    """Locate the IBM xlC C++ compiler (thread-safe xlc++_r preferred)."""
    found = conf.find_program(['xlc++_r', 'xlc++'], var='CXX', mandatory=True)
    conf.env.CXX = conf.cmd_to_list(found)
    conf.env.CXX_NAME = 'xlc++'
@conftest
def find_cpp(conf):
    """Pick a C preprocessor: configured CPP, then $CPP, then the C++ compiler."""
    v = conf.env
    cpp = v['CPP'] or conf.environ.get('CPP') or v['CXX']
    v['CPP'] = cpp
@conftest
def xlcxx_common_flags(conf):
    """Populate the env with the flag templates for IBM xlC (C++) on AIX."""
    v = conf.env
    # CPPFLAGS CXXDEFINES _CXXINCFLAGS _CXXDEFFLAGS
    v['CXXFLAGS_DEBUG'] = ['-g']
    v['CXXFLAGS_RELEASE'] = ['-O2']
    v['CXX_SRC_F'] = ''
    v['CXX_TGT_F'] = ['-c', '-o', ''] # shell hack for -MD
    v['CPPPATH_ST'] = '-I%s' # template for adding include paths
    # linker
    if not v['LINK_CXX']: v['LINK_CXX'] = v['CXX']
    v['CXXLNK_SRC_F'] = ''
    v['CXXLNK_TGT_F'] = ['-o', ''] # shell hack for -MD
    v['LIB_ST'] = '-l%s' # template for adding libs
    v['LIBPATH_ST'] = '-L%s' # template for adding libpaths
    v['STATICLIB_ST'] = '-l%s'
    v['STATICLIBPATH_ST'] = '-L%s'
    v['RPATH_ST'] = '-Wl,-rpath,%s'
    v['CXXDEFINES_ST'] = '-D%s'
    v['SONAME_ST'] = ''
    v['SHLIB_MARKER'] = ''
    v['STATICLIB_MARKER'] = ''
    v['FULLSTATIC_MARKER'] = '-static'
    # program
    v['program_LINKFLAGS'] = ['-Wl,-brtl']
    v['program_PATTERN'] = '%s'
    # shared library
    v['shlib_CXXFLAGS'] = ['-fPIC', '-DPIC'] # avoid using -DPIC, -fPIC aleady defines the __PIC__ macro
    v['shlib_LINKFLAGS'] = ['-G', '-Wl,-brtl,-bexpfull']
    v['shlib_PATTERN'] = 'lib%s.so'
    # static lib
    v['staticlib_LINKFLAGS'] = ''
    v['staticlib_PATTERN'] = 'lib%s.a'
def detect(conf):
    """Configure the xlC C++ toolchain: locate the tools, then set the flags."""
    for step in (conf.find_xlcxx, conf.find_cpp, conf.find_ar,
                 conf.xlcxx_common_flags, conf.cxx_load_tools, conf.cxx_add_flags):
        step()

707
tools/wafadmin/Utils.py

@ -1,707 +0,0 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2005 (ita)
"""
Utilities, the stable ones are the following:
* h_file: compute a unique value for a file (hash), it uses
the module fnv if it is installed (see waf/utils/fnv & http://code.google.com/p/waf/wiki/FAQ)
else, md5 (see the python docs)
For large projects (projects with more than 15000 files) or slow hard disks and filesystems (HFS)
it is possible to use a hashing based on the path and the size (may give broken cache results)
The method h_file MUST raise an OSError if the file is a folder
import stat
def h_file(filename):
st = os.stat(filename)
if stat.S_ISDIR(st[stat.ST_MODE]): raise IOError('not a file')
m = Utils.md5()
m.update(str(st.st_mtime))
m.update(str(st.st_size))
m.update(filename)
return m.digest()
To replace the function in your project, use something like this:
import Utils
Utils.h_file = h_file
* h_list
* h_fun
* get_term_cols
* ordered_dict
"""
import os, sys, imp, string, errno, traceback, inspect, re, shutil, datetime, gc
# In python 3.0 we can get rid of all this
try: from UserDict import UserDict
except ImportError: from collections import UserDict
if sys.hexversion >= 0x2060000 or os.name == 'java':
import subprocess as pproc
else:
import pproc
import Logs
from Constants import *
# deque appeared in python 2.4; provide a list-based fallback for 2.3
try:
    from collections import deque
except ImportError:
    class deque(list):
        """Minimal deque substitute supporting only popleft()."""
        def popleft(self):
            return self.pop(0)
is_win32 = sys.platform == 'win32'

try:
    # defaultdict in python 2.5
    from collections import defaultdict as DefaultDict
except ImportError:
    class DefaultDict(dict):
        """dict that creates missing values on demand via default_factory."""
        def __init__(self, default_factory):
            super(DefaultDict, self).__init__()
            self.default_factory = default_factory
        def __getitem__(self, key):
            try:
                return super(DefaultDict, self).__getitem__(key)
            except KeyError:
                self[key] = value = self.default_factory()
                return value
class WafError(Exception):
    """Base error type for waf; records the stack at construction time so
    wscript errors can be located later."""
    def __init__(self, *args):
        self.args = args
        try:
            self.stack = traceback.extract_stack()
        except:
            # never let stack capture break error reporting
            pass
        Exception.__init__(self, *args)
    def __str__(self):
        # NOTE: single falsy argument intentionally falls back to the tuple repr
        return str(len(self.args) == 1 and self.args[0] or self.args)
class WscriptError(WafError):
    """Error raised from a user wscript; prefixes the message with the
    wscript file (and line, when it can be located from the stack)."""
    def __init__(self, message, wscript_file=None):
        if wscript_file:
            self.wscript_file = wscript_file
            self.wscript_line = None
        else:
            # walk the stack to find the innermost wscript frame
            try:
                (self.wscript_file, self.wscript_line) = self.locate_error()
            except:
                (self.wscript_file, self.wscript_line) = (None, None)
        msg_file_line = ''
        if self.wscript_file:
            msg_file_line = "%s:" % self.wscript_file
            if self.wscript_line:
                msg_file_line += "%s:" % self.wscript_line
        err_message = "%s error: %s" % (msg_file_line, message)
        WafError.__init__(self, err_message)
    def locate_error(self):
        """Return (file, line) of the most recent wscript frame, or (None, None)."""
        stack = traceback.extract_stack()
        stack.reverse()
        for frame in stack:
            file_name = os.path.basename(frame[0])
            is_wscript = (file_name == WSCRIPT_FILE or file_name == WSCRIPT_BUILD_FILE)
            if is_wscript:
                return (frame[0], frame[1])
        return (None, None)
# progress-line template; win32 consoles need cursor-up + kill-line escapes
indicator = is_win32 and '\x1b[A\x1b[K%s%s%s\r' or '\x1b[K%s%s%s\r'

# pick a file-hashing implementation: the fast external 'fnv' module when
# available, else hashlib/md5; h_file MUST raise OSError on directories
try:
    from fnv import new as md5
    import Constants
    Constants.SIG_NIL = 'signofnv'
    def h_file(filename):
        """Hash a file with fnv; raise OSError when it is not a regular file."""
        m = md5()
        try:
            m.hfile(filename)
            x = m.digest()
            if x is None: raise OSError("not a file")
            return x
        except SystemError:
            raise OSError("not a file" + filename)
except ImportError:
    try:
        try:
            from hashlib import md5
        except ImportError:
            from md5 import md5
        def h_file(filename):
            """Hash the file contents with md5, reading in 100000-byte chunks."""
            f = open(filename, 'rb')
            m = md5()
            # reuses 'filename' as the chunk variable; loop ends on empty read
            while (filename):
                filename = f.read(100000)
                m.update(filename)
            f.close()
            return m.digest()
    except ImportError:
        # portability fixes may be added elsewhere (although, md5 should be everywhere by now)
        md5 = None
class ordered_dict(UserDict):
    """UserDict that records key insertion order in the .allkeys list."""
    def __init__(self, dict=None):
        # allkeys must exist before UserDict.__init__ triggers __setitem__
        self.allkeys = []
        UserDict.__init__(self, dict)
    def __delitem__(self, key):
        self.allkeys.remove(key)
        UserDict.__delitem__(self, key)
    def __setitem__(self, key, item):
        if key not in self.allkeys:
            self.allkeys.append(key)
        UserDict.__setitem__(self, key, item)
def exec_command(s, **kw):
    """Run command *s* and return its exit status (-1 when it cannot start).

    A 'log' keyword redirects both stdout and stderr to that object; a string
    command is run through the shell, a list is executed directly.
    """
    log = kw.pop('log', None)
    if log is not None:
        kw['stdout'] = kw['stderr'] = log
    kw['shell'] = isinstance(s, str)
    try:
        return pproc.Popen(s, **kw).wait()
    except OSError:
        return -1
if is_win32:
    # win32 override: very long command lines need STARTUPINFO tweaks, and
    # output is captured/relayed to avoid console interleaving issues
    def exec_command(s, **kw):
        """Win32 variant of exec_command; same contract as the portable one."""
        if 'log' in kw:
            kw['stdout'] = kw['stderr'] = kw['log']
            del(kw['log'])
        kw['shell'] = isinstance(s, str)
        if len(s) > 2000:
            startupinfo = pproc.STARTUPINFO()
            startupinfo.dwFlags |= pproc.STARTF_USESHOWWINDOW
            kw['startupinfo'] = startupinfo
        try:
            if 'stdout' not in kw:
                kw['stdout'] = pproc.PIPE
                kw['stderr'] = pproc.PIPE
                proc = pproc.Popen(s,**kw)
                (stdout, stderr) = proc.communicate()
                Logs.info(stdout)
                if stderr:
                    Logs.error(stderr)
                return proc.returncode
            else:
                proc = pproc.Popen(s,**kw)
                return proc.wait()
        except OSError:
            return -1
listdir = os.listdir
if is_win32:
	def listdir_win32(s):
		"""os.listdir wrapper for win32: accepts bare drive names ('x:')
		and raises OSError/ENOENT for non-directories."""
		if re.match('^[A-Za-z]:$', s):
			# os.path.isdir fails if s contains only the drive name... (x:)
			s += os.sep
		if not os.path.isdir(s):
			e = OSError()
			e.errno = errno.ENOENT
			raise e
		return os.listdir(s)
	listdir = listdir_win32
def waf_version(mini = 0x010000, maxi = 0x100000):
	"Halts if the waf version is wrong"
	# mini/maxi may be ints (hex-encoded versions) or dotted strings such
	# as '1.5.2'; strings are converted by replacing dots with '0' and
	# parsing the result as hexadecimal
	ver = HEXVERSION
	try: min_val = mini + 0
	except TypeError: min_val = int(mini.replace('.', '0'), 16)
	if min_val > ver:
		Logs.error("waf version should be at least %s (%s found)" % (mini, ver))
		sys.exit(0)
	try: max_val = maxi + 0
	except TypeError: max_val = int(maxi.replace('.', '0'), 16)
	if max_val < ver:
		Logs.error("waf version should be at most %s (%s found)" % (maxi, ver))
		sys.exit(0)
def python_24_guard():
	"""Raise ImportError unless the interpreter is a 2.x python >= 2.4."""
	supported = 0x20400f0 <= sys.hexversion < 0x3000000
	if not supported:
		raise ImportError("Waf requires Python >= 2.3 but the raw source requires Python 2.4, 2.5 or 2.6")
def ex_stack():
	"""Describe the exception currently being handled: the full formatted
	traceback when Logs.verbose > 1, just the exception value otherwise."""
	exc_type, exc_value, tb = sys.exc_info()
	if Logs.verbose > 1:
		exc_lines = traceback.format_exception(exc_type, exc_value, tb)
		return ''.join(exc_lines)
	return str(exc_value)
def to_list(sth):
	"""Return *sth* as a list: strings are split on whitespace, any other
	value is returned unchanged."""
	if isinstance(sth, str):
		return sth.split()
	return sth
# cache of wscript modules already executed, keyed by absolute path
g_loaded_modules = {}
"index modules by absolute path"

# the top-level wscript module, set by set_main_module()
g_module=None
"the main module is special"
def load_module(file_path, name=WSCRIPT_FILE):
	"""Load and execute a wscript file as a python module (cached).

	file_path: absolute path of the script to load
	name: name to give the new module
	Returns the module object; raises WscriptError when the file cannot
	be read or when executing it raises an exception.
	"""
	try:
		return g_loaded_modules[file_path]
	except KeyError:
		pass

	module = imp.new_module(name)

	try:
		code = readf(file_path, m='rU')
	except (IOError, OSError):
		raise WscriptError('Could not read the file %r' % file_path)

	module.waf_hash_val = code

	# make the script directory importable while the script executes; the
	# finally clause guarantees sys.path is restored even when the script
	# raises (the original code leaked the path entry on error)
	sys.path.insert(0, os.path.dirname(file_path))
	try:
		try:
			exec(compile(code, file_path, 'exec'), module.__dict__)
		except Exception:
			exc_type, exc_value, tb = sys.exc_info()
			raise WscriptError("".join(traceback.format_exception(exc_type, exc_value, tb)), file_path)
	finally:
		sys.path.pop(0)

	g_loaded_modules[file_path] = module
	return module
def set_main_module(file_path):
	"Load custom options, if defined"
	# Loads file_path as the special 'wscript_main' module, stores it in
	# the global g_module and fills in default APPNAME/VERSION values.
	global g_module
	g_module = load_module(file_path, 'wscript_main')
	g_module.root_path = file_path

	# the bare excepts are narrowed: only a missing attribute should
	# trigger the default value
	try:
		g_module.APPNAME
	except AttributeError:
		g_module.APPNAME = 'noname'
	try:
		g_module.VERSION
	except AttributeError:
		g_module.VERSION = '1.0'

	# note: to register the module globally, use the following:
	# sys.modules['wscript_main'] = g_module
def to_hashtable(s):
	"""Parse 'KEY=VALUE' lines (used for importing env files) into a dict.

	Values may themselves contain '=' characters: only the first '=' on a
	line separates the key from the value (the original code split on
	every '=' and silently truncated such values).
	"""
	tbl = {}
	lst = s.split('\n')
	for line in lst:
		if not line: continue
		mems = line.split('=', 1)
		tbl[mems[0]] = mems[1]
	return tbl
def get_term_cols():
	"console width"
	# conservative default; replaced below by an ioctl-based version when
	# the terminal actually supports the query
	return 80

try:
	import struct, fcntl, termios
except ImportError:
	pass
else:
	if Logs.got_tty:
		def myfun():
			# TIOCGWINSZ returns (rows, cols, xpixel, ypixel) packed as
			# four unsigned shorts; only the column count is needed
			dummy_lines, cols = struct.unpack("HHHH", \
				fcntl.ioctl(sys.stderr.fileno(),termios.TIOCGWINSZ , \
					struct.pack("HHHH", 0, 0, 0, 0)))[:2]
			return cols
		# we actually try the function once to see if it is suitable
		try:
			myfun()
		except:
			pass
		else:
			get_term_cols = myfun
# state for the rotating progress indicator
rot_idx = 0
rot_chr = ['\\', '|', '/', '-']
"the rotation character in the progress bar"
def split_path(path):
	"""Split a posix-style path on '/' separators (default implementation;
	replaced on cygwin and win32)."""
	return path.split('/')
def split_path_cygwin(path):
	"""Split a cygwin path on '/'; UNC paths ('//host/...') keep a leading
	'/' on their first component."""
	parts = path.split('/')
	if path.startswith('//'):
		parts = parts[2:]
		parts[0] = '/' + parts[0]
	return parts
# matches either kind of path separator on win32
re_sp = re.compile('[/\\\\]')

def split_path_win32(path):
	"""Split a win32 path on '/' or '\\'; UNC paths ('\\\\host\\...') keep
	a leading backslash on their first component."""
	components = re_sp.split(path)
	if path.startswith('\\\\'):
		components = components[2:]
		components[0] = '\\' + components[0]
	return components
# pick the path-splitting implementation matching the host platform
if sys.platform == 'cygwin':
	split_path = split_path_cygwin
elif is_win32:
	split_path = split_path_win32
def copy_attrs(orig, dest, names, only_if_set=False):
	"""Copy the attributes listed in *names* (string or list) from *orig*
	to *dest*; attributes missing on *orig* default to (). When
	only_if_set is true, falsy values are not copied."""
	for attr in to_list(names):
		value = getattr(orig, attr, ())
		if not only_if_set or value:
			setattr(dest, attr, value)
def def_attrs(cls, **kw):
	'''
	set attributes for class.
	@param cls [any class]: the class to update the given attributes in.
	@param kw [dictionary]: dictionary of attributes names and values.

	if the given class hasn't one (or more) of these attributes, add the attribute with its value to the class.
	'''
	# iteritems: this codebase targets python 2.x
	for k, v in kw.iteritems():
		if not hasattr(cls, k):
			setattr(cls, k, v)
def quote_define_name(path):
	"""Turn an arbitrary string into an upper-case C define name by
	replacing every non-alphanumeric character with '_'."""
	sanitized = re.compile("[^a-zA-Z0-9]").sub("_", path)
	return sanitized.upper()
def quote_whitespace(path):
	"""Wrap *path* in double quotes when its stripped form contains an
	embedded space; doubled quotes produced by re-quoting are collapsed."""
	if path.strip().find(' ') > 0:
		quoted = '"%s"' % path
	else:
		quoted = path
	return quoted.replace('""', '"')
def trimquotes(s):
	"""Right-strip *s* and remove one pair of enclosing single quotes;
	falsy input yields ''."""
	if not s:
		return ''
	stripped = s.rstrip()
	if stripped[0] == "'" and stripped[-1] == "'":
		return stripped[1:-1]
	return stripped
def h_list(lst):
	"""Hash a list through the digest of its string representation.

	NOTE(review): relies on the module-level 'md5' callable selected at
	import time; under python 2, str(lst) is a byte string as required.
	"""
	m = md5()
	m.update(str(lst))
	return m.digest()
def h_fun(fun):
	"""Return a hashable representation of a function: its source code,
	cached on the function object as 'fun.code' ("nocode" when the source
	cannot be retrieved)."""
	try:
		return fun.code
	except AttributeError:
		pass
	try:
		src = inspect.getsource(fun)
	except IOError:
		src = "nocode"
	try:
		fun.code = src
	except AttributeError:
		# builtins and some callables do not accept new attributes
		pass
	return src
def pprint(col, str, label='', sep=os.linesep):
	"print messages in color"
	# col: a color name understood by Logs.colors; label is appended after
	# the colored text; the second parameter deliberately shadows builtin
	# 'str' (kept for interface compatibility)
	sys.stderr.write("%s%s%s %s%s" % (Logs.colors(col), str, Logs.colors.NORMAL, label, sep))
def check_dir(dir):
	"""If a folder doesn't exists, create it."""
	# os.stat doubles as an existence test; note the python 2
	# 'except E, e' syntax used throughout this file
	try:
		os.stat(dir)
	except OSError:
		try:
			os.makedirs(dir)
		except OSError, e:
			raise WafError("Cannot create folder '%s' (original error: %s)" % (dir, e))
def cmd_output(cmd, **kw):
	"""Run a command and return its standard output as a string.

	Keyword 'silent' suppresses the error on non-zero exit codes (the
	output is then discarded); 'e' is accepted as an alias for 'env'.
	Raises ValueError when the command cannot be run or fails.
	"""
	silent = False
	if 'silent' in kw:
		silent = kw['silent']
		del(kw['silent'])

	if 'e' in kw:
		tmp = kw['e']
		del(kw['e'])
		kw['env'] = tmp

	kw['shell'] = isinstance(cmd, str)
	kw['stdout'] = pproc.PIPE
	if silent:
		kw['stderr'] = pproc.PIPE

	try:
		p = pproc.Popen(cmd, **kw)
		output = p.communicate()[0]
	except OSError, e:
		raise ValueError(str(e))

	if p.returncode:
		if not silent:
			msg = "command execution failed: %s -> %r" % (cmd, str(output))
			raise ValueError(msg)
		output = ''
	return output
reg_subst = re.compile(r"(\\\\)|(\$\$)|\$\{([^}]+)\}")

def subst_vars(expr, params):
	"substitute ${PREFIX}/bin in /usr/local/bin"
	# '\\\\' -> '\', '$$' -> '$', '${NAME}' -> lookup in params
	def _replace(match):
		backslash, dollar, name = match.group(1), match.group(2), match.group(3)
		if backslash:
			return '\\'
		elif dollar:
			return '$'
		try:
			# environments may contain lists
			return params.get_flat(name)
		except AttributeError:
			return params[name]
	return reg_subst.sub(_replace, expr)
def unversioned_sys_platform_to_binary_format(unversioned_sys_platform):
	"infers the binary format from the unversioned_sys_platform name."
	if unversioned_sys_platform in ('win32', 'cygwin', 'uwin', 'msys'):
		return 'pe'
	if unversioned_sys_platform == 'darwin':
		return 'mac-o'
	# linux, the BSDs, sunos... and, by assumption, everything else.
	# TODO we assume all other operating systems are elf, which is not true.
	# we may set this to 'unknown' and have ccroot and other tools handle the case "gracefully" (whatever that means).
	return 'elf'
def unversioned_sys_platform():
	"""Return sys.platform with its trailing version digits removed.

	The version appended by python (e.g. 'freebsd7') reflects the build
	host, not the running system, so it is unreliable; it is stripped
	except for special names like 'win32' or 'os2'. Under Jython the real
	OS name is obtained from the JVM system properties.
	"""
	s = sys.platform
	if s == 'java':
		# The real OS is hidden under the JVM.
		from java.lang import System
		s = System.getProperty('os.name')
		# see http://lopica.sourceforge.net/os.html for a list of possible values
		jvm_names = {
			'Mac OS X': 'darwin',
			'OS/2': 'os2',
			'HP-UX': 'hpux',
			'SunOS': 'sunos',
			'Solaris': 'sunos',
		}
		if s in jvm_names:
			return jvm_names[s]
		if s.startswith('Windows '):
			return 'win32'
		s = s.lower()

	if s == 'win32':
		return s
	if s.endswith('os2') and s != 'sunos2':
		return s
	return re.split('\d+$', s)[0]
#@deprecated('use unversioned_sys_platform instead')
def detect_platform():
	"""Legacy platform detection kept for compatibility.

	Tries to map sys.platform onto a known unversioned POSIX name; note
	that the version suffix is NOT removed for platforms outside the
	known list (freebsd, netbsd, ...), unlike unversioned_sys_platform.
	"""
	s = sys.platform

	# known POSIX (sys.platform may be e.g. 'linux2')
	for known in 'cygwin linux irix sunos hpux aix darwin'.split():
		if s.find(known) >= 0:
			return known

	# unknown POSIX
	if os.name in 'posix java os2'.split():
		return os.name
	return s
def load_tool(tool, tooldir=None):
	'''
	load_tool: import a Python module, optionally using several directories.
	@param tool [string]: name of tool to import.
	@param tooldir [list]: directories to look for the tool.
	@return: the loaded module.

	Warning: this function is not thread-safe: plays with sys.path,
	so must run in sequence.
	'''
	if tooldir:
		assert isinstance(tooldir, list)
		sys.path = tooldir + sys.path
	try:
		try:
			return __import__(tool)
		except ImportError, e:
			Logs.error('Could not load the tool %r in %r:\n%s' % (tool, sys.path, e))
			raise
	finally:
		# drop the directories prepended above, success or failure
		if tooldir:
			sys.path = sys.path[len(tooldir):]
def readf(fname, m='r'):
	"""Open *fname* with mode *m* and return its entire contents; the
	file handle is closed even when reading fails."""
	stream = open(fname, m)
	try:
		contents = stream.read()
	finally:
		stream.close()
	return contents
def nada(*k, **kw):
	"""A placeholder callback: accepts anything, does nothing, returns None."""
	return None
def diff_path(top, subdir):
	"""difference between two absolute paths"""
	top_parts = os.path.normpath(top).replace('\\', '/').split('/')
	sub_parts = os.path.normpath(subdir).replace('\\', '/').split('/')
	if len(top_parts) == len(sub_parts):
		return ''
	# negative-index slice keeps the trailing components of subdir
	diff = sub_parts[len(top_parts) - len(sub_parts):]
	return os.path.join(*diff)
class Context(object):
	"""A base class for commands to be executed from Waf scripts"""

	def set_curdir(self, dir):
		self.curdir_ = dir

	def get_curdir(self):
		# lazily default to the process cwd on first access
		try:
			return self.curdir_
		except AttributeError:
			self.curdir_ = os.getcwd()
			return self.get_curdir()

	curdir = property(get_curdir, set_curdir)

	def recurse(self, dirs, name=''):
		"""The function for calling scripts from folders, it tries to call wscript + function_name
		and if that file does not exist, it will call the method 'function_name' from a file named wscript
		the dirs can be a list of folders or a string containing space-separated folder paths
		"""
		# default to the name of the calling function (e.g. 'configure')
		if not name:
			name = inspect.stack()[1][3]

		if isinstance(dirs, str):
			dirs = to_list(dirs)

		for x in dirs:
			if os.path.isabs(x):
				nexdir = x
			else:
				nexdir = os.path.join(self.curdir, x)

			base = os.path.join(nexdir, WSCRIPT_FILE)
			file_path = base + '_' + name

			try:
				# try the split file first: wscript_<name>
				txt = readf(file_path, m='rU')
			except (OSError, IOError):
				# no split file: call the function <name> defined in wscript
				try:
					module = load_module(base)
				except OSError:
					raise WscriptError('No such script %s' % base)

				try:
					f = module.__dict__[name]
				except KeyError:
					raise WscriptError('No function %s defined in %s' % (name, base))

				if getattr(self.__class__, 'pre_recurse', None):
					self.pre_recurse(f, base, nexdir)
				old = self.curdir
				self.curdir = nexdir
				try:
					f(self)
				finally:
					# always restore the previous directory
					self.curdir = old
				if getattr(self.__class__, 'post_recurse', None):
					self.post_recurse(module, base, nexdir)
			else:
				# the split file exists: execute its contents directly,
				# with 'ctx' bound to this context object
				dc = {'ctx': self}
				if getattr(self.__class__, 'pre_recurse', None):
					dc = self.pre_recurse(txt, file_path, nexdir)
				old = self.curdir
				self.curdir = nexdir
				try:
					try:
						exec(compile(txt, file_path, 'exec'), dc)
					except Exception:
						exc_type, exc_value, tb = sys.exc_info()
						raise WscriptError("".join(traceback.format_exception(exc_type, exc_value, tb)), base)
				finally:
					self.curdir = old
				if getattr(self.__class__, 'post_recurse', None):
					self.post_recurse(txt, file_path, nexdir)
if is_win32:
	# win32 workaround: re-apply the stat data onto the destination after
	# the copy. The original code called copystat(src, src), which copies
	# the stat of src onto itself -- a no-op; it must target dst.
	old = shutil.copy2
	def copy2(src, dst):
		"""shutil.copy2 replacement for win32: copy, then re-apply the
		source's stat information to the destination file."""
		old(src, dst)
		shutil.copystat(src, dst)
	setattr(shutil, 'copy2', copy2)
def zip_folder(dir, zip_file_name, prefix):
	"""
	Create a deflate-compressed zip of *dir* at *zip_file_name*.
	prefix represents the app folder to prepend inside the archive.
	"""
	import zipfile

	if prefix and not prefix.endswith(os.sep):
		prefix += os.sep

	base = os.path.abspath(dir)
	root_len = len(base)
	archive = zipfile.ZipFile(zip_file_name, 'w', compression=zipfile.ZIP_DEFLATED)
	for root, dirs, files in os.walk(base):
		for fname in files:
			stored_name = prefix + root[root_len:] + os.sep + fname
			archive.write(root + os.sep + fname, stored_name, zipfile.ZIP_DEFLATED)
	archive.close()
def get_elapsed_time(start):
	"Format a time delta (datetime.timedelta) using the format DdHhMmS.MSs"
	delta = datetime.datetime.now() - start
	# the int() casts are necessary for python 3.0 float division
	days = int(delta.days)
	hours = int(delta.seconds / 3600)
	minutes = int((delta.seconds - hours * 3600) / 60)
	seconds = delta.seconds - hours * 3600 - minutes * 60 + float(delta.microseconds) / 1000 / 1000

	# a unit is emitted as soon as any larger unit is non-zero
	parts = []
	if days:
		parts.append('%dd' % days)
	if days or hours:
		parts.append('%dh' % hours)
	if days or hours or minutes:
		parts.append('%dm' % minutes)
	parts.append('%.3fs' % seconds)
	return ''.join(parts)
if os.name == 'java':
	# For Jython (they should really fix the inconsistency)
	# Jython may implement only one of gc.disable/gc.enable; make the pair
	# consistent so callers can toggle the collector unconditionally
	try:
		gc.disable()
		gc.enable()
	except NotImplementedError:
		gc.disable = gc.enable

3
tools/wafadmin/__init__.py

@ -1,3 +0,0 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2005 (ita)

221
tools/wafadmin/ansiterm.py

@ -1,221 +0,0 @@
import sys, os

try:
	# probe for a usable win32 console; any failure (not a tty, ctypes or
	# kernel32 missing, console too small) is swallowed and the standard
	# streams are left untouched
	if (not sys.stderr.isatty()) or (not sys.stdout.isatty()):
		raise ValueError('not a tty')

	from ctypes import *

	class COORD(Structure):
		_fields_ = [("X", c_short), ("Y", c_short)]

	class SMALL_RECT(Structure):
		_fields_ = [("Left", c_short), ("Top", c_short), ("Right", c_short), ("Bottom", c_short)]

	class CONSOLE_SCREEN_BUFFER_INFO(Structure):
		_fields_ = [("Size", COORD), ("CursorPosition", COORD), ("Attributes", c_short), ("Window", SMALL_RECT), ("MaximumWindowSize", COORD)]

	class CONSOLE_CURSOR_INFO(Structure):
		_fields_ = [('dwSize',c_ulong), ('bVisible', c_int)]

	sbinfo = CONSOLE_SCREEN_BUFFER_INFO()
	csinfo = CONSOLE_CURSOR_INFO()
	# -11 is STD_OUTPUT_HANDLE
	hconsole = windll.kernel32.GetStdHandle(-11)
	windll.kernel32.GetConsoleScreenBufferInfo(hconsole, byref(sbinfo))
	if sbinfo.Size.X < 10 or sbinfo.Size.Y < 10: raise Exception('small console')
	windll.kernel32.GetConsoleCursorInfo(hconsole, byref(csinfo))
except Exception:
	pass
else:
	# the probe above succeeded: install the ANSI emulation layer
	import re, threading

	# '' and None fall back to the default, anything else is int()-ed
	to_int = lambda number, default: number and int(number) or default
	# serializes writes from multiple threads to the single console handle
	wlock = threading.Lock()

	STD_OUTPUT_HANDLE = -11
	STD_ERROR_HANDLE = -12
class AnsiTerm(object):
	"""A file-like replacement for stdout/stderr on win32 that interprets
	a subset of ANSI/VT100 escape sequences and translates them into
	Win32 console API calls (via the module-level ctypes bindings)."""

	def __init__(self):
		self.hconsole = windll.kernel32.GetStdHandle(STD_OUTPUT_HANDLE)
		# stack of saved cursor positions for ESC[s / ESC[u
		self.cursor_history = []

	def screen_buffer_info(self):
		"""Return a freshly-queried CONSOLE_SCREEN_BUFFER_INFO."""
		sbinfo = CONSOLE_SCREEN_BUFFER_INFO()
		windll.kernel32.GetConsoleScreenBufferInfo(self.hconsole, byref(sbinfo))
		return sbinfo

	def clear_line(self, param):
		mode = param and int(param) or 0
		sbinfo = self.screen_buffer_info()
		if mode == 1: # Clear from begining of line to cursor position
			line_start = COORD(0, sbinfo.CursorPosition.Y)
			line_length = sbinfo.Size.X
		elif mode == 2: # Clear entire line
			line_start = COORD(sbinfo.CursorPosition.X, sbinfo.CursorPosition.Y)
			line_length = sbinfo.Size.X - sbinfo.CursorPosition.X
		else: # Clear from cursor position to end of line
			line_start = sbinfo.CursorPosition
			line_length = sbinfo.Size.X - sbinfo.CursorPosition.X
		chars_written = c_int()
		windll.kernel32.FillConsoleOutputCharacterA(self.hconsole, c_char(' '), line_length, line_start, byref(chars_written))
		windll.kernel32.FillConsoleOutputAttribute(self.hconsole, sbinfo.Attributes, line_length, line_start, byref(chars_written))

	def clear_screen(self, param):
		mode = to_int(param, 0)
		sbinfo = self.screen_buffer_info()
		if mode == 1: # Clear from begining of screen to cursor position
			clear_start = COORD(0, 0)
			clear_length = sbinfo.CursorPosition.X * sbinfo.CursorPosition.Y
		elif mode == 2: # Clear entire screen and return cursor to home
			clear_start = COORD(0, 0)
			clear_length = sbinfo.Size.X * sbinfo.Size.Y
			windll.kernel32.SetConsoleCursorPosition(self.hconsole, clear_start)
		else: # Clear from cursor position to end of screen
			clear_start = sbinfo.CursorPosition
			clear_length = ((sbinfo.Size.X - sbinfo.CursorPosition.X) + sbinfo.Size.X * (sbinfo.Size.Y - sbinfo.CursorPosition.Y))
		chars_written = c_int()
		windll.kernel32.FillConsoleOutputCharacterA(self.hconsole, c_char(' '), clear_length, clear_start, byref(chars_written))
		windll.kernel32.FillConsoleOutputAttribute(self.hconsole, sbinfo.Attributes, clear_length, clear_start, byref(chars_written))

	def push_cursor(self, param):
		sbinfo = self.screen_buffer_info()
		# bugfix: python lists have no .push() method -- the original code
		# raised AttributeError whenever an ESC[s sequence was processed
		self.cursor_history.append(sbinfo.CursorPosition)

	def pop_cursor(self, param):
		if self.cursor_history:
			old_pos = self.cursor_history.pop()
			windll.kernel32.SetConsoleCursorPosition(self.hconsole, old_pos)

	def set_cursor(self, param):
		# ANSI coordinates are 1-based; the console API is 0-based
		x, sep, y = param.partition(';')
		x = to_int(x, 1) - 1
		y = to_int(y, 1) - 1
		sbinfo = self.screen_buffer_info()
		new_pos = COORD(
			min(max(0, x), sbinfo.Size.X),
			min(max(0, y), sbinfo.Size.Y)
		)
		windll.kernel32.SetConsoleCursorPosition(self.hconsole, new_pos)

	def set_column(self, param):
		x = to_int(param, 1) - 1
		sbinfo = self.screen_buffer_info()
		new_pos = COORD(
			min(max(0, x), sbinfo.Size.X),
			sbinfo.CursorPosition.Y
		)
		windll.kernel32.SetConsoleCursorPosition(self.hconsole, new_pos)

	def move_cursor(self, x_offset=0, y_offset=0):
		sbinfo = self.screen_buffer_info()
		new_pos = COORD(
			min(max(0, sbinfo.CursorPosition.X + x_offset), sbinfo.Size.X),
			min(max(0, sbinfo.CursorPosition.Y + y_offset), sbinfo.Size.Y)
		)
		windll.kernel32.SetConsoleCursorPosition(self.hconsole, new_pos)

	def move_up(self, param):
		self.move_cursor(y_offset = -to_int(param, 1))

	def move_down(self, param):
		self.move_cursor(y_offset = to_int(param, 1))

	def move_left(self, param):
		self.move_cursor(x_offset = -to_int(param, 1))

	def move_right(self, param):
		self.move_cursor(x_offset = to_int(param, 1))

	def next_line(self, param):
		sbinfo = self.screen_buffer_info()
		self.move_cursor(
			x_offset = -sbinfo.CursorPosition.X,
			y_offset = to_int(param, 1)
		)

	def prev_line(self, param):
		sbinfo = self.screen_buffer_info()
		self.move_cursor(
			x_offset = -sbinfo.CursorPosition.X,
			y_offset = -to_int(param, 1)
		)

	# (intensity, ansi color code) -> win32 character attribute bits
	escape_to_color = { (0, 30): 0x0, #black
		(0, 31): 0x4, #red
		(0, 32): 0x2, #green
		(0, 33): 0x4+0x2, #dark yellow
		(0, 34): 0x1, #blue
		(0, 35): 0x1+0x4, #purple
		(0, 36): 0x2+0x4, #cyan
		(0, 37): 0x1+0x2+0x4, #grey
		(1, 30): 0x1+0x2+0x4, #dark gray
		(1, 31): 0x4+0x8, #red
		(1, 32): 0x2+0x8, #light green
		(1, 33): 0x4+0x2+0x8, #yellow
		(1, 34): 0x1+0x8, #light blue
		(1, 35): 0x1+0x4+0x8, #light purple
		(1, 36): 0x1+0x2+0x8, #light cyan
		(1, 37): 0x1+0x2+0x4+0x8, #white
	}

	def set_color(self, param):
		intensity, sep, color = param.partition(';')
		intensity = to_int(intensity, 0)
		color = to_int(color, 0)
		if intensity and not color:
			# a bare 'ESC[NNm' carries the color code in the first field
			color, intensity = intensity, color
		attrib = self.escape_to_color.get((intensity, color), 0x7)
		windll.kernel32.SetConsoleTextAttribute(self.hconsole, attrib)

	def show_cursor(self,param):
		csinfo.bVisible = 1
		windll.kernel32.SetConsoleCursorInfo(self.hconsole, byref(csinfo))

	def hide_cursor(self,param):
		csinfo.bVisible = 0
		windll.kernel32.SetConsoleCursorInfo(self.hconsole, byref(csinfo))

	# escape-sequence final byte -> handler method
	ansi_command_table = {
		'A': move_up,
		'B': move_down,
		'C': move_right,
		'D': move_left,
		'E': next_line,
		'F': prev_line,
		'G': set_column,
		'H': set_cursor,
		'f': set_cursor,
		'J': clear_screen,
		'K': clear_line,
		'h': show_cursor,
		'l': hide_cursor,
		'm': set_color,
		's': push_cursor,
		'u': pop_cursor,
	}
	# Match either the escape sequence or text not containing escape sequence
	ansi_tokans = re.compile('(?:\x1b\[([0-9?;]*)([a-zA-Z])|([^\x1b]+))')

	def write(self, text):
		wlock.acquire()
		for param, cmd, txt in self.ansi_tokans.findall(text):
			if cmd:
				cmd_func = self.ansi_command_table.get(cmd)
				if cmd_func:
					cmd_func(self, param)
			else:
				chars_written = c_int()
				# 'unicode' only exists under python 2
				if isinstance(txt, unicode):
					windll.kernel32.WriteConsoleW(self.hconsole, txt, len(txt), byref(chars_written), None)
				else:
					windll.kernel32.WriteConsoleA(self.hconsole, txt, len(txt), byref(chars_written), None)
		wlock.release()

	def flush(self):
		pass

	def isatty(self):
		return True
# replace the standard streams with the ANSI emulator and advertise
# vt100 capabilities to the rest of the build system
sys.stderr = sys.stdout = AnsiTerm()
os.environ['TERM'] = 'vt100'

620
tools/wafadmin/pproc.py

@ -1,620 +0,0 @@
# borrowed from python 2.5.2c1
# Copyright (c) 2003-2005 by Peter Astrand <astrand@lysator.liu.se>
# Licensed to PSF under a Contributor Agreement.
import sys

# True on native win32 builds (cygwin reports 'cygwin' and takes the
# posix code paths below)
mswindows = (sys.platform == "win32")

import os
import types
import traceback
import gc
class CalledProcessError(Exception):
	"""Raised by check_call when a command exits with a non-zero status."""

	def __init__(self, returncode, cmd):
		self.returncode = returncode
		self.cmd = cmd

	def __str__(self):
		return "Command '%s' returned non-zero exit status %d" % (self.cmd, self.returncode)
if mswindows:
	import threading
	import msvcrt
	if 0:
		# dead branch kept from the stdlib original: the pywin32 variants
		# of the win32 process/pipe APIs
		import pywintypes
		from win32api import GetStdHandle, STD_INPUT_HANDLE, \
			STD_OUTPUT_HANDLE, STD_ERROR_HANDLE
		from win32api import GetCurrentProcess, DuplicateHandle, \
			GetModuleFileName, GetVersion
		from win32con import DUPLICATE_SAME_ACCESS, SW_HIDE
		from win32pipe import CreatePipe
		from win32process import CreateProcess, STARTUPINFO, \
			GetExitCodeProcess, STARTF_USESTDHANDLES, \
			STARTF_USESHOWWINDOW, CREATE_NEW_CONSOLE
		from win32event import WaitForSingleObject, INFINITE, WAIT_OBJECT_0
	else:
		# the C-implemented _subprocess module plus minimal stand-ins for
		# the pywin32 types referenced by the code below
		from _subprocess import *
		class STARTUPINFO:
			dwFlags = 0
			hStdInput = None
			hStdOutput = None
			hStdError = None
			wShowWindow = 0
		class pywintypes:
			error = IOError
else:
	import select
	import errno
	import fcntl
	import pickle
__all__ = ["Popen", "PIPE", "STDOUT", "call", "check_call", "CalledProcessError"]
try:
MAXFD = os.sysconf("SC_OPEN_MAX")
except:
MAXFD = 256
try:
False
except NameError:
False = 0
True = 1
_active = []
def _cleanup():
for inst in _active[:]:
if inst.poll(_deadstate=sys.maxint) >= 0:
try:
_active.remove(inst)
except ValueError:
pass
PIPE = -1
STDOUT = -2
def call(*popenargs, **kwargs):
	"""Run a command with Popen, wait for completion and return the exit code."""
	proc = Popen(*popenargs, **kwargs)
	return proc.wait()
def check_call(*popenargs, **kwargs):
	"""Like call(), but raise CalledProcessError on a non-zero exit status."""
	retcode = call(*popenargs, **kwargs)
	cmd = kwargs.get("args")
	if cmd is None:
		cmd = popenargs[0]
	if retcode:
		raise CalledProcessError(retcode, cmd)
	return retcode
def list2cmdline(seq):
	"""Translate a sequence of arguments into a single command-line string
	following the MS C runtime quoting rules (see the stdlib subprocess
	documentation for the full specification)."""
	result = []
	needquote = False
	for arg in seq:
		# pending run of backslashes whose meaning depends on what follows
		bs_buf = []

		# separate arguments with a space
		if result:
			result.append(' ')

		needquote = (" " in arg) or ("\t" in arg) or arg == ""
		if needquote:
			result.append('"')

		for c in arg:
			if c == '\\':
				# defer: these may precede a quote and need doubling
				bs_buf.append(c)
			elif c == '"':
				# double the preceding backslashes, then escape the quote
				result.append('\\' * len(bs_buf)*2)
				bs_buf = []
				result.append('\\"')
			else:
				# normal character: flush pending backslashes unmodified
				if bs_buf:
					result.extend(bs_buf)
					bs_buf = []
				result.append(c)

		# flush trailing backslashes
		if bs_buf:
			result.extend(bs_buf)

		if needquote:
			# backslashes before the closing quote must be doubled too
			result.extend(bs_buf)
			result.append('"')
	return ''.join(result)
class Popen(object):
def __init__(self, args, bufsize=0, executable=None,
stdin=None, stdout=None, stderr=None,
preexec_fn=None, close_fds=False, shell=False,
cwd=None, env=None, universal_newlines=False,
startupinfo=None, creationflags=0):
_cleanup()
self._child_created = False
if not isinstance(bufsize, (int, long)):
raise TypeError("bufsize must be an integer")
if mswindows:
if preexec_fn is not None:
raise ValueError("preexec_fn is not supported on Windows platforms")
if close_fds:
raise ValueError("close_fds is not supported on Windows platforms")
else:
if startupinfo is not None:
raise ValueError("startupinfo is only supported on Windows platforms")
if creationflags != 0:
raise ValueError("creationflags is only supported on Windows platforms")
self.stdin = None
self.stdout = None
self.stderr = None
self.pid = None
self.returncode = None
self.universal_newlines = universal_newlines
(p2cread, p2cwrite,
c2pread, c2pwrite,
errread, errwrite) = self._get_handles(stdin, stdout, stderr)
self._execute_child(args, executable, preexec_fn, close_fds,
cwd, env, universal_newlines,
startupinfo, creationflags, shell,
p2cread, p2cwrite,
c2pread, c2pwrite,
errread, errwrite)
if mswindows:
if stdin is None and p2cwrite is not None:
os.close(p2cwrite)
p2cwrite = None
if stdout is None and c2pread is not None:
os.close(c2pread)
c2pread = None
if stderr is None and errread is not None:
os.close(errread)
errread = None
if p2cwrite:
self.stdin = os.fdopen(p2cwrite, 'wb', bufsize)
if c2pread:
if universal_newlines:
self.stdout = os.fdopen(c2pread, 'rU', bufsize)
else:
self.stdout = os.fdopen(c2pread, 'rb', bufsize)
if errread:
if universal_newlines:
self.stderr = os.fdopen(errread, 'rU', bufsize)
else:
self.stderr = os.fdopen(errread, 'rb', bufsize)
def _translate_newlines(self, data):
data = data.replace("\r\n", "\n")
data = data.replace("\r", "\n")
return data
def __del__(self, sys=sys):
if not self._child_created:
return
self.poll(_deadstate=sys.maxint)
if self.returncode is None and _active is not None:
_active.append(self)
def communicate(self, input=None):
if [self.stdin, self.stdout, self.stderr].count(None) >= 2:
stdout = None
stderr = None
if self.stdin:
if input:
self.stdin.write(input)
self.stdin.close()
elif self.stdout:
stdout = self.stdout.read()
elif self.stderr:
stderr = self.stderr.read()
self.wait()
return (stdout, stderr)
return self._communicate(input)
if mswindows:
def _get_handles(self, stdin, stdout, stderr):
if stdin is None and stdout is None and stderr is None:
return (None, None, None, None, None, None)
p2cread, p2cwrite = None, None
c2pread, c2pwrite = None, None
errread, errwrite = None, None
if stdin is None:
p2cread = GetStdHandle(STD_INPUT_HANDLE)
if p2cread is not None:
pass
elif stdin is None or stdin == PIPE:
p2cread, p2cwrite = CreatePipe(None, 0)
p2cwrite = p2cwrite.Detach()
p2cwrite = msvcrt.open_osfhandle(p2cwrite, 0)
elif isinstance(stdin, int):
p2cread = msvcrt.get_osfhandle(stdin)
else:
p2cread = msvcrt.get_osfhandle(stdin.fileno())
p2cread = self._make_inheritable(p2cread)
if stdout is None:
c2pwrite = GetStdHandle(STD_OUTPUT_HANDLE)
if c2pwrite is not None:
pass
elif stdout is None or stdout == PIPE:
c2pread, c2pwrite = CreatePipe(None, 0)
c2pread = c2pread.Detach()
c2pread = msvcrt.open_osfhandle(c2pread, 0)
elif isinstance(stdout, int):
c2pwrite = msvcrt.get_osfhandle(stdout)
else:
c2pwrite = msvcrt.get_osfhandle(stdout.fileno())
c2pwrite = self._make_inheritable(c2pwrite)
if stderr is None:
errwrite = GetStdHandle(STD_ERROR_HANDLE)
if errwrite is not None:
pass
elif stderr is None or stderr == PIPE:
errread, errwrite = CreatePipe(None, 0)
errread = errread.Detach()
errread = msvcrt.open_osfhandle(errread, 0)
elif stderr == STDOUT:
errwrite = c2pwrite
elif isinstance(stderr, int):
errwrite = msvcrt.get_osfhandle(stderr)
else:
errwrite = msvcrt.get_osfhandle(stderr.fileno())
errwrite = self._make_inheritable(errwrite)
return (p2cread, p2cwrite,
c2pread, c2pwrite,
errread, errwrite)
def _make_inheritable(self, handle):
return DuplicateHandle(GetCurrentProcess(), handle, GetCurrentProcess(), 0, 1, DUPLICATE_SAME_ACCESS)
def _find_w9xpopen(self):
w9xpopen = os.path.join(os.path.dirname(GetModuleFileName(0)), "w9xpopen.exe")
if not os.path.exists(w9xpopen):
w9xpopen = os.path.join(os.path.dirname(sys.exec_prefix), "w9xpopen.exe")
if not os.path.exists(w9xpopen):
raise RuntimeError("Cannot locate w9xpopen.exe, which is needed for Popen to work with your shell or platform.")
return w9xpopen
def _execute_child(self, args, executable, preexec_fn, close_fds,
cwd, env, universal_newlines,
startupinfo, creationflags, shell,
p2cread, p2cwrite,
c2pread, c2pwrite,
errread, errwrite):
if not isinstance(args, types.StringTypes):
args = list2cmdline(args)
if startupinfo is None:
startupinfo = STARTUPINFO()
if None not in (p2cread, c2pwrite, errwrite):
startupinfo.dwFlags |= STARTF_USESTDHANDLES
startupinfo.hStdInput = p2cread
startupinfo.hStdOutput = c2pwrite
startupinfo.hStdError = errwrite
if shell:
startupinfo.dwFlags |= STARTF_USESHOWWINDOW
startupinfo.wShowWindow = SW_HIDE
comspec = os.environ.get("COMSPEC", "cmd.exe")
args = comspec + " /c " + args
if (GetVersion() >= 0x80000000L or
os.path.basename(comspec).lower() == "command.com"):
w9xpopen = self._find_w9xpopen()
args = '"%s" %s' % (w9xpopen, args)
creationflags |= CREATE_NEW_CONSOLE
try:
hp, ht, pid, tid = CreateProcess(executable, args, None, None, 1, creationflags, env, cwd, startupinfo)
except pywintypes.error, e:
raise WindowsError(*e.args)
self._child_created = True
self._handle = hp
self.pid = pid
ht.Close()
if p2cread is not None:
p2cread.Close()
if c2pwrite is not None:
c2pwrite.Close()
if errwrite is not None:
errwrite.Close()
def poll(self, _deadstate=None):
if self.returncode is None:
if WaitForSingleObject(self._handle, 0) == WAIT_OBJECT_0:
self.returncode = GetExitCodeProcess(self._handle)
return self.returncode
def wait(self):
if self.returncode is None:
obj = WaitForSingleObject(self._handle, INFINITE)
self.returncode = GetExitCodeProcess(self._handle)
return self.returncode
def _readerthread(self, fh, buffer):
buffer.append(fh.read())
def _communicate(self, input):
stdout = None
stderr = None
if self.stdout:
stdout = []
stdout_thread = threading.Thread(target=self._readerthread, args=(self.stdout, stdout))
stdout_thread.setDaemon(True)
stdout_thread.start()
if self.stderr:
stderr = []
stderr_thread = threading.Thread(target=self._readerthread, args=(self.stderr, stderr))
stderr_thread.setDaemon(True)
stderr_thread.start()
if self.stdin:
if input is not None:
self.stdin.write(input)
self.stdin.close()
if self.stdout:
stdout_thread.join()
if self.stderr:
stderr_thread.join()
if stdout is not None:
stdout = stdout[0]
if stderr is not None:
stderr = stderr[0]
if self.universal_newlines and hasattr(file, 'newlines'):
if stdout:
stdout = self._translate_newlines(stdout)
if stderr:
stderr = self._translate_newlines(stderr)
self.wait()
return (stdout, stderr)
else:
def _get_handles(self, stdin, stdout, stderr):
p2cread, p2cwrite = None, None
c2pread, c2pwrite = None, None
errread, errwrite = None, None
if stdin is None:
pass
elif stdin == PIPE:
p2cread, p2cwrite = os.pipe()
elif isinstance(stdin, int):
p2cread = stdin
else:
p2cread = stdin.fileno()
if stdout is None:
pass
elif stdout == PIPE:
c2pread, c2pwrite = os.pipe()
elif isinstance(stdout, int):
c2pwrite = stdout
else:
c2pwrite = stdout.fileno()
if stderr is None:
pass
elif stderr == PIPE:
errread, errwrite = os.pipe()
elif stderr == STDOUT:
errwrite = c2pwrite
elif isinstance(stderr, int):
errwrite = stderr
else:
errwrite = stderr.fileno()
return (p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite)
def _set_cloexec_flag(self, fd):
try:
cloexec_flag = fcntl.FD_CLOEXEC
except AttributeError:
cloexec_flag = 1
old = fcntl.fcntl(fd, fcntl.F_GETFD)
fcntl.fcntl(fd, fcntl.F_SETFD, old | cloexec_flag)
def _close_fds(self, but):
for i in xrange(3, MAXFD):
if i == but:
continue
try:
os.close(i)
except:
pass
def _execute_child(self, args, executable, preexec_fn, close_fds,
cwd, env, universal_newlines, startupinfo, creationflags, shell,
p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite):
if isinstance(args, types.StringTypes):
args = [args]
else:
args = list(args)
if shell:
args = ["/bin/sh", "-c"] + args
if executable is None:
executable = args[0]
errpipe_read, errpipe_write = os.pipe()
self._set_cloexec_flag(errpipe_write)
gc_was_enabled = gc.isenabled()
gc.disable()
try:
self.pid = os.fork()
except:
if gc_was_enabled:
gc.enable()
raise
self._child_created = True
if self.pid == 0:
try:
if p2cwrite:
os.close(p2cwrite)
if c2pread:
os.close(c2pread)
if errread:
os.close(errread)
os.close(errpipe_read)
if p2cread:
os.dup2(p2cread, 0)
if c2pwrite:
os.dup2(c2pwrite, 1)
if errwrite:
os.dup2(errwrite, 2)
if p2cread and p2cread not in (0,):
os.close(p2cread)
if c2pwrite and c2pwrite not in (p2cread, 1):
os.close(c2pwrite)
if errwrite and errwrite not in (p2cread, c2pwrite, 2):
os.close(errwrite)
if close_fds:
self._close_fds(but=errpipe_write)
if cwd is not None:
os.chdir(cwd)
if preexec_fn:
apply(preexec_fn)
if env is None:
os.execvp(executable, args)
else:
os.execvpe(executable, args, env)
except:
exc_type, exc_value, tb = sys.exc_info()
exc_lines = traceback.format_exception(exc_type, exc_value, tb)
exc_value.child_traceback = ''.join(exc_lines)
os.write(errpipe_write, pickle.dumps(exc_value))
os._exit(255)
if gc_was_enabled:
gc.enable()
os.close(errpipe_write)
if p2cread and p2cwrite:
os.close(p2cread)
if c2pwrite and c2pread:
os.close(c2pwrite)
if errwrite and errread:
os.close(errwrite)
data = os.read(errpipe_read, 1048576)
os.close(errpipe_read)
if data != "":
os.waitpid(self.pid, 0)
child_exception = pickle.loads(data)
raise child_exception
def _handle_exitstatus(self, sts):
    """Decode a waitpid() status word into self.returncode.

    Normal termination yields the (non-negative) exit code; death by
    signal yields the negated signal number, as in stdlib subprocess.
    """
    if os.WIFEXITED(sts):
        self.returncode = os.WEXITSTATUS(sts)
    elif os.WIFSIGNALED(sts):
        self.returncode = -os.WTERMSIG(sts)
    else:
        # Neither exited nor signaled (e.g. a stop status) — should not
        # reach here for a reaped child.
        raise RuntimeError("Unknown child exit status!")
def poll(self, _deadstate=None):
    """Non-blocking status check.

    Returns self.returncode, which remains None while the child is
    still running.  If waitpid() fails (e.g. the child was already
    reaped elsewhere), *_deadstate* — when given — is recorded as the
    return code instead.
    """
    if self.returncode is not None:
        return self.returncode
    try:
        waited_pid, status = os.waitpid(self.pid, os.WNOHANG)
    except os.error:
        if _deadstate is not None:
            self.returncode = _deadstate
    else:
        if waited_pid == self.pid:
            self._handle_exitstatus(status)
    return self.returncode
def wait(self):
    """Block until the child terminates; return (and cache) returncode."""
    if self.returncode is None:
        # waitpid() returns (pid, status); only the status word matters.
        status = os.waitpid(self.pid, 0)[1]
        self._handle_exitstatus(status)
    return self.returncode
def _communicate(self, input):
    """Feed *input* to the child's stdin while draining stdout/stderr.

    Uses a select() loop so the child cannot deadlock on a full pipe.
    Returns a (stdout, stderr) tuple; a stream that was not redirected
    comes back as None.  Blocks until the child exits (self.wait()).
    """
    read_set = []
    write_set = []
    stdout = None  # stays None when stdout was not redirected
    stderr = None

    if self.stdin:
        # Flush buffered data before using the raw fd with os.write().
        self.stdin.flush()
        if input:
            write_set.append(self.stdin)
        else:
            # Nothing to send: close stdin so the child sees EOF.
            self.stdin.close()
    if self.stdout:
        read_set.append(self.stdout)
        stdout = []
    if self.stderr:
        read_set.append(self.stderr)
        stderr = []

    input_offset = 0
    while read_set or write_set:
        rlist, wlist, xlist = select.select(read_set, write_set, [])

        if self.stdin in wlist:
            # Write in 512-byte slices; buffer() gives a zero-copy view
            # of the remaining input (Python 2 idiom).
            bytes_written = os.write(self.stdin.fileno(), buffer(input, input_offset, 512))
            input_offset += bytes_written
            if input_offset >= len(input):
                # Everything sent: signal EOF and stop selecting on stdin.
                self.stdin.close()
                write_set.remove(self.stdin)

        if self.stdout in rlist:
            data = os.read(self.stdout.fileno(), 1024)
            if data == "":
                # EOF: the child closed its end.
                self.stdout.close()
                read_set.remove(self.stdout)
            stdout.append(data)

        if self.stderr in rlist:
            data = os.read(self.stderr.fileno(), 1024)
            if data == "":
                self.stderr.close()
                read_set.remove(self.stderr)
            stderr.append(data)

    # Join the chunks collected above into single strings.
    if stdout is not None:
        stdout = ''.join(stdout)
    if stderr is not None:
        stderr = ''.join(stderr)

    # Translate line endings only when the platform's file type supports
    # universal newlines (same guard as the stdlib subprocess module).
    if self.universal_newlines and hasattr(file, 'newlines'):
        if stdout:
            stdout = self._translate_newlines(stdout)
        if stderr:
            stderr = self._translate_newlines(stderr)

    self.wait()
    return (stdout, stderr)

122
tools/wafadmin/py3kfixes.py

@ -1,122 +0,0 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2009 (ita)
"""
Fixes for py3k go here
"""
import os
all_modifs = {}
def modif(dir, name, fun):
    """Rewrite the file *name* under *dir* by passing its text through *fun*.

    The special name '*' recursively applies *fun* to every .py file in
    the '.', 'Tools' and '3rdparty' subdirectories of *dir*.
    """
    if name == '*':
        for sub in ('.', 'Tools', '3rdparty'):
            for entry in os.listdir(os.path.join(dir, sub)):
                if entry.endswith('.py'):
                    modif(dir, sub + os.sep + entry, fun)
        return

    path = os.path.join(dir, name)
    fh = open(path, 'r')
    text = fh.read()
    fh.close()

    fh = open(path, 'w')
    fh.write(fun(text))
    fh.close()
def subst(filename):
    """Decorator factory: register a substitution function for *filename*.

    The returned decorator appends the decorated function to
    all_modifs[filename] (creating the list on first use) and returns
    the function unchanged so it stays usable at module level.
    """
    def do_subst(fun):
        global all_modifs
        # BUG FIX: the original did `all_modifs[filename] += fun`, which
        # raises TypeError (cannot concatenate a list and a function) as
        # soon as a second substitution is registered for the same file.
        all_modifs.setdefault(filename, []).append(fun)
        return fun
    return do_subst
@subst('Constants.py')
def r1(code):
    """Make the cache cookie a bytes literal and bump the ABI for py3."""
    for old, new in (
        ("'iluvcuteoverload'", "b'iluvcuteoverload'"),
        ("ABI=7", "ABI=37"),
    ):
        code = code.replace(old, new)
    return code
@subst('Tools/ccroot.py')
def r2(code):
    """Write bytes to the child's stdin and decode its output as UTF-8."""
    for old, new in (
        ("p.stdin.write('\\n')", "p.stdin.write(b'\\n')"),
        ('p.communicate()[0]', 'p.communicate()[0].decode("utf-8")'),
    ):
        code = code.replace(old, new)
    return code
@subst('Utils.py')
def r3(code):
    """Hash an encoded string and decode subprocess output as UTF-8."""
    for old, new in (
        ("m.update(str(lst))", "m.update(str(lst).encode())"),
        ('p.communicate()[0]', 'p.communicate()[0].decode("utf-8")'),
    ):
        code = code.replace(old, new)
    return code
@subst('ansiterm.py')
def r33(code):
    """py3 has no `unicode` builtin; map every occurrence to `str`."""
    return code.replace('unicode', 'str')
@subst('Task.py')
def r4(code):
    """Hash bytes instead of str, switch to py3 metaclass syntax, and
    replace str.encode('hex') with binascii.hexlify.

    The replacement order matters: the longer up(...) patterns are
    rewritten before the shorter ones they could otherwise shadow.
    """
    for old, new in (
        ("up(self.__class__.__name__)", "up(self.__class__.__name__.encode())"),
        ("up(self.env.variant())", "up(self.env.variant().encode())"),
        ("up(x.parent.abspath())", "up(x.parent.abspath().encode())"),
        ("up(x.name)", "up(x.name.encode())"),
        ('class TaskBase(object):\n\t__metaclass__=store_task_type', 'import binascii\n\nclass TaskBase(object, metaclass=store_task_type):'),
        ('keys=self.cstr_groups.keys()', 'keys=list(self.cstr_groups.keys())'),
        ("sig.encode('hex')", 'binascii.hexlify(sig)'),
    ):
        code = code.replace(old, new)
    return code
@subst('Build.py')
def r5(code):
    """Drop the py2-only pickle protocol arg; materialize dict values
    before iterating (the dict is mutated inside the loop)."""
    for old, new in (
        ("cPickle.dump(data,file,-1)", "cPickle.dump(data,file)"),
        ('for node in src_dir_node.childs.values():', 'for node in list(src_dir_node.childs.values()):'),
    ):
        code = code.replace(old, new)
    return code
@subst('*')
def r6(code):
    """Blanket py2→py3 renames applied to every wafadmin module."""
    for old, new in (
        ('xrange', 'range'),
        ('iteritems', 'items'),
        ('maxint', 'maxsize'),
        ('iterkeys', 'keys'),
        ('Error,e:', 'Error as e:'),
        ('Exception,e:', 'Exception as e:'),
    ):
        code = code.replace(old, new)
    return code
@subst('TaskGen.py')
def r7(code):
    """Convert the task_gen metaclass declaration to py3 syntax."""
    return code.replace('class task_gen(object):\n\t__metaclass__=register_obj', 'class task_gen(object, metaclass=register_obj):')
@subst('Tools/python.py')
def r8(code):
    """Decode subprocess output as UTF-8 instead of using raw bytes."""
    return code.replace('proc.communicate()[0]', 'proc.communicate()[0].decode("utf-8")')
@subst('Tools/glib2.py')
def r9(code):
    """Encode the string before writing to a binary-mode file."""
    return code.replace('f.write(c)', 'f.write(c.encode("utf-8"))')
@subst('Tools/config_c.py')
def r10(code):
    """Best-effort decode of the configuration-check result key to str."""
    return code.replace("key=kw['success']", "key=kw['success']\n\t\t\t\ttry:\n\t\t\t\t\tkey=key.decode('utf-8')\n\t\t\t\texcept:\n\t\t\t\t\tpass")
def fixdir(dir):
    """Apply every registered substitution to the wafadmin tree in *dir*."""
    base = os.path.join(dir, 'wafadmin')
    for fname in all_modifs:
        for fun in all_modifs[fname]:
            modif(base, fname, fun)
#print('substitutions finished')
Loading…
Cancel
Save