Browse Source

Remove a bunch of useless waf files

v0.7.4-release
Ryan Dahl 15 years ago
parent
commit
3d948d85ce
  1. 342
      tools/wafadmin/3rdparty/boost.py
  2. 27
      tools/wafadmin/3rdparty/fluid.py
  3. 128
      tools/wafadmin/3rdparty/gccdeps.py
  4. 97
      tools/wafadmin/3rdparty/go.py
  5. 199
      tools/wafadmin/3rdparty/swig.py
  6. 113
      tools/wafadmin/3rdparty/valadoc.py
  7. 35
      tools/wafadmin/3rdparty/wagner.py
  8. 38
      tools/wafadmin/Tools/bison.py
  9. 68
      tools/wafadmin/Tools/cs.py
  10. 34
      tools/wafadmin/Tools/dbus.py
  11. 25
      tools/wafadmin/Tools/flex.py
  12. 164
      tools/wafadmin/Tools/glib2.py
  13. 223
      tools/wafadmin/Tools/gnome.py
  14. 254
      tools/wafadmin/Tools/javaw.py
  15. 74
      tools/wafadmin/Tools/kde4.py
  16. 25
      tools/wafadmin/Tools/lua.py
  17. 792
      tools/wafadmin/Tools/msvc.py
  18. 298
      tools/wafadmin/Tools/ocaml.py
  19. 109
      tools/wafadmin/Tools/perl.py
  20. 505
      tools/wafadmin/Tools/qt4.py
  21. 120
      tools/wafadmin/Tools/ruby.py
  22. 227
      tools/wafadmin/Tools/tex.py
  23. 307
      tools/wafadmin/Tools/vala.py

342
tools/wafadmin/3rdparty/boost.py

@ -1,342 +0,0 @@
#!/usr/bin/env python
# encoding: utf-8
#
# partially based on boost.py written by Gernot Vormayr
# written by Ruediger Sonderfeld <ruediger@c-plusplus.de>, 2008
# modified by Bjoern Michaelsen, 2008
# modified by Luca Fossati, 2008
# rewritten for waf 1.5.1, Thomas Nagy, 2008
#
#def set_options(opt):
# opt.tool_options('boost')
# # ...
#
#def configure(conf):
# # ... (e.g. conf.check_tool('g++'))
# conf.check_tool('boost')
# conf.check_boost(lib='signals filesystem', static='onlystatic', score_version=(-1000, 1000), tag_minscore=1000)
#
#def build(bld):
# bld(source='main.c', target='bar', uselib="BOOST BOOST_SYSTEM")
#
#ISSUES:
# * find_includes should be called only once!
# * support mandatory
######## boost update ###########
## ITA: * the method get_boost_version_number does work
## * the rest of the code has not really been tried
# * make certain a demo is provided (in demos/adv for example)
# TODO: bad and underdocumented code -> boost.py will be removed in waf 1.6 to be rewritten later
import os.path, glob, types, re, sys
import Configure, config_c, Options, Utils, Logs
from Logs import warn
from Configure import conf
# C++ snippet compiled and executed to print the installed BOOST_VERSION macro.
boost_code = '''
#include <iostream>
#include <boost/version.hpp>
int main() { std::cout << BOOST_VERSION << std::endl; }
'''

# default search paths for boost libraries and headers
boost_libpath = ['/usr/lib', '/usr/local/lib', '/opt/local/lib', '/sw/lib', '/lib']
boost_cpppath = ['/usr/include', '/usr/local/include', '/opt/local/include', '/sw/include']

# accepted values for the 'static' keyword of check_boost()
STATIC_NOSTATIC = 'nostatic'
STATIC_BOTH = 'both'
STATIC_ONLYSTATIC = 'onlystatic'

# regexps classifying the tag fragments embedded in boost library file names,
# e.g. libboost_thread-gcc43-mt-d-1_38 -> toolset / threading / abi / version
is_versiontag = re.compile('^\d+_\d+_?\d*$')
is_threadingtag = re.compile('^mt$')
is_abitag = re.compile('^[sgydpn]+$')
is_toolsettag = re.compile('^(acc|borland|como|cw|dmc|darwin|gcc|hp_cxx|intel|kylix|vc|mgw|qcc|sun|vacpp)\d*$')
is_pythontag=re.compile('^py[0-9]{2}$')
def set_options(opt):
	"""Register the --boost-includes and --boost-libs command line options."""
	specs = (
		('--boost-includes', 'boostincludes',
		 'path to the boost directory where the includes are e.g. /usr/local/include/boost-1_35'),
		('--boost-libs', 'boostlibs',
		 'path to the directory where the boost libs are e.g. /usr/local/lib'),
	)
	for flag, dest, help_txt in specs:
		opt.add_option(flag, type='string', default='', dest=dest, help=help_txt)
def string_to_version(s):
version = s.split('.')
if len(version) < 3: return 0
return int(version[0])*100000 + int(version[1])*100 + int(version[2])
def version_string(version):
major = version / 100000
minor = version / 100 % 1000
minor_minor = version % 100
if minor_minor == 0:
return "%d_%d" % (major, minor)
else:
return "%d_%d_%d" % (major, minor, minor_minor)
def libfiles(lib, pattern, lib_paths):
	"""Glob every directory in lib_paths for files of the boost component 'lib'.

	pattern is the platform library pattern (e.g. 'lib%s.so'); the [!_] glob
	class stops 'signals' from also matching 'signals_extra'-style names.
	"""
	libname = pattern % ('boost_%s[!_]*' % lib)
	found = []
	for directory in lib_paths:
		found += glob.glob(os.path.join(directory, libname))
	return found
@conf
def get_boost_version_number(self, dir):
	"""silently retrieve the boost version number"""
	# compile and run the boost_code snippet against the candidate include
	# directory; on success the program prints BOOST_VERSION (e.g. 103500)
	try:
		return self.run_c_code(compiler='cxx', code=boost_code, includes=dir, execute=1, env=self.env.copy(), type='cprogram', compile_mode='cxx', compile_filename='test.cpp')
	# Python 2 'except E, e' syntax; -1 tells callers "could not compile/run"
	except Configure.ConfigurationError, e:
		return -1
def set_default(kw, var, val):
	"""Store val under kw[var] unless the caller already supplied a value."""
	kw.setdefault(var, val)
def tags_score(tags, kw):
	"""
	checks library tags

	see http://www.boost.org/doc/libs/1_35_0/more/getting_started/unix-variants.html 6.1
	"""
	score = 0
	needed_tags = {
		'threading': kw['tag_threading'],
		'abi': kw['tag_abi'],
		'toolset': kw['tag_toolset'],
		'version': kw['tag_version'],
		'python': kw['tag_python']
	}
	# when no toolset tag is requested, derive one from the compiler name+version
	if kw['tag_toolset'] is None:
		v = kw['env']
		toolset = v['CXX_NAME']
		if v['CXX_VERSION']:
			version_no = v['CXX_VERSION'].split('.')
			toolset += version_no[0]
			if len(version_no) > 1:
				toolset += version_no[1]
		needed_tags['toolset'] = toolset
	found_tags = {}
	for tag in tags:
		if is_versiontag.match(tag): found_tags['version'] = tag
		if is_threadingtag.match(tag): found_tags['threading'] = tag
		if is_abitag.match(tag): found_tags['abi'] = tag
		if is_toolsettag.match(tag): found_tags['toolset'] = tag
		if is_pythontag.match(tag): found_tags['python'] = tag
	# each requested tag adds score_<tag>[0] on a regexp match and
	# score_<tag>[1] otherwise; keys() replaces the Python2-only iterkeys()
	# and keeps the module importable on both interpreters
	for tagname in needed_tags.keys():
		if needed_tags[tagname] is not None and tagname in found_tags:
			if re.compile(needed_tags[tagname]).match(found_tags[tagname]):
				score += kw['score_' + tagname][0]
			else:
				score += kw['score_' + tagname][1]
	return score
@conf
def validate_boost(self, kw):
	"""Fill every keyword argument of check_boost() with its default value.

	Mutates kw in place. Ordering matters below: 'python' must be set
	before 'tag_python', which is derived from it.
	"""
	ver = kw.get('version', '')
	for x in 'min_version max_version version'.split():
		set_default(kw, x, ver)
	set_default(kw, 'lib', '')
	kw['lib'] = Utils.to_list(kw['lib'])
	set_default(kw, 'env', self.env)
	set_default(kw, 'libpath', boost_libpath)
	set_default(kw, 'cpppath', boost_cpppath)
	for x in 'tag_threading tag_version tag_toolset'.split():
		set_default(kw, x, None)
	# by default reject debug-tagged ('d') library variants
	set_default(kw, 'tag_abi', '^[^d]*$')
	# current interpreter version as 'XY', used to match pyXY tags
	set_default(kw, 'python', str(sys.version_info[0]) + str(sys.version_info[1]) )
	set_default(kw, 'tag_python', '^py' + kw['python'] + '$')
	# (match, mismatch) score tuples consumed by tags_score()
	set_default(kw, 'score_threading', (10, -10))
	set_default(kw, 'score_abi', (10, -10))
	set_default(kw, 'score_python', (10,-10))
	set_default(kw, 'score_toolset', (1, -1))
	set_default(kw, 'score_version', (100, -100))
	set_default(kw, 'score_min', 0)
	set_default(kw, 'static', STATIC_NOSTATIC)
	set_default(kw, 'found_includes', False)
	set_default(kw, 'min_score', 0)
	set_default(kw, 'errmsg', 'not found')
	set_default(kw, 'okmsg', 'ok')
@conf
def find_boost_includes(self, kw):
	"""
	check every path in kw['cpppath'] for subdir
	that either starts with boost- or is named boost.

	Then the version is checked and selected accordingly to
	min_version/max_version. The highest possible version number is
	selected!

	If no versiontag is set the versiontag is set accordingly to the
	selected library and CPPPATH_BOOST is set.
	"""
	# --boost-includes on the command line overrides kw['cpppath']
	boostPath = getattr(Options.options, 'boostincludes', '')
	if boostPath:
		boostPath = [os.path.normpath(os.path.expandvars(os.path.expanduser(boostPath)))]
	else:
		boostPath = Utils.to_list(kw['cpppath'])
	min_version = string_to_version(kw.get('min_version', ''))
	# NOTE(review): sys.maxint is Python2-only (removed in Python 3)
	max_version = string_to_version(kw.get('max_version', '')) or (sys.maxint - 1)
	version = 0
	for include_path in boostPath:
		boost_paths = glob.glob(os.path.join(include_path, 'boost*'))
		for path in boost_paths:
			pathname = os.path.split(path)[-1]
			ret = -1
			if pathname == 'boost':
				# headers live directly in <include_path>/boost/
				path = include_path
				ret = self.get_boost_version_number(path)
			elif pathname.startswith('boost-'):
				ret = self.get_boost_version_number(path)
			ret = int(ret)
			# keep the highest version inside [min_version, max_version]
			if ret != -1 and ret >= min_version and ret <= max_version and ret > version:
				boost_path = path
				version = ret
	if not version:
		self.fatal('boost headers not found! (required version min: %s max: %s)'
			  % (kw['min_version'], kw['max_version']))
		return False
	found_version = version_string(version)
	versiontag = '^' + found_version + '$'
	if kw['tag_version'] is None:
		kw['tag_version'] = versiontag
	elif kw['tag_version'] != versiontag:
		warn('boost header version %r and tag_version %r do not match!' % (versiontag, kw['tag_version']))
	env = self.env
	env['CPPPATH_BOOST'] = boost_path
	env['BOOST_VERSION'] = found_version
	self.found_includes = 1
	ret = 'Version %s (%s)' % (found_version, boost_path)
	return ret
@conf
def find_boost_library(self, lib, kw):
	"""Locate the shared or static library of boost component 'lib' and set
	LIBPATH_BOOST_<LIB> plus LIB/STATICLIB_BOOST_<LIB>; aborts on failure."""
	def find_library_from_list(lib, files):
		# among the candidate files, keep the one whose name tags score best
		lib_pattern = re.compile('.*boost_(.*?)\..*')
		result = (None, None)
		resultscore = kw['min_score'] - 1
		for file in files:
			m = lib_pattern.search(file, 1)
			if m:
				libname = m.group(1)
				libtags = libname.split('-')[1:]
				currentscore = tags_score(libtags, kw)
				if currentscore > resultscore:
					result = (libname, file)
					resultscore = currentscore
		return result
	# --boost-libs on the command line overrides kw['libpath']
	lib_paths = getattr(Options.options, 'boostlibs', '')
	if lib_paths:
		lib_paths = [os.path.normpath(os.path.expandvars(os.path.expanduser(lib_paths)))]
	else:
		lib_paths = Utils.to_list(kw['libpath'])
	v = kw.get('env', self.env)
	(libname, file) = (None, None)
	# try the shared library first (unless onlystatic was requested)
	if kw['static'] in [STATIC_NOSTATIC, STATIC_BOTH]:
		st_env_prefix = 'LIB'
		files = libfiles(lib, v['shlib_PATTERN'], lib_paths)
		(libname, file) = find_library_from_list(lib, files)
	# fall back to the static library where allowed
	if libname is None and kw['static'] in [STATIC_ONLYSTATIC, STATIC_BOTH]:
		st_env_prefix = 'STATICLIB'
		staticLibPattern = v['staticlib_PATTERN']
		if self.env['CC_NAME'] == 'msvc':
			staticLibPattern = 'lib' + staticLibPattern
		files = libfiles(lib, staticLibPattern, lib_paths)
		(libname, file) = find_library_from_list(lib, files)
	if libname is not None:
		v['LIBPATH_BOOST_' + lib.upper()] = [os.path.split(file)[0]]
		# msvc .lib files keep the 'lib' prefix in the -l style name
		if self.env['CC_NAME'] == 'msvc' and os.path.splitext(file)[1] == '.lib':
			v[st_env_prefix + '_BOOST_' + lib.upper()] = ['libboost_'+libname]
		else:
			v[st_env_prefix + '_BOOST_' + lib.upper()] = ['boost_'+libname]
		return
	self.fatal('lib boost_' + lib + ' not found!')
@conf
def check_boost(self, *k, **kw):
	"""
	This should be the main entry point

	- min_version
	- max_version
	- version
	- include_path
	- lib_path
	- lib
	- toolsettag - None or a regexp
	- threadingtag - None or a regexp
	- abitag - None or a regexp
	- versiontag - WARNING: you should rather use version or min_version/max_version
	- static - look for static libs (values:
	  'nostatic' or STATIC_NOSTATIC - ignore static libs (default)
	  'both' or STATIC_BOTH - find static libs, too
	  'onlystatic' or STATIC_ONLYSTATIC - find only static libs
	- score_version
	- score_abi
	- scores_threading
	- score_toolset
	 * the scores are tuples (match_score, nomatch_score)
	   match_score is the added to the score if the tag is matched
	   nomatch_score is added when a tag is found and does not match
	- min_score
	"""
	if not self.env['CXX']:
		self.fatal('load a c++ compiler tool first, for example conf.check_tool("g++")')
	self.validate_boost(kw)
	ret = None
	# 1. locate the headers (unless found_includes was passed in)
	try:
		if not kw.get('found_includes', None):
			self.check_message_1(kw.get('msg_includes', 'boost headers'))
			ret = self.find_boost_includes(kw)
	# Python 2 'except E, e' syntax
	except Configure.ConfigurationError, e:
		if 'errmsg' in kw:
			self.check_message_2(kw['errmsg'], 'YELLOW')
		if 'mandatory' in kw:
			if Logs.verbose > 1:
				raise
			else:
				self.fatal('the configuration failed (see %r)' % self.log.name)
	else:
		if 'okmsg' in kw:
			self.check_message_2(kw.get('okmsg_includes', ret))
	# 2. locate each requested library
	for lib in kw['lib']:
		self.check_message_1('library boost_'+lib)
		try:
			self.find_boost_library(lib, kw)
		except Configure.ConfigurationError, e:
			ret = False
			if 'errmsg' in kw:
				self.check_message_2(kw['errmsg'], 'YELLOW')
			if 'mandatory' in kw:
				if Logs.verbose > 1:
					raise
				else:
					self.fatal('the configuration failed (see %r)' % self.log.name)
		else:
			if 'okmsg' in kw:
				self.check_message_2(kw['okmsg'])
	return ret

27
tools/wafadmin/3rdparty/fluid.py

@ -1,27 +0,0 @@
#!/usr/bin/python
# encoding: utf-8
# Grygoriy Fuchedzhy 2009
"""
Compile fluid files (fltk graphic library). Use the 'fluid' feature in conjuction with the 'cxx' feature.
"""
import Task
from TaskGen import extension
# run fluid on the .fl source, emitting the .cpp (TGT[0]) and .hpp (TGT[1])
Task.simple_task_type('fluid', '${FLUID} -c -o ${TGT[0].abspath(env)} -h ${TGT[1].abspath(env)} ${SRC}', 'BLUE', shell=False, ext_out='.cxx')
@extension('.fl')
def fluid(self, node):
	"""add the .fl to the source list; the cxx file generated will be compiled when possible"""
	cpp = node.change_ext('.cpp')
	hpp = node.change_ext('.hpp')
	self.create_task('fluid', node, [cpp, hpp])
	if 'cxx' in self.features:
		# schedule the generated .cpp for compilation
		self.allnodes.append(cpp)
def detect(conf):
	"""Find the fluid generator and the fltk compile/link flags.

	The return value of find_program was stored in an unused local; dropped.
	"""
	conf.find_program('fluid', var='FLUID', mandatory=True)
	conf.check_cfg(path='fltk-config', package='', args='--cxxflags --ldflags', uselib_store='FLTK', mandatory=True)

128
tools/wafadmin/3rdparty/gccdeps.py

@ -1,128 +0,0 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2008-2010 (ita)
"""
Execute the tasks with gcc -MD, read the dependencies from the .d file
and prepare the dependency calculation for the next run
"""
import os, re, threading
import Task, Logs, Utils, preproc
from TaskGen import before, after, feature
# serializes Node lookups: post_run() is executed from several threads
lock = threading.Lock()

# gcc/g++ flag that makes the compiler write a .d dependency file
preprocessor_flag = '-MD'
@feature('cc')
@before('apply_core')
def add_mmd_cc(self):
	"""Append -MD to CCFLAGS once, so gcc emits .d dependency files."""
	current = self.env.get_flat('CCFLAGS')
	if preprocessor_flag not in current:
		self.env.append_value('CCFLAGS', preprocessor_flag)
@feature('cxx')
@before('apply_core')
def add_mmd_cxx(self):
	"""Append -MD to CXXFLAGS once, so g++ emits .d dependency files."""
	current = self.env.get_flat('CXXFLAGS')
	if preprocessor_flag not in current:
		self.env.append_value('CXXFLAGS', preprocessor_flag)
def scan(self):
	"the scanner does not do anything initially"
	# reuse whatever post_run() stored during the previous build
	deps = self.generator.bld.node_deps.get(self.unique_id(), [])
	return (deps, [])
# '.o' suffix of the object file, replaced by '.d' to find the deps file
re_o = re.compile("\.o$")
# paths relative to the parent of the build directory ('../xyz')
re_src = re.compile("^(\.\.)[\\/](.*)$")
def post_run(self):
	"""Read the gcc-generated .d file and record the real dependency nodes
	for this object in bld.node_deps, then run the normal post_run."""
	# The following code is executed by threads, it is not safe, so a lock is needed...
	if getattr(self, 'cached', None):
		return Task.Task.post_run(self)
	name = self.outputs[0].abspath(self.env)
	name = re_o.sub('.d', name)
	txt = Utils.readf(name)
	#os.unlink(name)
	# join backslash line continuations, then split 'target: dep dep ...'
	txt = txt.replace('\\\n', '')
	lst = txt.strip().split(':')
	val = ":".join(lst[1:])
	val = val.split()
	nodes = []
	bld = self.generator.bld
	# matches '<variant>/xyz' or '../xyz' produced by gcc for this build
	f = re.compile("^("+self.env.variant()+"|\.\.)[\\/](.*)$")
	for x in val:
		if os.path.isabs(x):
			# absolute paths are only tracked when preproc.go_absolute is on
			if not preproc.go_absolute:
				continue
			lock.acquire()
			try:
				node = bld.root.find_resource(x)
			finally:
				lock.release()
		else:
			g = re.search(re_src, x)
			if g:
				# relative to the parent of the build dir
				x = g.group(2)
				lock.acquire()
				try:
					node = bld.bldnode.parent.find_resource(x)
				finally:
					lock.release()
			else:
				g = re.search(f, x)
				if g:
					# relative to the source dir
					x = g.group(2)
					lock.acquire()
					try:
						node = bld.srcnode.find_resource(x)
					finally:
						lock.release()
		if id(node) == id(self.inputs[0]):
			# ignore the source file, it is already in the dependencies
			# this way, successful config tests may be retrieved from the cache
			continue
		if not node:
			raise ValueError('could not find %r for %r' % (x, self))
		else:
			nodes.append(node)
	Logs.debug('deps: real scanner for %s returned %s' % (str(self), str(nodes)))
	bld.node_deps[self.unique_id()] = nodes
	bld.raw_deps[self.unique_id()] = []
	# force the signature to be recomputed with the new dependencies
	try:
		del self.cache_sig
	except:
		pass
	Task.Task.post_run(self)
import Constants, Utils
def sig_implicit_deps(self):
	"""Return SIG_NIL (forcing a rebuild) when a stored dependency vanished."""
	try:
		return Task.Task.sig_implicit_deps(self)
	except Utils.WafError:
		return Constants.SIG_NIL
# monkey-patch the cc and cxx task classes (when loaded) so they use the
# .d-file based scanner defined above
for name in 'cc cxx'.split():
	try:
		cls = Task.TaskBase.classes[name]
	except KeyError:
		pass
	else:
		cls.post_run = post_run
		cls.scan = scan
		cls.sig_implicit_deps = sig_implicit_deps

97
tools/wafadmin/3rdparty/go.py

@ -1,97 +0,0 @@
#!/usr/bin/env python
# encoding: utf-8
# go.py - Waf tool for the Go programming language
# By: Tom Wambold <tom5760@gmail.com>
import platform
import Task
import Utils
from TaskGen import feature, extension, after
# task rules for the classic gc toolchain (6g/8g compiler, 6l/8l linker,
# gopack archiver); GOC/GOL/GOP are set by detect() below
Task.simple_task_type('gocompile', '${GOC} ${GOCFLAGS} -o ${TGT} ${SRC}', shell=False)
Task.simple_task_type('gopack', '${GOP} grc ${TGT} ${SRC}', shell=False)
Task.simple_task_type('golink', '${GOL} ${GOLFLAGS} -o ${TGT} ${SRC}', shell=False)
def detect(conf):
	"""Configure the Go toolchain: 6g/6l on x86_64, 8g/8l on i386."""
	def set_def(var, val):
		# only set when the user did not already provide a value
		if not conf.env[var]:
			conf.env[var] = val
	set_def('GO_PLATFORM', platform.machine())
	if conf.env.GO_PLATFORM == 'x86_64':
		set_def('GO_COMPILER', '6g')
		set_def('GO_LINKER', '6l')
		set_def('GO_EXTENSION', '.6')
	elif conf.env.GO_PLATFORM == 'i386':
		set_def('GO_COMPILER', '8g')
		set_def('GO_LINKER', '8l')
		set_def('GO_EXTENSION', '.8')
	# NOTE(review): conf.fatal raises by itself, the 'raise' is redundant
	if not (conf.env.GO_COMPILER or conf.env.GO_LINKER or conf.env.GO_EXTENSION):
		raise conf.fatal('Unsupported platform ' + platform.machine())
	set_def('GO_PACK', 'gopack')
	set_def('GO_PACK_EXTENSION', '.a')
	conf.find_program(conf.env.GO_COMPILER, var='GOC', mandatory=True)
	conf.find_program(conf.env.GO_LINKER, var='GOL', mandatory=True)
	conf.find_program(conf.env.GO_PACK, var='GOP', mandatory=True)
@extension('.go')
def compile_go(self, node):
	"""Collect .go source nodes; the actual task is built in apply_compile_go."""
	collected = getattr(self, 'go_nodes', None)
	if collected is None:
		self.go_nodes = [node]
	else:
		collected.append(node)
@feature('go')
@after('apply_core')
def apply_compile_go(self):
	"""Create one gocompile task from all the .go nodes collected so far."""
	try:
		nodes = self.go_nodes
	except AttributeError:
		# no .go sources: later methods check go_compile_task for None
		self.go_compile_task = None
	else:
		self.go_compile_task = self.create_task('gocompile',
			nodes,
			[self.path.find_or_declare(self.target + self.env.GO_EXTENSION)])
@feature('gopackage', 'goprogram')
@after('apply_compile_go')
def apply_goinc(self):
	"""Propagate -I/-L flags from each uselib_local go package and order our
	compile task after theirs.

	Fix: the error message referenced an undefined variable 'lib_name'
	(NameError); it now reports the loop variable 'name'.
	"""
	if not getattr(self, 'go_compile_task', None):
		return
	names = self.to_list(getattr(self, 'uselib_local', []))
	for name in names:
		obj = self.name_to_obj(name)
		if not obj:
			raise Utils.WafError('object %r was not found in uselib_local '
					'(required by %r)' % (name, self.name))
		obj.post()
		self.go_compile_task.set_run_after(obj.go_package_task)
		self.go_compile_task.deps_nodes.extend(obj.go_package_task.outputs)
		self.env.append_unique('GOCFLAGS', '-I' + obj.path.abspath(obj.env))
		self.env.append_unique('GOLFLAGS', '-L' + obj.path.abspath(obj.env))
@feature('gopackage')
@after('apply_goinc')
def apply_gopackage(self):
	"""Archive the compiled object into a .a package with gopack."""
	compiled = self.go_compile_task.outputs[0]
	archive = self.path.find_or_declare(self.target + self.env.GO_PACK_EXTENSION)
	pack = self.create_task('gopack', compiled, archive)
	pack.set_run_after(self.go_compile_task)
	pack.deps_nodes.extend(self.go_compile_task.outputs)
	self.go_package_task = pack
@feature('goprogram')
@after('apply_goinc')
def apply_golink(self):
	"""Link the compiled object into the final program."""
	compiled = self.go_compile_task.outputs[0]
	program = self.path.find_or_declare(self.target)
	link = self.create_task('golink', compiled, program)
	link.set_run_after(self.go_compile_task)
	link.deps_nodes.extend(self.go_compile_task.outputs)
	self.go_link_task = link

199
tools/wafadmin/3rdparty/swig.py

@ -1,199 +0,0 @@
#! /usr/bin/env python
# encoding: UTF-8
# Petar Forai
# Thomas Nagy 2008
import re
import Task, Utils, Logs
from TaskGen import extension, taskgen, feature, after
from Configure import conf
import preproc
"""
Welcome in the hell of adding tasks dynamically
swig interface files may be created at runtime, the module name may be unknown in advance
rev 5859 is much more simple
"""
# interface file extensions handled by this tool
SWIG_EXTS = ['.swig', '.i']

swig_str = '${SWIG} ${SWIGFLAGS} ${SRC}'
# the generated .c/.cxx outputs are registered dynamically by runnable_status
cls = Task.simple_task_type('swig', swig_str, color='BLUE', ext_in='.i .h', ext_out='.o .c .cxx', shell=False)
def runnable_status(self):
	"""Decide whether the swig task may run; on first evaluation, discover
	the %module name and register the generated C/C++ outputs.

	Fix: ASK_LATER was an undefined name here (NameError when a predecessor
	had not run yet); it lives in the Task module namespace.
	"""
	for t in self.run_after:
		if not t.hasrun:
			return Task.ASK_LATER
	if not getattr(self, 'init_outputs', None):
		self.init_outputs = True
		if not getattr(self, 'module', None):
			# search the module name
			txt = self.inputs[0].read(self.env)
			m = re_module.search(txt)
			if not m:
				raise ValueError("could not find the swig module name")
			self.module = m.group(1)
		swig_c(self)
		# add the language-specific output files as nodes
		# call funs in the dict swig_langs
		for x in self.env['SWIGFLAGS']:
			# obtain the language
			x = x[1:]
			try:
				fun = swig_langs[x]
			except KeyError:
				pass
			else:
				fun(self)
	return Task.Task.runnable_status(self)
setattr(cls, 'runnable_status', runnable_status)
# '%module [(options)] name' -- used to discover the swig module name
re_module = re.compile('%module(?:\s*\(.*\))?\s+(.+)', re.M)

re_1 = re.compile(r'^%module.*?\s+([\w]+)\s*?$', re.M)
# '%include "..."' and '#include "..."' -- followed by the dependency scanner
re_2 = re.compile('%include "(.*)"', re.M)
re_3 = re.compile('#include "(.*)"', re.M)
def scan(self):
	"scan for swig dependencies, climb the .i files"
	env = self.env
	lst_src = []
	# breadth-first walk over the %include/#include graph
	seen = []
	to_see = [self.inputs[0]]
	while to_see:
		node = to_see.pop(0)
		if node.id in seen:
			continue
		seen.append(node.id)
		lst_src.append(node)
		# read the file
		code = node.read(env)
		code = preproc.re_nl.sub('', code)
		code = preproc.re_cpp.sub(preproc.repl, code)
		# find .i files and project headers
		names = re_2.findall(code) + re_3.findall(code)
		for n in names:
			for d in self.generator.swig_dir_nodes + [node.parent]:
				u = d.find_resource(n)
				if u:
					to_see.append(u)
					break
			else:
				Logs.warn('could not find %r' % n)
	# list of nodes this one depends on, and module name if present
	if Logs.verbose:
		Logs.debug('deps: deps for %s: %s' % (str(self), str(lst_src)))
	return (lst_src, [])
cls.scan = scan
# provide additional language processing
swig_langs = {}

def swig(fun):
	"""Register fun as the post-processing hook for its language; the key is
	the function name with the 'swig_' prefix stripped ('swig_python' -> 'python')."""
	lang = fun.__name__.replace('swig_', '')
	swig_langs[lang] = fun
def swig_c(self):
	"""Declare the C/C++ wrapper output, create its compilation task on the
	fly and inject it into the running build."""
	ext = '.swigwrap_%d.c' % self.generator.idx
	flags = self.env['SWIGFLAGS']
	if '-c++' in flags:
		# '.c' becomes '.cxx'
		ext += 'xx'
	out_node = self.inputs[0].parent.find_or_declare(self.module + ext)
	if '-c++' in flags:
		task = self.generator.cxx_hook(out_node)
	else:
		task = self.generator.cc_hook(out_node)
	task.set_run_after(self)
	# push the new compilation task in front of the scheduler queue
	ge = self.generator.bld.generator
	ge.outstanding.insert(0, task)
	ge.total += 1
	try:
		ltask = self.generator.link_task
	except AttributeError:
		pass
	else:
		# the wrapper object must be linked into the final target
		ltask.inputs.append(task.outputs[0])
	self.outputs.append(out_node)
	if not '-o' in self.env['SWIGFLAGS']:
		self.env.append_value('SWIGFLAGS', '-o')
		self.env.append_value('SWIGFLAGS', self.outputs[0].abspath(self.env))
@swig
def swig_python(tsk):
	# python mode also emits a wrapper module <module>.py
	tsk.set_outputs(tsk.inputs[0].parent.find_or_declare(tsk.module + '.py'))

@swig
def swig_ocaml(tsk):
	# ocaml mode emits the .ml implementation and the .mli interface
	tsk.set_outputs(tsk.inputs[0].parent.find_or_declare(tsk.module + '.ml'))
	tsk.set_outputs(tsk.inputs[0].parent.find_or_declare(tsk.module + '.mli'))
@taskgen
@feature('swig')
@after('apply_incpaths')
def add_swig_paths(self):
	"""the attribute 'after' is not used here, the method is added directly at the end"""
	self.swig_dir_nodes = self.env['INC_PATHS']
	# reuse the C/C++ include flags, converting msvc-style /I to swig -I
	include_flags = self.env['_CXXINCFLAGS'] or self.env['_CCINCFLAGS']
	self.env.append_unique('SWIGFLAGS', [f.replace("/I", "-I") for f in include_flags])
@extension(SWIG_EXTS)
def i_file(self, node):
	"""Create one swig task per interface (.i/.swig) file."""
	if not 'add_swig_paths' in self.meths:
		self.meths.append('add_swig_paths')
	# the task instance
	tsk = self.create_task('swig')
	tsk.set_inputs(node)
	# module name may also be discovered later by runnable_status
	tsk.module = getattr(self, 'swig_module', None)
	flags = self.to_list(getattr(self, 'swig_flags', []))
	tsk.env['SWIGFLAGS'] = flags
	if not '-outdir' in flags:
		flags.append('-outdir')
		flags.append(node.parent.abspath(self.env))
@conf
def check_swig_version(conf, minver=None):
	"""Check for a minimum swig version like conf.check_swig_version('1.3.28')
	or conf.check_swig_version((1,3,28)) """
	reg_swig = re.compile(r'SWIG Version\s(.*)', re.M)
	swig_out = Utils.cmd_output('%s -version' % conf.env['SWIG'])
	swigver = [int(s) for s in reg_swig.findall(swig_out)[0].split('.')]
	if isinstance(minver, basestring):
		minver = [int(s) for s in minver.split(".")]
	if isinstance(minver, tuple):
		minver = [int(s) for s in minver]
	result = (minver is None) or (minver[:3] <= swigver[:3])
	swigver_full = '.'.join(map(str, swigver))
	if result:
		conf.env['SWIG_VERSION'] = swigver_full
	if minver is None:
		conf.check_message_custom('swig version', '', swigver_full)
	else:
		# bug fix: minver_str was previously computed unconditionally, so
		# calling check_swig_version() without a minimum version crashed on
		# '.'.join(map(str, None)) before any message could be printed
		minver_str = '.'.join(map(str, minver))
		conf.check_message('swig version', '>= %s' % (minver_str,), result, option=swigver_full)
	return result
def detect(conf):
	"""Find the swig binary (use check_swig_version() for version checks).

	The return value of find_program was stored in an unused local; dropped.
	"""
	conf.find_program('swig', var='SWIG', mandatory=True)

113
tools/wafadmin/3rdparty/valadoc.py

@ -1,113 +0,0 @@
#! /usr/bin/env python
# encoding: UTF-8
# Nicolas Joseph 2009
from fnmatch import fnmatchcase
import os, os.path, re, stat
import Task, Utils, Node, Constants
from TaskGen import feature, extension, after
from Logs import debug, warn, error
# command template, expanded against the configured environment in run()
VALADOC_STR = '${VALADOC}'
class valadoc_task(Task.Task):
	"""Run valadoc over a set of vala sources to generate API documentation."""
	vars = ['VALADOC', 'VALADOCFLAGS']
	color = 'BLUE'
	after = 'cxx_link cc_link'
	quiet = True

	# defaults, overridden per instance by process_valadoc()
	output_dir = ''
	doclet = ''
	package_name = ''
	package_version = ''
	# NOTE(review): mutable class-level default, shared by every instance
	# that does not assign its own list -- confirm before reusing tasks
	files = []
	protected = False
	private = False
	inherit = False
	deps = False
	enable_non_null_experimental = False
	force = False

	def runnable_status(self):
		# always (re)generate the documentation
		return True

	def run(self):
		if self.env['VALADOC']:
			if not self.env['VALADOCFLAGS']:
				self.env['VALADOCFLAGS'] = ''
			# build the command line option by option from the task attributes
			cmd = [Utils.subst_vars(VALADOC_STR, self.env)]
			cmd.append ('-o %s' % self.output_dir)
			if getattr(self, 'doclet', None):
				cmd.append ('--doclet %s' % self.doclet)
			cmd.append ('--package-name %s' % self.package_name)
			# NOTE(review): tests 'version' but emits package_version; looks
			# like it should test 'package_version' -- confirm intent
			if getattr(self, 'version', None):
				cmd.append ('--package-version %s' % self.package_version)
			if getattr(self, 'packages', None):
				for package in self.packages:
					cmd.append ('--pkg %s' % package)
			if getattr(self, 'vapi_dirs', None):
				for vapi_dir in self.vapi_dirs:
					cmd.append ('--vapidir %s' % vapi_dir)
			if getattr(self, 'protected', None):
				cmd.append ('--protected')
			if getattr(self, 'private', None):
				cmd.append ('--private')
			if getattr(self, 'inherit', None):
				cmd.append ('--inherit')
			if getattr(self, 'deps', None):
				cmd.append ('--deps')
			if getattr(self, 'enable_non_null_experimental', None):
				cmd.append ('--enable-non-null-experimental')
			if getattr(self, 'force', None):
				cmd.append ('--force')
			cmd.append (' '.join ([x.relpath_gen (self.generator.bld.bldnode) for x in self.files]))
			return self.generator.bld.exec_command(' '.join(cmd))
		else:
			error ('You must install valadoc <http://live.gnome.org/Valadoc> for generate the API documentation')
			return -1
@feature('valadoc')
def process_valadoc(self):
	"""Configure a valadoc task from the task generator attributes.

	output_dir, doclet, package_name and files are mandatory.
	Fix: the original code built the Utils.WafError instances but never
	raised them, so missing mandatory attributes were silently ignored.
	"""
	task = getattr(self, 'task', None)
	if not task:
		task = self.create_task('valadoc')
		self.task = task
	if getattr(self, 'output_dir', None):
		task.output_dir = self.output_dir
	else:
		raise Utils.WafError('no output directory')
	if getattr(self, 'doclet', None):
		task.doclet = self.doclet
	else:
		raise Utils.WafError('no doclet directory')
	if getattr(self, 'package_name', None):
		task.package_name = self.package_name
	else:
		raise Utils.WafError('no package name')
	if getattr(self, 'package_version', None):
		task.package_version = self.package_version
	if getattr(self, 'packages', None):
		task.packages = Utils.to_list(self.packages)
	if getattr(self, 'vapi_dirs', None):
		task.vapi_dirs = Utils.to_list(self.vapi_dirs)
	if getattr(self, 'files', None):
		task.files = self.files
	else:
		raise Utils.WafError('no input file')
	if getattr(self, 'protected', None):
		task.protected = self.protected
	if getattr(self, 'private', None):
		task.private = self.private
	if getattr(self, 'inherit', None):
		task.inherit = self.inherit
	if getattr(self, 'deps', None):
		task.deps = self.deps
	if getattr(self, 'enable_non_null_experimental', None):
		task.enable_non_null_experimental = self.enable_non_null_experimental
	if getattr(self, 'force', None):
		task.force = self.force
def detect(conf):
	# not mandatory: run() degrades to an error message when VALADOC is unset
	conf.find_program('valadoc', var='VALADOC', mandatory=False)

35
tools/wafadmin/3rdparty/wagner.py

@ -1,35 +0,0 @@
#!/usr/bin/env python
# encoding: utf-8
# ita 2010
import Logs, Utils, Build, Task
def say(txt):
	"""Default reporter: print through the waf warning logger."""
	Logs.warn("^o^: %s" % txt)

# when cowsay is installed, replace say() with a cowsay-based version
try:
	ret = Utils.cmd_output('which cowsay 2> /dev/null').strip()
except Exception, e:
	pass
else:
	def say(txt):
		f = Utils.cmd_output([ret, txt])
		Utils.pprint('PINK', f)

say('you make the errors, we detect them')
def check_task_classes(self):
	"""Warn about task classes registered without precedence constraints.

	Fixes: the original iterated the class-name keys and tested them with
	isinstance (always false for strings), referenced an undefined 'cls',
	never interpolated the class name into the message, and warned when a
	constraint WAS present instead of when both were missing.
	"""
	for name, cls in Task.TaskBase.classes.items():
		if not getattr(cls, 'ext_in', None) and not getattr(cls, 'before', None):
			say('class %s has no precedence constraints (ext_in/before)' % name)
		if not getattr(cls, 'ext_out', None) and not getattr(cls, 'after', None):
			say('class %s has no precedence constraints (ext_out/after)' % name)
# wrap BuildContext.compile so the checks run once before the build starts;
# setting 'magic' on the context skips them
comp = Build.BuildContext.compile
def compile(self):
	if not getattr(self, 'magic', None):
		check_task_classes(self)
	return comp(self)
Build.BuildContext.compile = compile

38
tools/wafadmin/Tools/bison.py

@ -1,38 +0,0 @@
#!/usr/bin/env python
# encoding: utf-8
# John O'Meara, 2006
# Thomas Nagy 2009
"Bison processing"
import Task
from TaskGen import extension
bison = '${BISON} ${BISONFLAGS} ${SRC[0].abspath()} -o ${TGT[0].name}'
cls = Task.simple_task_type('bison', bison, 'GREEN', ext_in='.yc .y .yy', ext_out='.c .cxx .h .l', shell=False)
@extension(['.y', '.yc', '.yy'])
def big_bison(self, node):
	"""when it becomes complicated (unlike flex), the old recipes work better (cwd)"""
	# -d makes bison emit a header next to the parser
	has_h = '-d' in self.env['BISONFLAGS']
	outs = []
	if node.name.endswith('.yc'):
		outs.append(node.change_ext('.tab.cc'))
		if has_h:
			outs.append(node.change_ext('.tab.hh'))
	else:
		outs.append(node.change_ext('.tab.c'))
		if has_h:
			outs.append(node.change_ext('.tab.h'))
	tsk = self.create_task('bison', node, outs)
	tsk.cwd = node.bld_dir(tsk.env)
	# and the c/cxx file must be compiled too
	self.allnodes.append(outs[0])
def detect(conf):
	"""Find bison and enable header generation (-d) by default.

	The return value of find_program was stored in an unused local; dropped.
	"""
	conf.find_program('bison', var='BISON', mandatory=True)
	conf.env['BISONFLAGS'] = '-d'

68
tools/wafadmin/Tools/cs.py

@ -1,68 +0,0 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2006 (ita)
"C# support"
import TaskGen, Utils, Task, Options
from Logs import error
from TaskGen import before, after, taskgen, feature
# env variables copied from <VAR>_<uselib> entries by apply_uselib_cs
flag_vars= ['FLAGS', 'ASSEMBLIES']
@feature('cs')
def init_cs(self):
	"""Provide default (empty) attributes for C# task generators."""
	Utils.def_attrs(self,
		flags = '',
		assemblies = '',
		resources = '',
		uselib = '')
@feature('cs')
@after('init_cs')
def apply_uselib_cs(self):
	"""Append the FLAGS_<name>/ASSEMBLIES_<name> env values for every
	uselib name declared on the task generator.

	Fix: the inner loop read self.flag_vars, but flag_vars is the
	module-level list (the 'global' statement makes that explicit); task
	generators have no such attribute, so this raised AttributeError.
	"""
	if not self.uselib:
		return
	global flag_vars
	for var in self.to_list(self.uselib):
		for v in flag_vars:
			val = self.env[v+'_'+var]
			if val: self.env.append_value(v, val)
@feature('cs')
@after('apply_uselib_cs')
@before('apply_core')
def apply_cs(self):
	"""Create the single mcs compilation task for a C# target."""
	# C# sources must not go through the default c/c++ machinery
	try: self.meths.remove('apply_core')
	except ValueError: pass
	# process the flags for the assemblies
	for i in self.to_list(self.assemblies) + self.env['ASSEMBLIES']:
		self.env.append_unique('_ASSEMBLIES', '/r:'+i)
	# process the flags for the resources
	for i in self.to_list(self.resources):
		self.env.append_unique('_RESOURCES', '/resource:'+i)
	# what kind of assembly are we generating?
	self.env['_TYPE'] = getattr(self, 'type', 'exe')
	# additional flags
	self.env.append_unique('_FLAGS', self.to_list(self.flags))
	self.env.append_unique('_FLAGS', self.env.FLAGS)
	# process the sources
	nodes = [self.path.find_resource(i) for i in self.to_list(self.source)]
	self.create_task('mcs', nodes, self.path.find_or_declare(self.target))
# a single compiler invocation builds the whole assembly
Task.simple_task_type('mcs', '${MCS} ${SRC} /target:${_TYPE} /out:${TGT} ${_FLAGS} ${_ASSEMBLIES} ${_RESOURCES}', color='YELLOW')
def detect(conf):
	"""Locate a C# compiler: honour --with-csc-binary, else look for gmcs/mcs."""
	csc = getattr(Options.options, 'cscbinary', None)
	if csc:
		conf.env.MCS = csc
	conf.find_program(['gmcs', 'mcs'], var='MCS')
def set_options(opt):
	"""Expose --with-csc-binary to force a specific C# compiler."""
	opt.add_option('--with-csc-binary', dest='cscbinary', type='string')

34
tools/wafadmin/Tools/dbus.py

@ -1,34 +0,0 @@
#!/usr/bin/env python
# encoding: utf-8
# Ali Sabil, 2007
import Task, Utils
from TaskGen import taskgen, before, after, feature
@taskgen
def add_dbus_file(self, filename, prefix, mode):
	"""Queue a dbus xml file; it is processed later by process_dbus."""
	queue = getattr(self, 'dbus_lst', None)
	if queue is None:
		queue = self.dbus_lst = []
		self.meths.append('process_dbus')
	queue.append([filename, prefix, mode])
@before('apply_core')
def process_dbus(self):
	"""Create one dbus_binding_tool task per queued xml file."""
	for filename, prefix, mode in getattr(self, 'dbus_lst', []):
		node = self.path.find_resource(filename)
		if not node:
			raise Utils.WafError('file not found ' + filename)
		tsk = self.create_task('dbus_binding_tool', node, node.change_ext('.h'))
		# per-task env copies so every file keeps its own prefix/mode
		tsk.env.DBUS_BINDING_TOOL_PREFIX = prefix
		tsk.env.DBUS_BINDING_TOOL_MODE = mode
# runs before the c compilation so the generated header is available
Task.simple_task_type('dbus_binding_tool',
	'${DBUS_BINDING_TOOL} --prefix=${DBUS_BINDING_TOOL_PREFIX} --mode=${DBUS_BINDING_TOOL_MODE} --output=${TGT} ${SRC}',
	color='BLUE', before='cc')
def detect(conf):
	"""Find the dbus-binding-tool executable.

	The return value of find_program was stored in an unused local; dropped.
	"""
	conf.find_program('dbus-binding-tool', var='DBUS_BINDING_TOOL')

25
tools/wafadmin/Tools/flex.py

@ -1,25 +0,0 @@
#!/usr/bin/env python
# encoding: utf-8
# John O'Meara, 2006
# Thomas Nagy, 2006-2008
"Flex processing"
import TaskGen
def decide_ext(self, node):
	"""Generated lexers get a C++ extension when the generator builds C++."""
	if 'cxx' in self.features:
		return '.lex.cc'
	return '.lex.c'
# chain rule: .l -> .lex.c/.lex.cc via flex
TaskGen.declare_chain(
	name = 'flex',
	rule = '${FLEX} -o${TGT} ${FLEXFLAGS} ${SRC}',
	ext_in = '.l',
	ext_out = '.c .cxx',  # fix: the comma was missing here (SyntaxError)
	decider = decide_ext,
)
def detect(conf):
	"""Find flex; FLEXFLAGS defaults to empty."""
	conf.find_program('flex', var='FLEX', mandatory=True)
	conf.env['FLEXFLAGS'] = ''

164
tools/wafadmin/Tools/glib2.py

@ -1,164 +0,0 @@
#! /usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2006-2008 (ita)
"GLib2 support"
import Task, Utils
from TaskGen import taskgen, before, after, feature
#
# glib-genmarshal
#
@taskgen
def add_marshal_file(self, filename, prefix):
	"""Queue a glib-genmarshal input file; picked up later by process_marshal()."""
	try:
		lst = self.marshal_list
	except AttributeError:
		# first call: create the queue and schedule the processing method once
		lst = self.marshal_list = []
		self.meths.append('process_marshal')
	lst.append((filename, prefix))
@before('apply_core')
def process_marshal(self):
	"""Create a 'glib_genmarshal' task (producing a .h and a .c) per queued file."""
	for f, prefix in getattr(self, 'marshal_list', []):
		node = self.path.find_resource(f)
		if not node:
			raise Utils.WafError('file not found %r' % f)
		h_node = node.change_ext('.h')
		c_node = node.change_ext('.c')
		task = self.create_task('glib_genmarshal', node, [h_node, c_node])
		task.env.GLIB_GENMARSHAL_PREFIX = prefix
		# feed the generated .c back into the build so it gets compiled
		self.allnodes.append(c_node)
def genmarshal_func(self):
	"""Task body: run glib-genmarshal twice — once for the header, once for the
	marshaller bodies; the generated .c starts by including the generated .h.

	Returns a non-zero exit status on failure (waf task convention).
	"""
	bld = self.inputs[0].__class__.bld
	get = self.env.get_flat

	# 1. generate the header
	cmd1 = "%s %s --prefix=%s --header > %s" % (
		get('GLIB_GENMARSHAL'),
		self.inputs[0].srcpath(self.env),
		get('GLIB_GENMARSHAL_PREFIX'),
		self.outputs[0].abspath(self.env)
	)
	ret = bld.exec_command(cmd1)
	if ret: return ret

	# 2. the .c file must include the header generated above
	f = open(self.outputs[1].abspath(self.env), 'wb')
	c = '''#include "%s"\n''' % self.outputs[0].name
	f.write(c)
	f.close()

	# 3. append the marshaller bodies to the .c file
	cmd2 = "%s %s --prefix=%s --body >> %s" % (
		get('GLIB_GENMARSHAL'),
		self.inputs[0].srcpath(self.env),
		get('GLIB_GENMARSHAL_PREFIX'),
		self.outputs[1].abspath(self.env)
	)
	# fixed: was Utils.exec_command, inconsistent with cmd1 above and bypassing
	# the build context's command logging
	ret = bld.exec_command(cmd2)
	if ret: return ret
#
# glib-mkenums
#
@taskgen
def add_enums_from_template(self, source='', target='', template='', comments=''):
	"""Queue a glib-mkenums run driven by a template file."""
	if not hasattr(self, 'enums_list'):
		self.enums_list = []
		self.meths.append('process_enums')
	# template mode: the per-section production strings stay empty
	entry = dict.fromkeys(
		('file-head', 'file-prod', 'file-tail', 'enum-prod',
		 'value-head', 'value-prod', 'value-tail'), '')
	entry.update(source=source, target=target, template=template, comments=comments)
	self.enums_list.append(entry)
@taskgen
def add_enums(self, source='', target='',
	file_head='', file_prod='', file_tail='', enum_prod='',
	value_head='', value_prod='', value_tail='', comments=''):
	"""Queue a glib-mkenums run with inline production strings (no template)."""
	if not hasattr(self, 'enums_list'):
		self.enums_list = []
		self.meths.append('process_enums')
	# dashed keys mirror the glib-mkenums command-line option names
	self.enums_list.append({'source': source,
				'template': '',
				'target': target,
				'file-head': file_head,
				'file-prod': file_prod,
				'file-tail': file_tail,
				'enum-prod': enum_prod,
				'value-head': value_head,
				'value-prod': value_prod,
				'value-tail': value_tail,
				'comments': comments})
@before('apply_core')
def process_enums(self):
	"""Create one 'glib_mkenums' task per entry queued by add_enums*()."""
	for enum in getattr(self, 'enums_list', []):
		task = self.create_task('glib_mkenums')
		env = task.env
		inputs = []

		# process the source
		source_list = self.to_list(enum['source'])
		if not source_list:
			raise Utils.WafError('missing source ' + str(enum))
		source_list = [self.path.find_resource(k) for k in source_list]
		inputs += source_list
		env['GLIB_MKENUMS_SOURCE'] = [k.srcpath(env) for k in source_list]

		# find the target
		if not enum['target']:
			raise Utils.WafError('missing target ' + str(enum))
		tgt_node = self.path.find_or_declare(enum['target'])
		if tgt_node.name.endswith('.c'):
			# generated C code must be compiled too
			self.allnodes.append(tgt_node)
		env['GLIB_MKENUMS_TARGET'] = tgt_node.abspath(env)

		options = []
		if enum['template']: # template, if provided
			template_node = self.path.find_resource(enum['template'])
			options.append('--template %s' % (template_node.abspath(env)))
			inputs.append(template_node)
		# map dict keys to the corresponding glib-mkenums options
		params = {'file-head' : '--fhead',
				'file-prod' : '--fprod',
				'file-tail' : '--ftail',
				'enum-prod' : '--eprod',
				'value-head' : '--vhead',
				'value-prod' : '--vprod',
				'value-tail' : '--vtail',
				'comments': '--comments'}
		for param, option in params.iteritems():
			if enum[param]:
				options.append('%s %r' % (option, enum[param]))
		env['GLIB_MKENUMS_OPTIONS'] = ' '.join(options)

		# update the task instance
		task.set_inputs(inputs)
		task.set_outputs(tgt_node)
# both generators must run before the C/C++ compilations that use their output
Task.task_type_from_func('glib_genmarshal', func=genmarshal_func, vars=['GLIB_GENMARSHAL_PREFIX', 'GLIB_GENMARSHAL'],
	color='BLUE', before='cc cxx')
Task.simple_task_type('glib_mkenums',
	'${GLIB_MKENUMS} ${GLIB_MKENUMS_OPTIONS} ${GLIB_MKENUMS_SOURCE} > ${GLIB_MKENUMS_TARGET}',
	color='PINK', before='cc cxx')

def detect(conf):
	"""Find the glib code generators used by this module."""
	glib_genmarshal = conf.find_program('glib-genmarshal', var='GLIB_GENMARSHAL')
	mk_enums_tool = conf.find_program('glib-mkenums', var='GLIB_MKENUMS')

223
tools/wafadmin/Tools/gnome.py

@ -1,223 +0,0 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2006-2008 (ita)
"Gnome support"
import os, re
import TaskGen, Utils, Runner, Task, Build, Options, Logs
import cc
from Logs import error
from TaskGen import taskgen, before, after, feature
# extract the manpage title and section number from a docbook .sgml document
n1_regexp = re.compile('<refentrytitle>(.*)</refentrytitle>', re.M)
n2_regexp = re.compile('<manvolnum>(.*)</manvolnum>', re.M)

def postinstall_schemas(prog_name):
	"""Register the installed GConf schema, unless installing into a destdir."""
	if Build.bld.is_install:
		dir = Build.bld.get_install_path('${PREFIX}/etc/gconf/schemas/%s.schemas' % prog_name)
		if not Options.options.destdir:
			# add the gconf schema
			Utils.pprint('YELLOW', 'Installing GConf schema')
			command = 'gconftool-2 --install-schema-file=%s 1> /dev/null' % dir
			ret = Utils.exec_command(command)
		else:
			# destdir installs are staged; tell the packager what to run later
			Utils.pprint('YELLOW', 'GConf schema not installed. After install, run this:')
			Utils.pprint('YELLOW', 'gconftool-2 --install-schema-file=%s' % dir)
def postinstall_icons():
	"""Refresh the hicolor GTK icon cache, unless installing into a destdir."""
	dir = Build.bld.get_install_path('${DATADIR}/icons/hicolor')
	if Build.bld.is_install:
		if not Options.options.destdir:
			# update the pixmap cache directory
			Utils.pprint('YELLOW', "Updating Gtk icon cache.")
			command = 'gtk-update-icon-cache -q -f -t %s' % dir
			ret = Utils.exec_command(command)
		else:
			# destdir installs are staged; tell the packager what to run later
			Utils.pprint('YELLOW', 'Icon cache not updated. After install, run this:')
			Utils.pprint('YELLOW', 'gtk-update-icon-cache -q -f -t %s' % dir)
def postinstall_scrollkeeper(prog_name):
	"""Update the scrollkeeper documentation catalog if its log is writable."""
	if Build.bld.is_install:
		# now the scrollkeeper update if we can write to the log file
		if os.access('/var/log/scrollkeeper.log', os.W_OK):
			dir1 = Build.bld.get_install_path('${PREFIX}/var/scrollkeeper')
			dir2 = Build.bld.get_install_path('${DATADIR}/omf/%s' % prog_name)
			command = 'scrollkeeper-update -q -p %s -o %s' % (dir1, dir2)
			ret = Utils.exec_command(command)

def postinstall(prog_name='myapp', schemas=1, icons=1, scrollkeeper=1):
	"""Run the enabled post-install hooks (schemas, icon cache, scrollkeeper)."""
	if schemas: postinstall_schemas(prog_name)
	if icons: postinstall_icons()
	if scrollkeeper: postinstall_scrollkeeper(prog_name)
# OBSOLETE
class gnome_doc_taskgen(TaskGen.task_gen):
	"""Task generator kept so wscripts can still instantiate 'gnome_doc'."""
	def __init__(self, *k, **kw):
		TaskGen.task_gen.__init__(self, *k, **kw)

@feature('gnome_doc')
def init_gnome_doc(self):
	# default location for the installed documentation
	self.default_install_path = '${PREFIX}/share'
@feature('gnome_doc')
@after('init_gnome_doc')
def apply_gnome_doc(self):
	"""Create xml2po translation tasks and xsltproc2po .omf tasks for every
	language in self.doc_linguas (plus the untranslated 'C' locale), and
	schedule the install of the help files when building with install."""
	self.env['APPNAME'] = self.doc_module
	lst = self.to_list(self.doc_linguas)
	bld = self.bld
	lst.append('C')
	for x in lst:
		if not x == 'C':
			# translate C/<module>.xml through the language's .po file
			tsk = self.create_task('xml2po')
			node = self.path.find_resource(x+'/'+x+'.po')
			src = self.path.find_resource('C/%s.xml' % self.doc_module)
			out = self.path.find_or_declare('%s/%s.xml' % (x, self.doc_module))
			tsk.set_inputs([node, src])
			tsk.set_outputs(out)
		else:
			out = self.path.find_resource('%s/%s.xml' % (x, self.doc_module))
		# NOTE(review): for x == 'C', 'tsk' below is the previous language's
		# task — and unbound if doc_linguas is empty; confirm intended.
		tsk2 = self.create_task('xsltproc2po')
		out2 = self.path.find_or_declare('%s/%s-%s.omf' % (x, self.doc_module, x))
		tsk2.set_outputs(out2)
		node = self.path.find_resource(self.doc_module+".omf.in")
		tsk2.inputs = [node, out]
		tsk2.run_after.append(tsk)
		if bld.is_install:
			path = self.install_path + '/gnome/help/%s/%s' % (self.doc_module, x)
			bld.install_files(self.install_path + '/omf', out2, env=self.env)
			for y in self.to_list(self.doc_figures):
				try:
					# install the translated figure when it exists …
					os.stat(self.path.abspath() + '/' + x + '/' + y)
					bld.install_as(path + '/' + y, self.path.abspath() + '/' + x + '/' + y)
				except OSError:
					# … otherwise fall back to the untranslated one
					# (fixed: was a bare except, which also swallowed
					# KeyboardInterrupt/SystemExit)
					bld.install_as(path + '/' + y, self.path.abspath() + '/C/' + y)
			bld.install_as(path + '/%s.xml' % self.doc_module, out.abspath(self.env))
			if x == 'C':
				xmls = self.to_list(self.doc_includes)
				xmls.append(self.doc_entities)
				for z in xmls:
					out = self.path.find_resource('%s/%s' % (x, z))
					bld.install_as(path + '/%s' % z, out.abspath(self.env))
# OBSOLETE
class xml_to_taskgen(TaskGen.task_gen):
	"""Task generator kept so wscripts can still instantiate 'xml_to'."""
	def __init__(self, *k, **kw):
		TaskGen.task_gen.__init__(self, *k, **kw)

@feature('xml_to')
def init_xml_to(self):
	# NOTE(review): 'xlsltfile' looks like a typo for 'xsltfile', but it is a
	# runtime default value — changing it would alter behaviour for callers
	# relying on the default.
	Utils.def_attrs(self,
		source = 'xmlfile',
		xslt = 'xlsltfile',
		target = 'hey',
		default_install_path = '${PREFIX}',
		task_created = None)
@feature('xml_to')
@after('init_xml_to')
def apply_xml_to(self):
	"""Create the 'xmlto' task transforming self.source with self.xslt to html."""
	xmlfile = self.path.find_resource(self.source)
	xsltfile = self.path.find_resource(self.xslt)
	tsk = self.create_task('xmlto', [xmlfile, xsltfile], xmlfile.change_ext('html'))
	tsk.install_path = self.install_path
def sgml_scan(self):
	"""Scanner for 'sgml2man' tasks: derive the output manpage name
	(<title>.<section>) from the sgml content and declare it as the output.

	Returns the ([dependencies], [output names]) pair waf scanners use.
	"""
	node = self.inputs[0]
	env = self.env
	variant = node.variant(env)
	fi = open(node.abspath(env), 'r')
	content = fi.read()
	fi.close()
	# we should use a sgml parser :-/
	name = n1_regexp.findall(content)[0]
	num = n2_regexp.findall(content)[0]
	doc_name = name+'.'+num
	if not self.outputs:
		self.outputs = [self.generator.path.find_or_declare(doc_name)]
	return ([], [doc_name])
class gnome_sgml2man_taskgen(TaskGen.task_gen):
	"""Task generator for the 'gnome_sgml2man' feature."""
	def __init__(self, *k, **kw):
		TaskGen.task_gen.__init__(self, *k, **kw)

@feature('gnome_sgml2man')
def apply_gnome_sgml2man(self):
	"""
	we could make it more complicated, but for now we just scan the document each time
	"""
	assert(getattr(self, 'appname', None))

	def install_result(task):
		# install the manpage in the section derived from its extension digit
		out = task.outputs[0]
		name = out.name
		ext = name[-1]
		env = task.env
		self.bld.install_files('${DATADIR}/man/man%s/' % ext, out, env)

	self.bld.rescan(self.path)
	for name in self.bld.cache_dir_contents[self.path.id]:
		base, ext = os.path.splitext(name)
		if ext != '.sgml': continue
		task = self.create_task('sgml2man')
		task.set_inputs(self.path.find_resource(name))
		task.task_generator = self
		if self.bld.is_install: task.install = install_result
		# no outputs, the scanner does it
		# no caching for now, this is not a time-critical feature
		# in the future the scanner can be used to do more things (find dependencies, etc)
		task.scan()
# sgml2man declares no outputs up front: sgml_scan discovers them
cls = Task.simple_task_type('sgml2man', '${SGML2MAN} -o ${TGT[0].bld_dir(env)} ${SRC} > /dev/null', color='BLUE')
cls.scan = sgml_scan
cls.quiet = 1

Task.simple_task_type('xmlto', '${XMLTO} html -m ${SRC[1].abspath(env)} ${SRC[0].abspath(env)}')

Task.simple_task_type('xml2po', '${XML2PO} ${XML2POFLAGS} ${SRC} > ${TGT}', color='BLUE')

# xsltproc invocation producing the .omf from the .omf.in; the db2omf.lang
# parameter is sliced out of the target file name (…-<lang>.omf)
xslt_magic = """${XSLTPROC2PO} -o ${TGT[0].abspath(env)} \
--stringparam db2omf.basename ${APPNAME} \
--stringparam db2omf.format docbook \
--stringparam db2omf.lang ${TGT[0].abspath(env)[:-4].split('-')[-1]} \
--stringparam db2omf.dtd '-//OASIS//DTD DocBook XML V4.3//EN' \
--stringparam db2omf.omf_dir ${PREFIX}/share/omf \
--stringparam db2omf.help_dir ${PREFIX}/share/gnome/help \
--stringparam db2omf.omf_in ${SRC[0].abspath(env)} \
--stringparam db2omf.scrollkeeper_cl ${SCROLLKEEPER_DATADIR}/Templates/C/scrollkeeper_cl.xml \
${DB2OMF} ${SRC[1].abspath(env)}"""

#--stringparam db2omf.dtd '-//OASIS//DTD DocBook XML V4.3//EN' \
Task.simple_task_type('xsltproc2po', xslt_magic, color='BLUE')
def detect(conf):
	"""Configure the gnome helpers: dependent tools, locale dir and doc flags."""
	conf.check_tool('gnu_dirs glib2 dbus')
	sgml2man = conf.find_program('docbook2man', var='SGML2MAN')

	# define() also stores the value in conf.env
	conf.define('GNOMELOCALEDIR', os.path.join(conf.env['DATADIR'], 'locale'))

	# removed: an unused nested helper 'getstr' that was never called
	xml2po = conf.find_program('xml2po', var='XML2PO')
	xsltproc2po = conf.find_program('xsltproc', var='XSLTPROC2PO')
	conf.env['XML2POFLAGS'] = '-e -p'
	conf.env['SCROLLKEEPER_DATADIR'] = Utils.cmd_output("scrollkeeper-config --pkgdatadir", silent=1).strip()
	conf.env['DB2OMF'] = Utils.cmd_output("/usr/bin/pkg-config --variable db2omf gnome-doc-utils", silent=1).strip()
def set_options(opt):
	"""Register the --want-rpath option (1 enables rpath, 0 disables it)."""
	opt.add_option('--want-rpath', dest='want_rpath', type='int', default=1,
		help='set rpath to 1 or 0 [Default 1]')

254
tools/wafadmin/Tools/javaw.py

@ -1,254 +0,0 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2006-2008 (ita)
"""
Java support
Javac is one of the few compilers that behaves very badly:
* it outputs files where it wants to (-d is only for the package root)
* it recompiles files silently behind your back
* it outputs an undefined amount of files (inner classes)
Fortunately, the convention makes it possible to use the build dir without
too many problems for the moment
Inner classes must be located and cleaned when a problem arise,
for the moment waf does not track the production of inner classes.
Adding all the files to a task and executing it if any of the input files
change is only annoying for the compilation times
Compilation can be run using Jython[1] rather than regular Python. Instead of
running one of the following commands:
./waf configure
python waf configure
You would have to run:
java -jar /path/to/jython.jar waf configure
[1] http://www.jython.org/
"""
import os, re
from Configure import conf
import TaskGen, Task, Utils, Options, Build
from TaskGen import feature, before, taskgen
# Java program compiled at configure time by check_java_class(): exits 0 when
# the class named in argv[0] can be loaded, 1 when not, 77 on missing argument.
class_check_source = '''
public class Test {
public static void main(String[] argv) {
Class lib;
if (argv.length < 1) {
System.err.println("Missing argument");
System.exit(77);
}
try {
lib = Class.forName(argv[0]);
} catch (ClassNotFoundException e) {
System.err.println("ClassNotFoundException");
System.exit(1);
}
lib = null;
System.exit(0);
}
}
'''
@feature('jar')
@before('apply_core')
def jar_files(self):
	"""Create a 'jar_create' task packing everything under self.basedir into
	self.destfile (default 'test.jar')."""
	basedir = getattr(self, 'basedir', '.')
	destfile = getattr(self, 'destfile', 'test.jar')
	jaropts = getattr(self, 'jaropts', [])
	jarcreate = getattr(self, 'jarcreate', 'cf')

	dir = self.path.find_dir(basedir)
	if not dir:
		# fixed: was a bare 'raise' with no active exception, which surfaced
		# as "RuntimeError: No active exception to re-raise"
		raise Utils.WafError('could not find the basedir %r' % basedir)

	# jar is run from inside basedir ('-C <dir> .')
	jaropts.append('-C')
	jaropts.append(dir.abspath(self.env))
	jaropts.append('.')

	out = self.path.find_or_declare(destfile)
	tsk = self.create_task('jar_create')
	tsk.set_outputs(out)
	# every file below basedir is an input, except the jar itself
	tsk.inputs = [x for x in dir.find_iter(src=0, bld=1) if x.id != out.id]
	tsk.env['JAROPTS'] = jaropts
	tsk.env['JARCREATE'] = jarcreate
@feature('javac')
@before('apply_core')
def apply_java(self):
	"""Create the 'javac' task for the sources under self.srcdir, and an
	optional 'jar_create' task when self.jarname is set."""
	Utils.def_attrs(self, jarname='', jaropts='', classpath='',
		sourcepath='.', srcdir='.', source_re='**/*.java',
		jar_mf_attributes={}, jar_mf_classpath=[])
	if getattr(self, 'source_root', None):
		# old stuff
		self.srcdir = self.source_root

	nodes_lst = []

	if not self.classpath:
		if not self.env['CLASSPATH']:
			self.env['CLASSPATH'] = '..' + os.pathsep + '.'
	else:
		self.env['CLASSPATH'] = self.classpath

	srcdir_node = self.path.find_dir(self.srcdir)
	if not srcdir_node:
		raise Utils.WafError('could not find srcdir %r' % self.srcdir)

	src_nodes = [x for x in srcdir_node.ant_glob(self.source_re, flat=False)]
	bld_nodes = [x.change_ext('.class') for x in src_nodes]

	self.env['OUTDIR'] = [srcdir_node.abspath(self.env)]

	tsk = self.create_task('javac')
	tsk.set_inputs(src_nodes)
	tsk.set_outputs(bld_nodes)

	if getattr(self, 'compat', None):
		tsk.env.append_value('JAVACFLAGS', ['-source', self.compat])

	# NOTE(review): def_attrs above always sets 'sourcepath', so this hasattr
	# check is presumably always true and the else branch dead — confirm.
	if hasattr(self, 'sourcepath'):
		fold = [self.path.find_dir(x) for x in self.to_list(self.sourcepath)]
		names = os.pathsep.join([x.srcpath() for x in fold])
	else:
		names = srcdir_node.srcpath()

	if names:
		tsk.env.append_value('JAVACFLAGS', ['-sourcepath', names])

	if self.jarname:
		jtsk = self.create_task('jar_create', bld_nodes, self.path.find_or_declare(self.jarname))
		jtsk.set_run_after(tsk)

		if not self.env.JAROPTS:
			if self.jaropts:
				self.env.JAROPTS = self.jaropts
			else:
				dirs = '.'
				self.env.JAROPTS = ['-C', ''.join(self.env['OUTDIR']), dirs]
Task.simple_task_type('jar_create', '${JAR} ${JARCREATE} ${TGT} ${JAROPTS}', color='GREEN')
cls = Task.simple_task_type('javac', '${JAVAC} -classpath ${CLASSPATH} -d ${OUTDIR} ${JAVACFLAGS} ${SRC}')
cls.color = 'BLUE'

def post_run_javac(self):
	"""this is for cleaning the folder
	javac creates single files for inner classes
	but it is not possible to know which inner classes in advance"""
	# collect the distinct parent directories of the compiled sources
	par = {}
	for x in self.inputs:
		par[x.parent.id] = x.parent

	# inner classes are the files containing '$' in those directories
	inner = {}
	for k in par.values():
		path = k.abspath(self.env)
		lst = os.listdir(path)

		for u in lst:
			if u.find('$') >= 0:
				inner_class_node = k.find_or_declare(u)
				inner[inner_class_node.id] = inner_class_node

	# register the inner-class files as additional task outputs
	to_add = set(inner.keys()) - set([x.id for x in self.outputs])
	for x in to_add:
		self.outputs.append(inner[x])
	return Task.Task.post_run(self)
cls.post_run = post_run_javac
def detect(conf):
	"""Find javac/java/jar (honouring JAVA_HOME) and seed the java env vars."""
	# If JAVA_PATH is set, we prepend it to the path list
	java_path = conf.environ['PATH'].split(os.pathsep)
	v = conf.env

	if 'JAVA_HOME' in conf.environ:
		java_path = [os.path.join(conf.environ['JAVA_HOME'], 'bin')] + java_path
		conf.env['JAVA_HOME'] = [conf.environ['JAVA_HOME']]

	for x in 'javac java jar'.split():
		conf.find_program(x, var=x.upper(), path_list=java_path)
		conf.env[x.upper()] = conf.cmd_to_list(conf.env[x.upper()])
	v['JAVA_EXT'] = ['.java']

	if 'CLASSPATH' in conf.environ:
		v['CLASSPATH'] = conf.environ['CLASSPATH']

	if not v['JAR']: conf.fatal('jar is required for making java packages')
	if not v['JAVAC']: conf.fatal('javac is required for compiling java classes')
	v['JARCREATE'] = 'cf' # can use cvf
@conf
def check_java_class(self, classname, with_classpath=None):
	"""Check if the specified java class is installed.

	Compiles the probe program (class_check_source) in a scratch directory and
	runs it against 'classname'; returns the probe's exit status (0 = found).
	"""
	import shutil

	javatestdir = '.waf-javatest'

	classpath = javatestdir
	if self.env['CLASSPATH']:
		classpath += os.pathsep + self.env['CLASSPATH']
	if isinstance(with_classpath, str):
		classpath += os.pathsep + with_classpath

	# fresh scratch directory for the probe source
	shutil.rmtree(javatestdir, True)
	os.mkdir(javatestdir)

	java_file = open(os.path.join(javatestdir, 'Test.java'), 'w')
	java_file.write(class_check_source)
	java_file.close()

	# Compile the source
	Utils.exec_command(self.env['JAVAC'] + [os.path.join(javatestdir, 'Test.java')], shell=False)

	# Try to run the app
	cmd = self.env['JAVA'] + ['-cp', classpath, 'Test', classname]
	self.log.write("%s\n" % str(cmd))
	found = Utils.exec_command(cmd, shell=False, log=self.log)

	self.check_message('Java class %s' % classname, "", not found)

	shutil.rmtree(javatestdir, True)

	return found
@conf
def check_jni_headers(conf):
	"""
	Check for jni headers and libraries
	On success the environment variable xxx_JAVA is added for uselib
	"""
	if not conf.env.CC_NAME and not conf.env.CXX_NAME:
		conf.fatal('load a compiler first (gcc, g++, ..)')

	if not conf.env.JAVA_HOME:
		conf.fatal('set JAVA_HOME in the system environment')

	# jni requires the jvm
	javaHome = conf.env['JAVA_HOME'][0]

	# glob JAVA_HOME for the headers and the jvm library
	b = Build.BuildContext()
	b.load_dirs(conf.srcdir, conf.blddir)
	dir = b.root.find_dir(conf.env.JAVA_HOME[0] + '/include')
	f = dir.ant_glob('**/(jni|jni_md).h', flat=False)
	incDirs = [x.parent.abspath() for x in f]

	dir = b.root.find_dir(conf.env.JAVA_HOME[0])
	f = dir.ant_glob('**/*jvm.(so|dll)', flat=False)
	libDirs = [x.parent.abspath() for x in f] or [javaHome]

	# try each candidate lib dir until one links
	for i, d in enumerate(libDirs):
		if conf.check(header_name='jni.h', define_name='HAVE_JNI_H', lib='jvm',
				libpath=d, includes=incDirs, uselib_store='JAVA', uselib='JAVA'):
			break
	else:
		conf.fatal('could not find lib jvm in %r (see config.log)' % libDirs)

74
tools/wafadmin/Tools/kde4.py

@ -1,74 +0,0 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2006 (ita)
import os, sys, re
import Options, TaskGen, Task, Utils
from TaskGen import taskgen, feature, after
class msgfmt_taskgen(TaskGen.task_gen):
	"""Task generator for the 'msgfmt' feature (gettext .po -> .mo)."""
	def __init__(self, *k, **kw):
		TaskGen.task_gen.__init__(self, *k, **kw)

@feature('msgfmt')
def init_msgfmt(self):
	#langs = '' # for example "foo/fr foo/br"
	self.default_install_path = '${KDE4_LOCALE_INSTALL_DIR}'
@feature('msgfmt')
@after('init_msgfmt')
def apply_msgfmt(self):
	"""Create one 'msgfmt' task per language and schedule the .mo install
	as <appname>.mo under <install_path>/<lang>/LC_MESSAGES."""
	for lang in self.to_list(self.langs):
		node = self.path.find_resource(lang+'.po')
		task = self.create_task('msgfmt', node, node.change_ext('.mo'))

		if not self.bld.is_install: continue
		# only the last path component names the language
		langname = lang.split('/')
		langname = langname[-1]
		task.install_path = self.install_path + os.sep + langname + os.sep + 'LC_MESSAGES'
		task.filename = getattr(self, 'appname', 'set_your_appname') + '.mo'
		task.chmod = self.chmod
def detect(conf):
	"""Configure KDE4: locate kde4-config, parse KDELibsDependencies.cmake for
	the install directories, and set the uselib variables for the KDE libs."""
	kdeconfig = conf.find_program('kde4-config')
	if not kdeconfig:
		conf.fatal('we need kde4-config')
	prefix = Utils.cmd_output('%s --prefix' % kdeconfig, silent=True).strip()
	file = '%s/share/apps/cmake/modules/KDELibsDependencies.cmake' % prefix
	try: os.stat(file)
	except OSError:
		# newer layout puts the modules under share/kde4
		file = '%s/share/kde4/apps/cmake/modules/KDELibsDependencies.cmake' % prefix
		try: os.stat(file)
		except OSError: conf.fatal('could not open %s' % file)

	try:
		txt = Utils.readf(file)
	except (OSError, IOError):
		conf.fatal('could not read %s' % file)

	# join cmake line continuations, then strip comments
	txt = txt.replace('\\\n', '\n')
	fu = re.compile('#(.*)\n')
	txt = fu.sub('', txt)

	# fixed: pattern made a raw string — '\s' and '\(' are invalid string
	# escapes (a SyntaxWarning on modern Python, an error in the future);
	# the regex itself is unchanged
	setregexp = re.compile(r'([sS][eE][tT]\s*\()\s*([^\s]+)\s+\"([^"]+)\"\)')
	found = setregexp.findall(txt)

	# import every set(KEY "value") pair into the configuration env
	for (_, key, val) in found:
		conf.env[key] = val

	# well well, i could just write an interpreter for cmake files
	conf.env['LIB_KDECORE']='kdecore'
	conf.env['LIB_KDEUI']  ='kdeui'
	conf.env['LIB_KIO']    ='kio'
	conf.env['LIB_KHTML']  ='khtml'
	conf.env['LIB_KPARTS'] ='kparts'

	conf.env['LIBPATH_KDECORE'] = conf.env['KDE4_LIB_INSTALL_DIR']
	conf.env['CPPPATH_KDECORE'] = conf.env['KDE4_INCLUDE_INSTALL_DIR']
	conf.env.append_value('CPPPATH_KDECORE', conf.env['KDE4_INCLUDE_INSTALL_DIR']+"/KDE")

	conf.env['MSGFMT'] = conf.find_program('msgfmt')

Task.simple_task_type('msgfmt', '${MSGFMT} ${SRC} -o ${TGT}', color='BLUE', shell=False)

25
tools/wafadmin/Tools/lua.py

@ -1,25 +0,0 @@
#!/usr/bin/env python
# encoding: utf-8
# Sebastian Schlingmann, 2008
# Thomas Nagy, 2008 (ita)
import TaskGen
from TaskGen import taskgen, feature
from Constants import *
# compile+strip *.lua into *.luac; installed to the LUADIR env variable
TaskGen.declare_chain(
	name = 'luac',
	rule = '${LUAC} -s -o ${TGT} ${SRC}',
	ext_in = '.lua',
	ext_out = '.luac',
	reentrant = False,
	install = 'LUADIR', # env variable
)

@feature('lua')
def init_lua(self):
	# installed byte-code files are made executable (O755 from Constants)
	self.default_chmod = O755

def detect(conf):
	"""Find the Lua byte-code compiler."""
	conf.find_program('luac', var='LUAC', mandatory = True)

792
tools/wafadmin/Tools/msvc.py

@ -1,792 +0,0 @@
#!/usr/bin/env python
# encoding: utf-8
# Carlos Rafael Giani, 2006 (dv)
# Tamas Pal, 2007 (folti)
# Nicolas Mercier, 2009
# Microsoft Visual C++/Intel C++ compiler support - beta, needs more testing
# usage:
#
# conf.env['MSVC_VERSIONS'] = ['msvc 9.0', 'msvc 8.0', 'wsdk 7.0', 'intel 11', 'PocketPC 9.0', 'Smartphone 8.0']
# conf.env['MSVC_TARGETS'] = ['x64']
# conf.check_tool('msvc')
# OR conf.check_tool('msvc', funs='no_autodetect')
# conf.check_lib_msvc('gdi32')
# conf.check_libs_msvc('kernel32 user32', mandatory=true)
# ...
# obj.uselib = 'KERNEL32 USER32 GDI32'
#
# platforms and targets will be tested in the order they appear;
# the first good configuration will be used
# supported platforms :
# ia64, x64, x86, x86_amd64, x86_ia64
# compilers supported :
# msvc => Visual Studio, versions 7.1 (2003), 8,0 (2005), 9.0 (2008)
# wsdk => Windows SDK, versions 6.0, 6.1, 7.0
# icl => Intel compiler, versions 9,10,11
# Smartphone => Compiler/SDK for Smartphone devices (armv4/v4i)
# PocketPC => Compiler/SDK for PocketPC devices (armv4/v4i)
import os, sys, re, string, optparse
import Utils, TaskGen, Runner, Configure, Task, Options
from Logs import debug, info, warn, error
from TaskGen import after, before, feature
from Configure import conftest, conf
import ccroot, cc, cxx, ar, winres
from libtool import read_la_file
import _winreg
pproc = Utils.pproc
# importlibs provided by MSVC/Platform SDK. Do NOT search them....
# importlibs provided by MSVC/Platform SDK. Do NOT search them....
g_msvc_systemlibs = """
aclui activeds ad1 adptif adsiid advapi32 asycfilt authz bhsupp bits bufferoverflowu cabinet
cap certadm certidl ciuuid clusapi comctl32 comdlg32 comsupp comsuppd comsuppw comsuppwd comsvcs
credui crypt32 cryptnet cryptui d3d8thk daouuid dbgeng dbghelp dciman32 ddao35 ddao35d
ddao35u ddao35ud delayimp dhcpcsvc dhcpsapi dlcapi dnsapi dsprop dsuiext dtchelp
faultrep fcachdll fci fdi framedyd framedyn gdi32 gdiplus glauxglu32 gpedit gpmuuid
gtrts32w gtrtst32hlink htmlhelp httpapi icm32 icmui imagehlp imm32 iphlpapi iprop
kernel32 ksguid ksproxy ksuser libcmt libcmtd libcpmt libcpmtd loadperf lz32 mapi
mapi32 mgmtapi minidump mmc mobsync mpr mprapi mqoa mqrt msacm32 mscms mscoree
msdasc msimg32 msrating mstask msvcmrt msvcurt msvcurtd mswsock msxml2 mtx mtxdm
netapi32 nmapinmsupp npptools ntdsapi ntdsbcli ntmsapi ntquery odbc32 odbcbcp
odbccp32 oldnames ole32 oleacc oleaut32 oledb oledlgolepro32 opends60 opengl32
osptk parser pdh penter pgobootrun pgort powrprof psapi ptrustm ptrustmd ptrustu
ptrustud qosname rasapi32 rasdlg rassapi resutils riched20 rpcndr rpcns4 rpcrt4 rtm
rtutils runtmchk scarddlg scrnsave scrnsavw secur32 sensapi setupapi sfc shell32
shfolder shlwapi sisbkup snmpapi sporder srclient sti strsafe svcguid tapi32 thunk32
traffic unicows url urlmon user32 userenv usp10 uuid uxtheme vcomp vcompd vdmdbg
version vfw32 wbemuuid webpost wiaguid wininet winmm winscard winspool winstrm
wintrust wldap32 wmiutils wow32 ws2_32 wsnmp32 wsock32 wst wtsapi32 xaswitch xolehlp
""".split()
# (target name, architecture) pairs, in the order they are probed
all_msvc_platforms = [ ('x64', 'amd64'), ('x86', 'x86'), ('ia64', 'ia64'), ('x86_amd64', 'amd64'), ('x86_ia64', 'ia64') ]
all_wince_platforms = [ ('armv4', 'arm'), ('armv4i', 'arm'), ('mipsii', 'mips'), ('mipsii_fp', 'mips'), ('mipsiv', 'mips'), ('mipsiv_fp', 'mips'), ('sh4', 'sh'), ('x86', 'cex86') ]
all_icl_platforms = [ ('intel64', 'amd64'), ('em64t', 'amd64'), ('ia32', 'x86'), ('Itanium', 'ia64')]
def setup_msvc(conf, versions):
	"""Pick the first (version, target) pair allowed by MSVC_VERSIONS and
	MSVC_TARGETS and return (compiler, revision, bindirs, incdirs, libdirs);
	aborts the configuration when nothing matches."""
	platforms = Utils.to_list(conf.env['MSVC_TARGETS']) or [i for i,j in all_msvc_platforms+all_icl_platforms+all_wince_platforms]
	# default: every detected version, newest first
	desired_versions = conf.env['MSVC_VERSIONS'] or [v for v,_ in versions][::-1]
	versiondict = dict(versions)

	for version in desired_versions:
		try:
			targets = dict(versiondict [version])
			for target in platforms:
				try:
					arch,(p1,p2,p3) = targets[target]
					compiler,revision = version.split()
					return compiler,revision,p1,p2,p3
				except KeyError: continue
		except KeyError: continue
	conf.fatal('msvc: Impossible to find a valid architecture for building (in setup_msvc)')
@conf
def get_msvc_version(conf, compiler, version, target, vcvars):
	"""Run the compiler's environment batch file and return the
	(PATH, INCLUDE, LIB) directory lists it produces; also sanity-check that
	the detected compiler can actually run on this machine."""
	debug('msvc: get_msvc_version: %r %r %r', compiler, version, target)
	batfile = os.path.join(conf.blddir, 'waf-print-msvc.bat')
	f = open(batfile, 'w')
	f.write("""@echo off
set INCLUDE=
set LIB=
call "%s" %s
echo PATH=%%PATH%%
echo INCLUDE=%%INCLUDE%%
echo LIB=%%LIB%%
""" % (vcvars,target))
	f.close()
	sout = Utils.cmd_output(['cmd', '/E:on', '/V:on', '/C', batfile])
	lines = sout.splitlines()

	# the first line tells whether the batch file actually set up an environment
	for x in ('Setting environment', 'Setting SDK environment', 'Intel(R) C++ Compiler'):
		if lines[0].find(x) != -1:
			break
	else:
		debug('msvc: get_msvc_version: %r %r %r -> not found', compiler, version, target)
		conf.fatal('msvc: Impossible to find a valid architecture for building (in get_msvc_version)')

	for line in lines[1:]:
		if line.startswith('PATH='):
			path = line[5:]
			MSVC_PATH = path.split(';')
		elif line.startswith('INCLUDE='):
			MSVC_INCDIR = [i for i in line[8:].split(';') if i]
		elif line.startswith('LIB='):
			MSVC_LIBDIR = [i for i in line[4:].split(';') if i]

	# Check if the compiler is usable at all.
	# The detection may return 64-bit versions even on 32-bit systems, and these would fail to run.
	env = {}
	env.update(os.environ)
	env.update(PATH = path)
	compiler_name, linker_name, lib_name = _get_prog_names(conf, compiler)
	cxx = conf.find_program(compiler_name, path_list=MSVC_PATH)
	# delete CL if it exists, because it could contain parameters which can
	# change cl's behaviour rather catastrophically
	if env.has_key('CL'):
		del(env['CL'])

	try:
		p = pproc.Popen([cxx, '/help'], env=env, stdout=pproc.PIPE, stderr=pproc.PIPE)
		out, err = p.communicate()
		if p.returncode != 0:
			raise Exception('return code: %r: %r' % (p.returncode, err))
	except Exception, e:
		debug('msvc: get_msvc_version: %r %r %r -> failure', compiler, version, target)
		debug(str(e))
		conf.fatal('msvc: cannot run the compiler (in get_msvc_version)')
	else:
		debug('msvc: get_msvc_version: %r %r %r -> OK', compiler, version, target)

	return (MSVC_PATH, MSVC_INCDIR, MSVC_LIBDIR)
@conf
def gather_wsdk_versions(conf, versions):
	"""Enumerate installed Windows SDKs from the registry and append
	('wsdk <version>', targets) entries to 'versions'."""
	version_pattern = re.compile('^v..?.?\...?.?')
	# 64-bit registry view first, then the native one
	try:
		all_versions = _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Wow6432node\\Microsoft\\Microsoft SDKs\\Windows')
	except WindowsError:
		try:
			all_versions = _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Microsoft\\Microsoft SDKs\\Windows')
		except WindowsError:
			return
	index = 0
	while 1:
		try:
			version = _winreg.EnumKey(all_versions, index)
		except WindowsError:
			break
		index = index + 1
		if not version_pattern.match(version):
			continue
		try:
			msvc_version = _winreg.OpenKey(all_versions, version)
			path,type = _winreg.QueryValueEx(msvc_version,'InstallationFolder')
		except WindowsError:
			continue
		if os.path.isfile(os.path.join(path, 'bin', 'SetEnv.cmd')):
			targets = []
			for target,arch in all_msvc_platforms:
				try:
					targets.append((target, (arch, conf.get_msvc_version('wsdk', version, '/'+target, os.path.join(path, 'bin', 'SetEnv.cmd')))))
				except Configure.ConfigurationError:
					# this target is not usable; keep probing the others
					pass
			versions.append(('wsdk ' + version[1:], targets))
@conf
def gather_msvc_versions(conf, versions):
	"""Enumerate installed Visual Studio (and Windows CE SDK) compilers from
	the registry and append ('msvc <version>'/'<device> <version>', targets)
	entries to 'versions'."""
	# checks SmartPhones SDKs
	try:
		ce_sdk = _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Wow6432node\\Microsoft\\Windows CE Tools\\SDKs')
	except WindowsError:
		try:
			ce_sdk = _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Microsoft\\Windows CE Tools\\SDKs')
		except WindowsError:
			ce_sdk = ''
	if ce_sdk:
		supported_wince_platforms = []
		ce_index = 0
		while 1:
			try:
				sdk_device = _winreg.EnumKey(ce_sdk, ce_index)
			except WindowsError:
				break
			ce_index = ce_index + 1
			sdk = _winreg.OpenKey(ce_sdk, sdk_device)
			path,type = _winreg.QueryValueEx(sdk, 'SDKRootDir')
			path=str(path)
			# the device name is the last (non-empty) path component
			path,device = os.path.split(path)
			if not device:
				path,device = os.path.split(path)
			for arch,compiler in all_wince_platforms:
				platforms = []
				if os.path.isdir(os.path.join(path, device, 'Lib', arch)):
					platforms.append((arch, compiler, os.path.join(path, device, 'Include', arch), os.path.join(path, device, 'Lib', arch)))
				if platforms:
					supported_wince_platforms.append((device, platforms))
	# checks MSVC
	version_pattern = re.compile('^..?\...?')
	for vcver,vcvar in [('VCExpress','exp'), ('VisualStudio','')]:
		try:
			all_versions = _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Wow6432node\\Microsoft\\'+vcver)
		except WindowsError:
			try:
				all_versions = _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Microsoft\\'+vcver)
			except WindowsError:
				continue
		index = 0
		while 1:
			try:
				version = _winreg.EnumKey(all_versions, index)
			except WindowsError:
				break
			index = index + 1
			if not version_pattern.match(version):
				continue
			try:
				msvc_version = _winreg.OpenKey(all_versions, version + "\\Setup\\VS")
				path,type = _winreg.QueryValueEx(msvc_version, 'ProductDir')
				path=str(path)
				targets = []
				if ce_sdk:
					# one pseudo-version per CE device
					for device,platforms in supported_wince_platforms:
						cetargets = []
						for platform,compiler,include,lib in platforms:
							winCEpath = os.path.join(path, 'VC', 'ce')
							if os.path.isdir(winCEpath):
								common_bindirs,_1,_2 = conf.get_msvc_version('msvc', version, 'x86', os.path.join(path, 'Common7', 'Tools', 'vsvars32.bat'))
								if os.path.isdir(os.path.join(winCEpath, 'lib', platform)):
									bindirs = [os.path.join(winCEpath, 'bin', compiler), os.path.join(winCEpath, 'bin', 'x86_'+compiler)] + common_bindirs
									incdirs = [include, os.path.join(winCEpath, 'include'), os.path.join(winCEpath, 'atlmfc', 'include')]
									libdirs = [lib, os.path.join(winCEpath, 'lib', platform), os.path.join(winCEpath, 'atlmfc', 'lib', platform)]
									cetargets.append((platform, (platform, (bindirs,incdirs,libdirs))))
						versions.append((device+' '+version, cetargets))
				# VS 2005+ layout (vcvarsall), else the older vsvars32 layout
				if os.path.isfile(os.path.join(path, 'VC', 'vcvarsall.bat')):
					for target,realtarget in all_msvc_platforms[::-1]:
						try:
							targets.append((target, (realtarget, conf.get_msvc_version('msvc', version, target, os.path.join(path, 'VC', 'vcvarsall.bat')))))
						except:
							pass
				elif os.path.isfile(os.path.join(path, 'Common7', 'Tools', 'vsvars32.bat')):
					try:
						targets.append(('x86', ('x86', conf.get_msvc_version('msvc', version, 'x86', os.path.join(path, 'Common7', 'Tools', 'vsvars32.bat')))))
					except Configure.ConfigurationError:
						pass
				versions.append(('msvc '+version, targets))
			except WindowsError:
				continue
@conf
def gather_icl_versions(conf, versions):
	"""Enumerate installed Intel C++ compilers from the registry and append
	('intel <major>', targets) entries to 'versions'."""
	version_pattern = re.compile('^...?.?\....?.?')
	# 64-bit registry view first, then the native one
	try:
		all_versions = _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Wow6432node\\Intel\\Compilers\\C++')
	except WindowsError:
		try:
			all_versions = _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Intel\\Compilers\\C++')
		except WindowsError:
			return
	index = 0
	while 1:
		try:
			version = _winreg.EnumKey(all_versions, index)
		except WindowsError:
			break
		index = index + 1
		if not version_pattern.match(version):
			continue
		targets = []
		for target,arch in all_icl_platforms:
			try:
				icl_version = _winreg.OpenKey(all_versions, version+'\\'+target)
				path,type = _winreg.QueryValueEx(icl_version,'ProductDir')
				if os.path.isfile(os.path.join(path, 'bin', 'iclvars.bat')):
					try:
						targets.append((target, (arch, conf.get_msvc_version('intel', version, target, os.path.join(path, 'bin', 'iclvars.bat')))))
					except Configure.ConfigurationError:
						pass
			except WindowsError:
				continue
		major = version[0:2]
		versions.append(('intel ' + major, targets))
@conf
def get_msvc_versions(conf):
	"""Return the installed compiler versions, detecting them on first call.

	The result is cached in conf.env.MSVC_INSTALLED_VERSIONS: a list of
	(version-name, [(target, data), ...]) tuples gathered from MSVC, the
	Windows SDK and the Intel compiler.
	"""
	if not conf.env.MSVC_INSTALLED_VERSIONS:
		lst = []
		conf.gather_msvc_versions(lst)
		conf.gather_wsdk_versions(lst)
		conf.gather_icl_versions(lst)
		conf.env.MSVC_INSTALLED_VERSIONS = lst
	return conf.env.MSVC_INSTALLED_VERSIONS

@conf
def print_all_msvc_detected(conf):
	"""Log every detected compiler version together with its targets."""
	for version,targets in conf.env['MSVC_INSTALLED_VERSIONS']:
		info(version)
		for target,l in targets:
			info("\t"+target)

def detect_msvc(conf):
	"""Detect the installed compilers and select one (delegates to setup_msvc)."""
	versions = get_msvc_versions(conf)
	return setup_msvc(conf, versions)
@conf
def find_lt_names_msvc(self, libname, is_static=False):
	"""
	Win32/MSVC specific code to glean out information from libtool la files.
	this function is not attached to the task_gen class

	Returns a (directory, library-name, is-static) tuple, or
	(None, None, None) when no .la file describes *libname*.
	Raises Utils.WafError on a .la file with neither library_names
	nor old_library entries.
	"""
	lt_names = [
		'lib%s.la' % libname,
		'%s.la' % libname,
	]
	for path in self.env['LIBPATH']:
		for la in lt_names:
			laf = os.path.join(path, la)
			dll = None
			if os.path.exists(laf):
				ltdict = read_la_file(laf)
				lt_libdir = None
				if ltdict.get('libdir', ''):
					lt_libdir = ltdict['libdir']
				if not is_static and ltdict.get('library_names', ''):
					# shared case: first entry of library_names is the dll
					dllnames = ltdict['library_names'].split()
					dll = dllnames[0].lower()
					dll = re.sub('\.dll$', '', dll)
					return (lt_libdir, dll, False)
				elif ltdict.get('old_library', ''):
					olib = ltdict['old_library']
					if os.path.exists(os.path.join(path, olib)):
						return (path, olib, True)
					# bugfix: lt_libdir may still be None here (no 'libdir' entry
					# in the .la file); the old "lt_libdir != ''" test let None
					# through and os.path.join(None, olib) raised a TypeError
					elif lt_libdir and os.path.exists(os.path.join(lt_libdir, olib)):
						return (lt_libdir, olib, True)
					else:
						return (None, olib, True)
				else:
					raise Utils.WafError('invalid libtool object file: %s' % laf)
	return (None, None, None)
@conf
def libname_msvc(self, libname, is_static=False, mandatory=False):
	"""Translate a library name into the name msvc should link against.

	Checks the known system libraries, then libtool .la files, then the
	usual static/import library file name patterns on LIBPATH.  Returns
	the name without the .lib suffix; calls self.fatal only when
	*mandatory* and nothing was found.
	"""
	lib = libname.lower()
	lib = re.sub('\.lib$','',lib)
	if lib in g_msvc_systemlibs:
		return lib
	lib=re.sub('^lib','',lib)
	if lib == 'm':
		# the math library is part of the msvc runtime, nothing to link
		return None
	(lt_path, lt_libname, lt_static) = self.find_lt_names_msvc(lib, is_static)
	if lt_path != None and lt_libname != None:
		if lt_static == True:
			# file existance check has been made by find_lt_names
			return os.path.join(lt_path,lt_libname)
	if lt_path != None:
		_libpaths=[lt_path] + self.env['LIBPATH']
	else:
		_libpaths=self.env['LIBPATH']
	# candidate file names, most specific first
	static_libs=[
		'lib%ss.lib' % lib,
		'lib%s.lib' % lib,
		'%ss.lib' % lib,
		'%s.lib' %lib,
	]
	dynamic_libs=[
		'lib%s.dll.lib' % lib,
		'lib%s.dll.a' % lib,
		'%s.dll.lib' % lib,
		'%s.dll.a' % lib,
		'lib%s_d.lib' % lib,
		'%s_d.lib' % lib,
		'%s.lib' %lib,
	]
	libnames=static_libs
	if not is_static:
		# prefer import libraries for shared builds, fall back to static names
		libnames=dynamic_libs + static_libs
	for path in _libpaths:
		for libn in libnames:
			if os.path.exists(os.path.join(path, libn)):
				debug('msvc: lib found: %s', os.path.join(path,libn))
				return re.sub('\.lib$', '',libn)
	#if no lib can be found, just return the libname as msvc expects it
	if mandatory:
		self.fatal("The library %r could not be found" % libname)
	return re.sub('\.lib$', '', libname)

@conf
def check_lib_msvc(self, libname, is_static=False, uselib_store=None, mandatory=False):
	"This is the api to use"
	libn = self.libname_msvc(libname, is_static, mandatory)
	if not uselib_store:
		uselib_store = libname.upper()
	# Note: ideally we should be able to place the lib in the right env var, either STATICLIB or LIB,
	# but we don't distinguish static libs from shared libs.
	# This is ok since msvc doesn't have any special linker flag to select static libs (no env['STATICLIB_MARKER'])
	if False and is_static: # disabled
		self.env['STATICLIB_' + uselib_store] = [libn]
	else:
		self.env['LIB_' + uselib_store] = [libn]

@conf
def check_libs_msvc(self, libnames, is_static=False, mandatory=False):
	"""Run check_lib_msvc for each name in *libnames* (string or list)."""
	for libname in Utils.to_list(libnames):
		self.check_lib_msvc(libname, is_static, mandatory=mandatory)

@conftest
def no_autodetect(conf):
	"""Run the msvc configuration steps without the compiler autodetection."""
	conf.eval_rules(detect.replace('autodetect', ''))

# configuration methods executed for this tool, in order
detect = '''
autodetect
find_msvc
msvc_common_flags
cc_load_tools
cxx_load_tools
cc_add_flags
cxx_add_flags
link_add_flags
'''
@conftest
def autodetect(conf):
	"""Pick an installed msvc toolchain and record its paths in conf.env."""
	v = conf.env
	compiler, version, path, includes, libdirs = detect_msvc(conf)
	v['PATH'] = path
	v['CPPPATH'] = includes
	v['LIBPATH'] = libdirs
	v['MSVC_COMPILER'] = compiler
def _get_prog_names(conf, compiler):
if compiler=='intel':
compiler_name = 'ICL'
linker_name = 'XILINK'
lib_name = 'XILIB'
else:
# assumes CL.exe
compiler_name = 'CL'
linker_name = 'LINK'
lib_name = 'LIB'
return compiler_name, linker_name, lib_name
@conftest
def find_msvc(conf):
	"""Locate the msvc compiler, linker and helper tools and fill conf.env.

	Nothing is written into the environment until every lookup has
	succeeded, so a failed configuration leaves conf.env consistent.
	"""
	# due to path format limitations, limit operation only to native Win32. Yeah it sucks.
	if sys.platform != 'win32':
		conf.fatal('MSVC module only works under native Win32 Python! cygwin is not supported yet')
	v = conf.env
	compiler, version, path, includes, libdirs = detect_msvc(conf)
	compiler_name, linker_name, lib_name = _get_prog_names(conf, compiler)
	# the manifest tool exists from VS 2005 / WSDK 6 / Intel 11 onwards
	has_msvc_manifest = (compiler == 'msvc' and float(version) >= 8) or (compiler == 'wsdk' and float(version) >= 6) or (compiler == 'intel' and float(version) >= 11)
	# compiler
	cxx = None
	if v.CXX: cxx = v.CXX
	elif 'CXX' in conf.environ: cxx = conf.environ['CXX']
	if not cxx: cxx = conf.find_program(compiler_name, var='CXX', path_list=path, mandatory=True)
	cxx = conf.cmd_to_list(cxx)
	# before setting anything, check if the compiler is really msvc
	env = dict(conf.environ)
	env.update(PATH = ';'.join(path))
	if not Utils.cmd_output([cxx, '/nologo', '/?'], silent=True, env=env):
		conf.fatal('the msvc compiler could not be identified')
	link = v.LINK_CXX
	if not link:
		link = conf.find_program(linker_name, path_list=path, mandatory=True)
	ar = v.AR
	if not ar:
		ar = conf.find_program(lib_name, path_list=path, mandatory=True)
	# manifest tool. Not required for VS 2003 and below. Must have for VS 2005 and later
	mt = v.MT
	if has_msvc_manifest:
		mt = conf.find_program('MT', path_list=path, mandatory=True)
	# no more possibility of failure means the data state will be consistent
	# we may store the data safely now
	v.MSVC_MANIFEST = has_msvc_manifest
	v.PATH = path
	v.CPPPATH = includes
	v.LIBPATH = libdirs
	# c/c++ compiler
	v.CC = v.CXX = cxx
	v.CC_NAME = v.CXX_NAME = 'msvc'
	v.LINK = v.LINK_CXX = link
	if not v.LINK_CC:
		v.LINK_CC = v.LINK_CXX
	v.AR = ar
	v.MT = mt
	v.MTFLAGS = v.ARFLAGS = ['/NOLOGO']
	# resource compiler is optional; only warn when it is missing
	conf.check_tool('winres')
	if not conf.env.WINRC:
		warn('Resource compiler not found. Compiling resource file is disabled')
	# environment flags: honour INCLUDE/LIB from the calling environment
	try: v.prepend_value('CPPPATH', conf.environ['INCLUDE'])
	except KeyError: pass
	try: v.prepend_value('LIBPATH', conf.environ['LIB'])
	except KeyError: pass
@conftest
def msvc_common_flags(conf):
	"""Define the flag templates and default flag sets used by msvc builds."""
	v = conf.env
	v['CPPFLAGS'] = ['/W3', '/nologo']
	v['CCDEFINES_ST'] = '/D%s'
	v['CXXDEFINES_ST'] = '/D%s'
	# TODO just use _WIN32, which defined by the compiler itself!
	v['CCDEFINES'] = ['WIN32'] # avoid using this, any compiler predefines the _WIN32 marcro anyway
	v['CXXDEFINES'] = ['WIN32'] # avoid using this, any compiler predefines the _WIN32 marcro anyway
	v['_CCINCFLAGS'] = []
	v['_CCDEFFLAGS'] = []
	v['_CXXINCFLAGS'] = []
	v['_CXXDEFFLAGS'] = []
	v['CC_SRC_F'] = ''
	v['CC_TGT_F'] = ['/c', '/Fo']
	v['CXX_SRC_F'] = ''
	v['CXX_TGT_F'] = ['/c', '/Fo']
	v['CPPPATH_ST'] = '/I%s' # template for adding include paths
	v['AR_TGT_F'] = v['CCLNK_TGT_F'] = v['CXXLNK_TGT_F'] = '/OUT:'
	# Subsystem specific flags
	v['CPPFLAGS_CONSOLE'] = ['/SUBSYSTEM:CONSOLE']
	v['CPPFLAGS_NATIVE'] = ['/SUBSYSTEM:NATIVE']
	v['CPPFLAGS_POSIX'] = ['/SUBSYSTEM:POSIX']
	v['CPPFLAGS_WINDOWS'] = ['/SUBSYSTEM:WINDOWS']
	v['CPPFLAGS_WINDOWSCE'] = ['/SUBSYSTEM:WINDOWSCE']
	# CRT specific flags
	v['CPPFLAGS_CRT_MULTITHREADED'] = ['/MT']
	v['CPPFLAGS_CRT_MULTITHREADED_DLL'] = ['/MD']
	# TODO these are defined by the compiler itself!
	v['CPPDEFINES_CRT_MULTITHREADED'] = ['_MT'] # this is defined by the compiler itself!
	v['CPPDEFINES_CRT_MULTITHREADED_DLL'] = ['_MT', '_DLL'] # these are defined by the compiler itself!
	v['CPPFLAGS_CRT_MULTITHREADED_DBG'] = ['/MTd']
	v['CPPFLAGS_CRT_MULTITHREADED_DLL_DBG'] = ['/MDd']
	# TODO these are defined by the compiler itself!
	v['CPPDEFINES_CRT_MULTITHREADED_DBG'] = ['_DEBUG', '_MT'] # these are defined by the compiler itself!
	v['CPPDEFINES_CRT_MULTITHREADED_DLL_DBG'] = ['_DEBUG', '_MT', '_DLL'] # these are defined by the compiler itself!
	# compiler debug levels
	v['CCFLAGS'] = ['/TC']
	v['CCFLAGS_OPTIMIZED'] = ['/O2', '/DNDEBUG']
	v['CCFLAGS_RELEASE'] = ['/O2', '/DNDEBUG']
	v['CCFLAGS_DEBUG'] = ['/Od', '/RTC1', '/ZI']
	v['CCFLAGS_ULTRADEBUG'] = ['/Od', '/RTC1', '/ZI']
	v['CXXFLAGS'] = ['/TP', '/EHsc']
	v['CXXFLAGS_OPTIMIZED'] = ['/O2', '/DNDEBUG']
	v['CXXFLAGS_RELEASE'] = ['/O2', '/DNDEBUG']
	v['CXXFLAGS_DEBUG'] = ['/Od', '/RTC1', '/ZI']
	v['CXXFLAGS_ULTRADEBUG'] = ['/Od', '/RTC1', '/ZI']
	# linker
	v['LIB'] = []
	v['LIB_ST'] = '%s.lib' # template for adding libs
	v['LIBPATH_ST'] = '/LIBPATH:%s' # template for adding libpaths
	v['STATICLIB_ST'] = 'lib%s.lib' # Note: to be able to distinguish between a static lib and a dll import lib, it's a good pratice to name the static lib 'lib%s.lib' and the dll import lib '%s.lib'
	v['STATICLIBPATH_ST'] = '/LIBPATH:%s'
	v['LINKFLAGS'] = ['/NOLOGO', '/MANIFEST']
	v['LINKFLAGS_DEBUG'] = ['/DEBUG']
	v['LINKFLAGS_ULTRADEBUG'] = ['/DEBUG']
	# shared library
	v['shlib_CCFLAGS'] = ['']
	v['shlib_CXXFLAGS'] = ['']
	v['shlib_LINKFLAGS']= ['/DLL']
	v['shlib_PATTERN'] = '%s.dll'
	v['implib_PATTERN'] = '%s.lib'
	v['IMPLIB_ST'] = '/IMPLIB:%s'
	# static library
	v['staticlib_LINKFLAGS'] = ['']
	v['staticlib_PATTERN'] = 'lib%s.lib' # Note: to be able to distinguish between a static lib and a dll import lib, it's a good pratice to name the static lib 'lib%s.lib' and the dll import lib '%s.lib'
	# program
	v['program_PATTERN'] = '%s.exe'
#######################################################################################################
##### conf above, build below
@after('apply_link')
@feature('cc', 'cxx')
def apply_flags_msvc(self):
	"""Add the /subsystem flag and register the .pdb file for installation."""
	if self.env.CC_NAME != 'msvc' or not self.link_task:
		return
	subsystem = getattr(self, 'subsystem', '')
	if subsystem:
		subsystem = '/subsystem:%s' % subsystem
		# static libs take the flag on the archiver, everything else on the linker
		flags = 'cstaticlib' in self.features and 'ARFLAGS' or 'LINKFLAGS'
		self.env.append_value(flags, subsystem)
	if getattr(self, 'link_task', None) and not 'cstaticlib' in self.features:
		for f in self.env.LINKFLAGS:
			d = f.lower()
			if d[1:] == 'debug':
				# debug builds: the linker emits a .pdb next to the binary
				pdbnode = self.link_task.outputs[0].change_ext('.pdb')
				pdbfile = pdbnode.bldpath(self.env)
				self.link_task.outputs.append(pdbnode)
				self.bld.install_files(self.install_path, [pdbnode], env=self.env)
				break

@feature('cprogram', 'cshlib', 'cstaticlib')
@after('apply_lib_vars')
@before('apply_obj_vars')
def apply_obj_vars_msvc(self):
	"""Replace the generic apply_obj_vars with msvc-style link flags."""
	if self.env['CC_NAME'] != 'msvc':
		return
	try:
		# suppress the default method, this one takes over
		self.meths.remove('apply_obj_vars')
	except ValueError:
		pass
	libpaths = getattr(self, 'libpaths', [])
	if not libpaths: self.libpaths = libpaths
	env = self.env
	app = env.append_unique
	cpppath_st = env['CPPPATH_ST']
	lib_st = env['LIB_ST']
	staticlib_st = env['STATICLIB_ST']
	libpath_st = env['LIBPATH_ST']
	staticlibpath_st = env['STATICLIBPATH_ST']
	for i in env['LIBPATH']:
		app('LINKFLAGS', libpath_st % i)
		if not libpaths.count(i):
			libpaths.append(i)
	for i in env['LIBPATH']:
		app('LINKFLAGS', staticlibpath_st % i)
		if not libpaths.count(i):
			libpaths.append(i)
	# i doubt that anyone will make a fully static binary anyway
	if not env['FULLSTATIC']:
		if env['STATICLIB'] or env['LIB']:
			app('LINKFLAGS', env['SHLIB_MARKER']) # TODO does SHLIB_MARKER work?
	for i in env['STATICLIB']:
		app('LINKFLAGS', staticlib_st % i)
	for i in env['LIB']:
		app('LINKFLAGS', lib_st % i)
# split the manifest file processing from the link task, like for the rc processing
# split the manifest file processing from the link task, like for the rc processing
@feature('cprogram', 'cshlib')
@after('apply_link')
def apply_manifest(self):
	"""Special linker for MSVC with support for embedding manifests into DLL's
	and executables compiled by Visual Studio 2005 or probably later. Without
	the manifest file, the binaries are unusable.
	See: http://msdn2.microsoft.com/en-us/library/ms235542(VS.80).aspx"""
	if self.env.CC_NAME == 'msvc' and self.env.MSVC_MANIFEST:
		out_node = self.link_task.outputs[0]
		man_node = out_node.parent.find_or_declare(out_node.name + '.manifest')
		self.link_task.outputs.append(man_node)
		# flag picked up by exec_command_msvc after the link succeeds
		self.link_task.do_manifest = True

def exec_mf(self):
	"""Run mt.exe to embed the manifest produced by the link task.

	Returns the tool's exit status, or 0 when there is no manifest tool
	or no manifest output to embed.
	"""
	env = self.env
	mtool = env['MT']
	if not mtool:
		return 0
	self.do_manifest = False
	outfile = self.outputs[0].bldpath(env)
	manifest = None
	for out_node in self.outputs:
		if out_node.name.endswith('.manifest'):
			manifest = out_node.bldpath(env)
			break
	if manifest is None:
		# Should never get here. If we do, it means the manifest file was
		# never added to the outputs list, thus we don't have a manifest file
		# to embed, so we just return.
		return 0
	# embedding mode. Different for EXE's and DLL's.
	# see: http://msdn2.microsoft.com/en-us/library/ms235591(VS.80).aspx
	mode = ''
	if 'cprogram' in self.generator.features:
		mode = '1'
	elif 'cshlib' in self.generator.features:
		mode = '2'
	debug('msvc: embedding manifest')
	#flags = ' '.join(env['MTFLAGS'] or [])
	lst = []
	lst.extend([env['MT']])
	lst.extend(Utils.to_list(env['MTFLAGS']))
	lst.extend(Utils.to_list("-manifest"))
	lst.extend(Utils.to_list(manifest))
	lst.extend(Utils.to_list("-outputresource:%s;%s" % (outfile, mode)))
	#cmd='%s %s -manifest "%s" -outputresource:"%s";#%s' % (mtool, flags,
	#	manifest, outfile, mode)
	lst = [lst]
	return self.exec_command(*lst)
########## stupid evil command modification: concatenate the tokens /Fx, /doc, and /x: with the next token
########## stupid evil command modification: concatenate the tokens /Fx, /doc, and /x: with the next token
def exec_command_msvc(self, *k, **kw):
	"instead of quoting all the paths and keep using the shell, we can just join the options msvc is interested in"
	if self.env['CC_NAME'] == 'msvc':
		if isinstance(k[0], list):
			lst = []
			carry = ''
			for a in k[0]:
				# options such as /Fo, /doc or /x: take their value as the
				# NEXT token -> glue the pair back into a single argument
				if len(a) == 3 and a.startswith('/F') or a == '/doc' or a[-1] == ':':
					carry = a
				else:
					lst.append(carry + a)
					carry = ''
			k = [lst]
		env = dict(os.environ)
		env.update(PATH = ';'.join(self.env['PATH']))
		kw['env'] = env
	ret = self.generator.bld.exec_command(*k, **kw)
	if ret: return ret
	# embed the manifest after a successful link when requested (see apply_manifest)
	if getattr(self, 'do_manifest', None):
		ret = exec_mf(self)
	return ret

# install the wrapper on every task class that may run msvc tools
for k in 'cc cxx winrc cc_link cxx_link static_link qxx'.split():
	cls = Task.TaskBase.classes.get(k, None)
	if cls:
		cls.exec_command = exec_command_msvc

298
tools/wafadmin/Tools/ocaml.py

@ -1,298 +0,0 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2006 (ita)
"ocaml support"
import os, re
import TaskGen, Utils, Task, Build
from Logs import error
from TaskGen import taskgen, feature, before, after, extension
EXT_MLL = ['.mll']
EXT_MLY = ['.mly']
EXT_MLI = ['.mli']
EXT_MLC = ['.c']
EXT_ML = ['.ml']
open_re = re.compile('^\s*open\s+([a-zA-Z]+)(;;){0,1}$', re.M)
foo = re.compile(r"""(\(\*)|(\*\))|("(\\.|[^"\\])*"|'(\\.|[^'\\])*'|.[^()*"'\\]*)""", re.M)
def filter_comments(txt):
	"""Return *txt* with OCaml (* ... *) comments removed (nesting handled)."""
	depth = [0]  # list so the closure can mutate the nesting level
	def _sub(match):
		if match.group(1):
			depth[0] += 1
		elif match.group(2):
			depth[0] -= 1
		elif depth[0] == 0:
			# plain code or string literal outside any comment: keep it
			return match.group(0)
		return ''
	return foo.sub(_sub, txt)
def scan(self):
	"""Dependency scanner: find the modules 'open'-ed by an OCaml source.

	Returns the (resolved-nodes, unresolved-names) pair expected by waf.
	"""
	node = self.inputs[0]
	code = filter_comments(node.read(self.env))
	global open_re
	names = []
	import_iterator = open_re.finditer(code)
	if import_iterator:
		for import_match in import_iterator:
			names.append(import_match.group(1))
	found_lst = []
	raw_lst = []
	for name in names:
		nd = None
		for x in self.incpaths:
			# module Foo usually lives in foo.ml, but try Foo.ml too
			nd = x.find_resource(name.lower()+'.ml')
			if not nd: nd = x.find_resource(name+'.ml')
			if nd:
				found_lst.append(nd)
				break
		else:
			# not found on the include paths -> keep the raw module name
			raw_lst.append(name)
	return (found_lst, raw_lst)
# build types producing native code / bytecode respectively
native_lst=['native', 'all', 'c_object']
bytecode_lst=['bytecode', 'all']
class ocaml_taskgen(TaskGen.task_gen):
	"""Task generator driven by the 'ocaml' feature methods below."""
	def __init__(self, *k, **kw):
		TaskGen.task_gen.__init__(self, *k, **kw)

@feature('ocaml')
def init_ml(self):
	"""Give the task generator the default attributes the other methods rely on."""
	Utils.def_attrs(self,
		type = 'all',
		incpaths_lst = [],
		bld_incpaths_lst = [],
		mlltasks = [],
		mlytasks = [],
		mlitasks = [],
		native_tasks = [],
		bytecode_tasks = [],
		linktasks = [],
		bytecode_env = None,
		native_env = None,
		compiled_tasks = [],
		includes = '',
		uselib = '',
		are_deps_set = 0)

@feature('ocaml')
@after('init_ml')
def init_envs_ml(self):
	"""Create the native and/or bytecode environment copies from self.type."""
	self.islibrary = getattr(self, 'islibrary', False)
	global native_lst, bytecode_lst
	self.native_env = None
	if self.type in native_lst:
		self.native_env = self.env.copy()
		if self.islibrary: self.native_env['OCALINKFLAGS'] = '-a'
	self.bytecode_env = None
	if self.type in bytecode_lst:
		self.bytecode_env = self.env.copy()
		if self.islibrary: self.bytecode_env['OCALINKFLAGS'] = '-a'
	if self.type == 'c_object':
		# produce a .o usable from C instead of an executable
		self.native_env.append_unique('OCALINKFLAGS_OPT', '-output-obj')

@feature('ocaml')
@before('apply_vars_ml')
@after('init_envs_ml')
def apply_incpaths_ml(self):
	"""Resolve self.includes into nodes (incpaths_lst / bld_incpaths_lst)."""
	inc_lst = self.includes.split()
	lst = self.incpaths_lst
	for dir in inc_lst:
		node = self.path.find_dir(dir)
		if not node:
			error("node not found: " + str(dir))
			continue
		self.bld.rescan(node)
		if not node in lst: lst.append(node)
		self.bld_incpaths_lst.append(node)
	# now the nodes are added to self.incpaths_lst
@feature('ocaml')
@before('apply_core')
def apply_vars_ml(self):
	"""Propagate include paths and uselib variables into the ocaml environments."""
	for i in self.incpaths_lst:
		if self.bytecode_env:
			app = self.bytecode_env.append_value
			app('OCAMLPATH', '-I')
			app('OCAMLPATH', i.srcpath(self.env))
			app('OCAMLPATH', '-I')
			app('OCAMLPATH', i.bldpath(self.env))
		if self.native_env:
			app = self.native_env.append_value
			app('OCAMLPATH', '-I')
			app('OCAMLPATH', i.bldpath(self.env))
			app('OCAMLPATH', '-I')
			app('OCAMLPATH', i.srcpath(self.env))
	# copy the per-uselib variables into whichever envs are active
	varnames = ['INCLUDES', 'OCAMLFLAGS', 'OCALINKFLAGS', 'OCALINKFLAGS_OPT']
	for name in self.uselib.split():
		for vname in varnames:
			cnt = self.env[vname+'_'+name]
			if cnt:
				if self.bytecode_env: self.bytecode_env.append_value(vname, cnt)
				if self.native_env: self.native_env.append_value(vname, cnt)

@feature('ocaml')
@after('apply_core')
def apply_link_ml(self):
	"""Create the bytecode and/or native link tasks for the target."""
	if self.bytecode_env:
		ext = self.islibrary and '.cma' or '.run'
		linktask = self.create_task('ocalink')
		linktask.bytecode = 1
		linktask.set_outputs(self.path.find_or_declare(self.target + ext))
		linktask.obj = self
		linktask.env = self.bytecode_env
		self.linktasks.append(linktask)
	if self.native_env:
		if self.type == 'c_object': ext = '.o'
		elif self.islibrary: ext = '.cmxa'
		else: ext = ''
		linktask = self.create_task('ocalinkx')
		linktask.set_outputs(self.path.find_or_declare(self.target + ext))
		linktask.obj = self
		linktask.env = self.native_env
		self.linktasks.append(linktask)
		# we produce a .o file to be used by gcc
		self.compiled_tasks.append(linktask)
@extension(EXT_MLL)
def mll_hook(self, node):
	"""ocamllex: .mll -> .ml; feed the generated .ml back into the build."""
	mll_task = self.create_task('ocamllex', node, node.change_ext('.ml'), env=self.native_env)
	self.mlltasks.append(mll_task)
	self.allnodes.append(mll_task.outputs[0])

@extension(EXT_MLY)
def mly_hook(self, node):
	"""ocamlyacc: .mly -> .ml + .mli; compile the interface right away."""
	mly_task = self.create_task('ocamlyacc', node, [node.change_ext('.ml'), node.change_ext('.mli')], env=self.native_env)
	self.mlytasks.append(mly_task)
	self.allnodes.append(mly_task.outputs[0])
	task = self.create_task('ocamlcmi', mly_task.outputs[1], mly_task.outputs[1].change_ext('.cmi'), env=self.native_env)

@extension(EXT_MLI)
def mli_hook(self, node):
	"""Compile an interface file (.mli) to a .cmi."""
	task = self.create_task('ocamlcmi', node, node.change_ext('.cmi'), env=self.native_env)
	self.mlitasks.append(task)

@extension(EXT_MLC)
def mlc_hook(self, node):
	"""Compile a C stub file through ocamlopt."""
	task = self.create_task('ocamlcc', node, node.change_ext('.o'), env=self.native_env)
	self.compiled_tasks.append(task)

@extension(EXT_ML)
def ml_hook(self, node):
	"""Compile a .ml source to .cmx (native) and/or .cmo (bytecode)."""
	if self.native_env:
		task = self.create_task('ocamlx', node, node.change_ext('.cmx'), env=self.native_env)
		task.obj = self
		task.incpaths = self.bld_incpaths_lst
		self.native_tasks.append(task)
	if self.bytecode_env:
		task = self.create_task('ocaml', node, node.change_ext('.cmo'), env=self.bytecode_env)
		task.obj = self
		task.bytecode = 1
		task.incpaths = self.bld_incpaths_lst
		self.bytecode_tasks.append(task)
def compile_may_start(self):
	"""runnable_status override: order the compile tasks from scanned deps.

	Executed once per task; the ordering can only be computed after the
	generated sources exist, hence the extra signature dance.
	"""
	if not getattr(self, 'flag_deps', ''):
		self.flag_deps = 1
		# the evil part is that we can only compute the dependencies after the
		# source files can be read (this means actually producing the source files)
		if getattr(self, 'bytecode', ''): alltasks = self.obj.bytecode_tasks
		else: alltasks = self.obj.native_tasks
		self.signature() # ensure that files are scanned - unfortunately
		tree = self.generator.bld
		env = self.env
		for node in self.inputs:
			lst = tree.node_deps[self.unique_id()]
			for depnode in lst:
				for t in alltasks:
					if t == self: continue
					if depnode in t.inputs:
						self.set_run_after(t)
		# TODO necessary to get the signature right - for now
		delattr(self, 'cache_sig')
		self.signature()
	return Task.Task.runnable_status(self)

# compile task classes; runnable_status/scan are patched in afterwards
b = Task.simple_task_type
cls = b('ocamlx', '${OCAMLOPT} ${OCAMLPATH} ${OCAMLFLAGS} ${INCLUDES} -c -o ${TGT} ${SRC}', color='GREEN', shell=False)
cls.runnable_status = compile_may_start
cls.scan = scan
b = Task.simple_task_type
cls = b('ocaml', '${OCAMLC} ${OCAMLPATH} ${OCAMLFLAGS} ${INCLUDES} -c -o ${TGT} ${SRC}', color='GREEN', shell=False)
cls.runnable_status = compile_may_start
cls.scan = scan
b('ocamlcmi', '${OCAMLC} ${OCAMLPATH} ${INCLUDES} -o ${TGT} -c ${SRC}', color='BLUE', before="ocaml ocamlcc ocamlx")
b('ocamlcc', 'cd ${TGT[0].bld_dir(env)} && ${OCAMLOPT} ${OCAMLFLAGS} ${OCAMLPATH} ${INCLUDES} -c ${SRC[0].abspath(env)}', color='GREEN')
b('ocamllex', '${OCAMLLEX} ${SRC} -o ${TGT}', color='BLUE', before="ocamlcmi ocaml ocamlcc")
b('ocamlyacc', '${OCAMLYACC} -b ${TGT[0].bld_base(env)} ${SRC}', color='BLUE', before="ocamlcmi ocaml ocamlcc")
def link_may_start(self):
	"""runnable_status override: topologically sort the link inputs once."""
	if not getattr(self, 'order', ''):
		# now reorder the inputs given the task dependencies
		if getattr(self, 'bytecode', 0): alltasks = self.obj.bytecode_tasks
		else: alltasks = self.obj.native_tasks
		# this part is difficult, we do not have a total order on the tasks
		# if the dependencies are wrong, this may not stop
		seen = []
		pendant = []+alltasks
		while pendant:
			task = pendant.pop(0)
			if task in seen: continue
			for x in task.run_after:
				if not x in seen:
					# a prerequisite is not placed yet: retry this task later
					pendant.append(task)
					break
			else:
				seen.append(task)
		self.inputs = [x.outputs[0] for x in seen]
		self.order = 1
	return Task.Task.runnable_status(self)

act = b('ocalink', '${OCAMLC} -o ${TGT} ${INCLUDES} ${OCALINKFLAGS} ${SRC}', color='YELLOW', after="ocaml ocamlcc")
act.runnable_status = link_may_start
act = b('ocalinkx', '${OCAMLOPT} -o ${TGT} ${INCLUDES} ${OCALINKFLAGS_OPT} ${SRC}', color='YELLOW', after="ocamlx ocamlcc")
act.runnable_status = link_may_start
def detect(conf):
	"""Configure the OCaml toolchain: locate the compilers, set the env vars.

	Fails the configuration when ocamlopt or ocamlc is missing.
	"""
	opt = conf.find_program('ocamlopt', var='OCAMLOPT')
	occ = conf.find_program('ocamlc', var='OCAMLC')
	if (not opt) or (not occ):
		conf.fatal('The objective caml compiler was not found:\ninstall it or make it available in your PATH')
	v = conf.env
	v['OCAMLC'] = occ
	v['OCAMLOPT'] = opt
	v['OCAMLLEX'] = conf.find_program('ocamllex', var='OCAMLLEX')
	v['OCAMLYACC'] = conf.find_program('ocamlyacc', var='OCAMLYACC')
	v['OCAMLFLAGS'] = ''
	# ask ocamlc for its library directory once instead of spawning it three times
	where = Utils.cmd_output(conf.env['OCAMLC'] + ' -where').strip() + os.sep
	v['OCAMLLIB'] = where
	v['LIBPATH_OCAML'] = where
	v['CPPPATH_OCAML'] = where
	v['LIB_OCAML'] = 'camlrun'

109
tools/wafadmin/Tools/perl.py

@ -1,109 +0,0 @@
#!/usr/bin/env python
# encoding: utf-8
# andersg at 0x63.nu 2007
import os
import Task, Options, Utils
from Configure import conf
from TaskGen import extension, taskgen, feature, before
xsubpp_str = '${PERL} ${XSUBPP} -noprototypes -typemap ${EXTUTILS_TYPEMAP} ${SRC} > ${TGT}'
EXT_XS = ['.xs']
@before('apply_incpaths', 'apply_type_vars', 'apply_lib_vars')
@feature('perlext')
def init_perlext(self):
	"""Set up a task generator building a perl extension (uselib + dll pattern)."""
	self.uselib = self.to_list(getattr(self, 'uselib', ''))
	if not 'PERL' in self.uselib: self.uselib.append('PERL')
	if not 'PERLEXT' in self.uselib: self.uselib.append('PERLEXT')
	# perl extensions use their own shared-object naming scheme
	self.env['shlib_PATTERN'] = self.env['perlext_PATTERN']

@extension(EXT_XS)
def xsubpp_file(self, node):
	"""Process a .xs file with xsubpp and compile the generated C file."""
	outnode = node.change_ext('.c')
	self.create_task('xsubpp', node, outnode)
	self.allnodes.append(outnode)

Task.simple_task_type('xsubpp', xsubpp_str, color='BLUE', before='cc cxx', shell=False)
@conf
def check_perl_version(conf, minver=None):
	"""
	Checks if perl is installed.

	If installed the variable PERL will be set in environment.
	Perl binary can be overridden by --with-perl-binary config variable.
	*minver* is an optional tuple of ints; the check fails when the
	detected perl is older.
	"""
	if getattr(Options.options, 'perlbinary', None):
		conf.env.PERL = Options.options.perlbinary
	else:
		conf.find_program('perl', var='PERL', mandatory=True)
	try:
		version = Utils.cmd_output([conf.env.PERL, '-e', 'printf "%vd",$^V'])
	except Exception:
		# bugfix: a bare except here also swallowed KeyboardInterrupt/SystemExit
		conf.fatal('could not determine the perl version')
	conf.env.PERL_VERSION = version
	cver = ''
	if minver:
		try:
			ver = tuple(map(int, version.split('.')))
		except Exception:
			conf.fatal('unsupported perl version %r' % version)
		if ver < minver:
			conf.fatal('perl is too old')
		cver = '.'.join(map(str, minver))
	conf.check_message('perl', cver, True, version)
@conf
def check_perl_module(conf, module):
	"""
	Check if specified perlmodule is installed.

	Minimum version can be specified by specifying it after modulename
	like this:
	conf.check_perl_module("Some::Module 2.92")

	Returns True when 'perl -e "use <module>"' exits successfully.
	"""
	cmd = [conf.env['PERL'], '-e', 'use %s' % module]
	r = Utils.pproc.call(cmd, stdout=Utils.pproc.PIPE, stderr=Utils.pproc.PIPE) == 0
	conf.check_message("perl module %s" % module, "", r)
	return r

@conf
def check_perl_ext_devel(conf):
	"""
	Check for configuration needed to build perl extensions.

	Sets different xxx_PERLEXT variables in the environment.
	Also sets the ARCHDIR_PERL variable useful as installation path,
	which can be overridden by --with-perl-archdir
	"""
	if not conf.env.PERL:
		conf.fatal('perl detection is required first')
	def read_out(cmd):
		# run a one-liner against perl's Config module and split the output
		return Utils.to_list(Utils.cmd_output([conf.env.PERL, '-MConfig', '-e', cmd]))
	conf.env.LINKFLAGS_PERLEXT = read_out('print $Config{lddlflags}')
	conf.env.CPPPATH_PERLEXT = read_out('print "$Config{archlib}/CORE"')
	conf.env.CCFLAGS_PERLEXT = read_out('print "$Config{ccflags} $Config{cccdlflags}"')
	conf.env.XSUBPP = read_out('print "$Config{privlib}/ExtUtils/xsubpp$Config{exe_ext}"')
	conf.env.EXTUTILS_TYPEMAP = read_out('print "$Config{privlib}/ExtUtils/typemap"')
	conf.env.perlext_PATTERN = '%s.' + read_out('print $Config{dlext}')[0]
	if getattr(Options.options, 'perlarchdir', None):
		conf.env.ARCHDIR_PERL = Options.options.perlarchdir
	else:
		conf.env.ARCHDIR_PERL = read_out('print $Config{sitearch}')[0]
def set_options(opt):
	"""Register the perl-specific command line options."""
	for flag, dest, text in (
		("--with-perl-binary", "perlbinary", 'Specify alternate perl binary'),
		("--with-perl-archdir", "perlarchdir", 'Specify directory where to install arch specific files'),
	):
		opt.add_option(flag, type="string", dest=dest, help=text, default=None)

505
tools/wafadmin/Tools/qt4.py

@ -1,505 +0,0 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2006 (ita)
"""
Qt4 support
If QT4_ROOT is given (absolute path), the configuration will look in it first
This module also demonstrates how to add tasks dynamically (when the build has started)
"""
try:
from xml.sax import make_parser
from xml.sax.handler import ContentHandler
except ImportError:
has_xml = False
ContentHandler = object
else:
has_xml = True
import os, sys
import ccroot, cxx
import TaskGen, Task, Utils, Runner, Options, Node, Configure
from TaskGen import taskgen, feature, after, extension
from Logs import error
from Constants import *
MOC_H = ['.h', '.hpp', '.hxx', '.hh']
EXT_RCC = ['.qrc']
EXT_UI = ['.ui']
EXT_QT4 = ['.cpp', '.cc', '.cxx', '.C']
class qxx_task(Task.Task):
	"A cpp task that may create a moc task dynamically"
	before = ['cxx_link', 'static_link']
	def __init__(self, *k, **kw):
		Task.Task.__init__(self, *k, **kw)
		# set once the moc tasks have been created (see add_moc_tasks)
		self.moc_done = 0
	def scan(self):
		"""C++ scan, with .moc files moved from the node deps to the names."""
		(nodes, names) = ccroot.scan(self)
		# for some reasons (variants) the moc node may end in the list of node deps
		for x in nodes:
			if x.name.endswith('.moc'):
				nodes.remove(x)
				names.append(x.relpath_gen(self.inputs[0].parent))
		return (nodes, names)
	def runnable_status(self):
		"""Delay the status computation until the moc tasks exist and have run."""
		if self.moc_done:
			# if there is a moc task, delay the computation of the file signature
			for t in self.run_after:
				if not t.hasrun:
					return ASK_LATER
			# the moc file enters in the dependency calculation
			# so we need to recompute the signature when the moc file is present
			self.signature()
			return Task.Task.runnable_status(self)
		else:
			# yes, really, there are people who generate cxx files
			for t in self.run_after:
				if not t.hasrun:
					return ASK_LATER
			self.add_moc_tasks()
			return ASK_LATER
	def add_moc_tasks(self):
		"""Create the moc tasks for the headers this source includes as .moc."""
		node = self.inputs[0]
		tree = node.__class__.bld
		try:
			# compute the signature once to know if there is a moc file to create
			self.signature()
		except KeyError:
			# the moc file may be referenced somewhere else
			pass
		else:
			# remove the signature, it must be recomputed with the moc task
			delattr(self, 'cache_sig')
		moctasks=[]
		mocfiles=[]
		variant = node.variant(self.env)
		try:
			tmp_lst = tree.raw_deps[self.unique_id()]
			tree.raw_deps[self.unique_id()] = []
		except KeyError:
			tmp_lst = []
		for d in tmp_lst:
			if not d.endswith('.moc'): continue
			# paranoid check
			if d in mocfiles:
				error("paranoia owns")
				continue
			# process that base.moc only once
			mocfiles.append(d)
			# find the extension (performed only when the .cpp has changes)
			base2 = d[:-4]
			for path in [node.parent] + self.generator.env['INC_PATHS']:
				tree.rescan(path)
				vals = getattr(Options.options, 'qt_header_ext', '') or MOC_H
				for ex in vals:
					h_node = path.find_resource(base2 + ex)
					if h_node:
						break
				else:
					continue
				break
			else:
				raise Utils.WafError("no header found for %s which is a moc file" % str(d))
			m_node = h_node.change_ext('.moc')
			tree.node_deps[(self.inputs[0].parent.id, self.env.variant(), m_node.name)] = h_node
			# create the task
			task = Task.TaskBase.classes['moc'](self.env, normal=0)
			task.set_inputs(h_node)
			task.set_outputs(m_node)
			generator = tree.generator
			generator.outstanding.insert(0, task)
			generator.total += 1
			moctasks.append(task)
		# remove raw deps except the moc files to save space (optimization)
		tmp_lst = tree.raw_deps[self.unique_id()] = mocfiles
		# look at the file inputs, it is set right above
		lst = tree.node_deps.get(self.unique_id(), ())
		for d in lst:
			name = d.name
			if name.endswith('.moc'):
				task = Task.TaskBase.classes['moc'](self.env, normal=0)
				task.set_inputs(tree.node_deps[(self.inputs[0].parent.id, self.env.variant(), name)]) # 1st element in a tuple
				task.set_outputs(d)
				generator = tree.generator
				generator.outstanding.insert(0, task)
				generator.total += 1
				moctasks.append(task)
		# simple scheduler dependency: run the moc task before others
		self.run_after = moctasks
		self.moc_done = 1
	# reuse the plain c++ compilation run method
	run = Task.TaskBase.classes['cxx'].__dict__['run']
def translation_update(task):
	"""Run lupdate on each input source to refresh the .ts translation files."""
	outs = [a.abspath(task.env) for a in task.outputs]
	outs = " ".join(outs)
	lupdate = task.env['QT_LUPDATE']
	for x in task.inputs:
		file = x.abspath(task.env)
		cmd = "%s %s -ts %s" % (lupdate, file, outs)
		Utils.pprint('BLUE', cmd)
		task.generator.bld.exec_command(cmd)
class XMLHandler(ContentHandler):
	"""SAX handler that records the <file> entries of a Qt .qrc resource file."""

	def __init__(self):
		self.buf = []
		self.files = []

	def startElement(self, name, attrs):
		if name != 'file':
			return
		self.buf = []

	def endElement(self, name):
		if name != 'file':
			return
		self.files.append(''.join(self.buf))

	def characters(self, cars):
		# character data may arrive in several chunks; collect them all
		self.buf.append(cars)
def scan(self):
	"add the dependency on the files referenced in the qrc"
	node = self.inputs[0]
	parser = make_parser()
	curHandler = XMLHandler()
	parser.setContentHandler(curHandler)
	fi = open(self.inputs[0].abspath(self.env))
	parser.parse(fi)
	fi.close()
	nodes = []
	names = []
	root = self.inputs[0].parent
	for x in curHandler.files:
		# resolve each referenced file relative to the qrc location
		nd = root.find_resource(x)
		if nd: nodes.append(nd)
		else: names.append(x)
	return (nodes, names)
@extension(EXT_RCC)
def create_rcc_task(self, node):
	"hook for rcc files"
	# the .qrc file is first expanded into a C++ source, then compiled
	cpp_node = node.change_ext('_rc.cpp')
	self.create_task('rcc', node, cpp_node)
	obj_task = self.create_task('cxx', cpp_node, cpp_node.change_ext('.o'))
	self.compiled_tasks.append(obj_task)
	return obj_task
@extension(EXT_UI)
def create_uic_task(self, node):
	"hook for uic tasks"
	task = self.create_task('ui4', node)
	# the generated header follows ui_PATTERN; node.name[:-3] strips the '.ui' suffix
	header_name = self.env['ui_PATTERN'] % node.name[:-3]
	task.outputs = [self.path.find_or_declare(header_name)]
	return task
class qt4_taskgen(cxx.cxx_taskgen):
	# A regular C++ task generator with the 'qt4' feature enabled
	# (moc/uic/rcc/translation handling).
	def __init__(self, *k, **kw):
		cxx.cxx_taskgen.__init__(self, *k, **kw)
		self.features.append('qt4')
@extension('.ts')
def add_lang(self, node):
	"""add all the .ts file into self.lang"""
	existing = self.to_list(getattr(self, 'lang', []))
	self.lang = existing + [node]
@feature('qt4')
@after('apply_link')
def apply_qt4(self):
	"""Post-link Qt4 processing: compile .ts translations to .qm, optionally
	bundle them into a generated .qrc, and propagate the C++ defines/include
	flags to moc."""
	if getattr(self, 'lang', None):
		# 'update' requests regeneration of the .ts files from the sources
		update = getattr(self, 'update', None)
		lst=[]
		trans=[]
		for l in self.to_list(self.lang):
			if not isinstance(l, Node.Node):
				l = self.path.find_resource(l+'.ts')
			t = self.create_task('ts2qm', l, l.change_ext('.qm'))
			lst.append(t.outputs[0])
			if update:
				trans.append(t.inputs[0])

		trans_qt4 = getattr(Options.options, 'trans_qt4', False)
		if update and trans_qt4:
			# we need the cpp files given, except the rcc task we create after
			# FIXME may be broken
			u = Task.TaskCmd(translation_update, self.env, 2)
			u.inputs = [a.inputs[0] for a in self.compiled_tasks]
			u.outputs = trans

		if getattr(self, 'langname', None):
			# collect all .qm files into <langname>.qrc, compile it with rcc
			# and link the resulting object into the target
			t = Task.TaskBase.classes['qm2rcc'](self.env)
			t.set_inputs(lst)
			t.set_outputs(self.path.find_or_declare(self.langname+'.qrc'))
			t.path = self.path
			k = create_rcc_task(self, t.outputs[0])
			self.link_task.inputs.append(k.outputs[0])

	# moc needs the same preprocessor defines and include paths as the compiler
	self.env.append_value('MOC_FLAGS', self.env._CXXDEFFLAGS)
	self.env.append_value('MOC_FLAGS', self.env._CXXINCFLAGS)
@extension(EXT_QT4)
def cxx_hook(self, node):
	"""Compile a Qt4 C++ source with the moc-aware 'qxx' task instead of plain cxx."""
	# fall back to an index-based object suffix when none is configured
	try:
		obj_ext = self.obj_ext
	except AttributeError:
		obj_ext = '_%d.o' % self.idx
	tsk = self.create_task('qxx', node, node.change_ext(obj_ext))
	self.compiled_tasks.append(tsk)
	return tsk
def process_qm2rcc(task):
	"""Write a Qt resource (.qrc) file listing the compiled .qm translation files.

	task.inputs are the .qm nodes, task.outputs[0] is the .qrc node to create.
	Each .qm file is referenced relative to the qrc directory (task.path).
	"""
	outfile = task.outputs[0].abspath(task.env)
	f = open(outfile, 'w')
	# close the handle even if a write fails, so a partially-written
	# .qrc does not also leak a file descriptor
	try:
		f.write('<!DOCTYPE RCC><RCC version="1.0">\n<qresource>\n')
		for k in task.inputs:
			f.write(' <file>')
			f.write(k.path_to_parent(task.path))
			f.write('</file>\n')
		f.write('</qresource>\n</RCC>')
	finally:
		f.close()
# Task type registrations: command strings are expanded against the env at
# build time; before/after constrain the scheduling order between task types.
b = Task.simple_task_type
b('moc', '${QT_MOC} ${MOC_FLAGS} ${SRC} ${MOC_ST} ${TGT}', color='BLUE', vars=['QT_MOC', 'MOC_FLAGS'], shell=False)
cls = b('rcc', '${QT_RCC} -name ${SRC[0].name} ${SRC[0].abspath(env)} ${RCC_ST} -o ${TGT}', color='BLUE', before='cxx moc qxx_task', after="qm2rcc", shell=False)
# the rcc task scans its .qrc input for the files it references
cls.scan = scan
b('ui4', '${QT_UIC} ${SRC} -o ${TGT}', color='BLUE', before='cxx moc qxx_task', shell=False)
b('ts2qm', '${QT_LRELEASE} ${QT_LRELEASE_FLAGS} ${SRC} -qm ${TGT}', color='BLUE', before='qm2rcc', shell=False)
Task.task_type_from_func('qm2rcc', vars=[], func=process_qm2rcc, color='BLUE', before='rcc', after='ts2qm')
def detect_qt4(conf):
	"""Locate the Qt4 toolchain (qmake, uic, moc, rcc, lrelease, lupdate) and
	populate conf.env with the programs, flags and library/include paths.
	Falls back to filesystem probing of the Qt libraries when pkg-config
	is not available. Fails fatally when qmake or uic cannot be found."""
	env = conf.env
	opt = Options.options
	qtdir = getattr(opt, 'qtdir', '')
	qtbin = getattr(opt, 'qtbin', '')
	qtlibs = getattr(opt, 'qtlibs', '')
	useframework = getattr(opt, 'use_qt4_osxframework', True)

	paths = []

	# the path to qmake has been given explicitely
	if qtbin:
		paths = [qtbin]

	# the qt directory has been given - we deduce the qt binary path
	if not qtdir:
		qtdir = conf.environ.get('QT4_ROOT', '')
		qtbin = os.path.join(qtdir, 'bin')
		paths = [qtbin]

	# no qtdir, look in the path and in /usr/local/Trolltech
	if not qtdir:
		paths = os.environ.get('PATH', '').split(os.pathsep)
		paths.append('/usr/share/qt4/bin/')
		try:
			lst = os.listdir('/usr/local/Trolltech/')
		except OSError:
			pass
		else:
			if lst:
				lst.sort()
				lst.reverse()

				# keep the highest version
				qtdir = '/usr/local/Trolltech/%s/' % lst[0]
				qtbin = os.path.join(qtdir, 'bin')
				paths.append(qtbin)

	# at the end, try to find qmake in the paths given
	# keep the one with the highest version
	cand = None
	prev_ver = ['4', '0', '0']
	for qmk in ['qmake-qt4', 'qmake4', 'qmake']:
		qmake = conf.find_program(qmk, path_list=paths)
		if qmake:
			try:
				version = Utils.cmd_output([qmake, '-query', 'QT_VERSION']).strip()
			except ValueError:
				pass
			else:
				if version:
					# NOTE(review): lexicographic comparison of version string
					# lists, e.g. '10' < '9' — acceptable within the 4.x series
					new_ver = version.split('.')
					if new_ver > prev_ver:
						cand = qmake
						prev_ver = new_ver
	if cand:
		qmake = cand
	else:
		conf.fatal('could not find qmake for qt4')

	conf.env.QMAKE = qmake
	# query the qmake of the chosen Qt for its install layout
	qtincludes = Utils.cmd_output([qmake, '-query', 'QT_INSTALL_HEADERS']).strip()
	qtdir = Utils.cmd_output([qmake, '-query', 'QT_INSTALL_PREFIX']).strip() + os.sep
	qtbin = Utils.cmd_output([qmake, '-query', 'QT_INSTALL_BINS']).strip() + os.sep

	if not qtlibs:
		try:
			qtlibs = Utils.cmd_output([qmake, '-query', 'QT_INSTALL_LIBS']).strip() + os.sep
		except ValueError:
			qtlibs = os.path.join(qtdir, 'lib')

	def find_bin(lst, var):
		# store the first program of lst found in paths into env[var]
		for f in lst:
			ret = conf.find_program(f, path_list=paths)
			if ret:
				env[var]=ret
				break

	vars = "QtCore QtGui QtUiTools QtNetwork QtOpenGL QtSql QtSvg QtTest QtXml QtWebKit Qt3Support".split()

	find_bin(['uic-qt3', 'uic3'], 'QT_UIC3')

	find_bin(['uic-qt4', 'uic'], 'QT_UIC')
	if not env['QT_UIC']:
		conf.fatal('cannot find the uic compiler for qt4')

	try:
		version = Utils.cmd_output(env['QT_UIC'] + " -version 2>&1").strip()
	except ValueError:
		conf.fatal('your uic compiler is for qt3, add uic for qt4 to your path')

	# reject a qt3-era uic
	version = version.replace('Qt User Interface Compiler ','')
	version = version.replace('User Interface Compiler for Qt', '')
	if version.find(" 3.") != -1:
		conf.check_message('uic version', '(too old)', 0, option='(%s)'%version)
		sys.exit(1)
	conf.check_message('uic version', '', 1, option='(%s)'%version)

	find_bin(['moc-qt4', 'moc'], 'QT_MOC')
	find_bin(['rcc'], 'QT_RCC')
	find_bin(['lrelease-qt4', 'lrelease'], 'QT_LRELEASE')
	find_bin(['lupdate-qt4', 'lupdate'], 'QT_LUPDATE')

	env['UIC3_ST']= '%s -o %s'
	env['UIC_ST'] = '%s -o %s'
	env['MOC_ST'] = '-o'
	env['ui_PATTERN'] = 'ui_%s.h'
	env['QT_LRELEASE_FLAGS'] = ['-silent']

	vars_debug = [a+'_debug' for a in vars]

	try:
		conf.find_program('pkg-config', var='pkgconfig', path_list=paths, mandatory=True)
	except Configure.ConfigurationError:
		# no pkg-config: probe the filesystem for each Qt module library
		for lib in vars_debug+vars:
			uselib = lib.upper()

			d = (lib.find('_debug') > 0) and 'd' or ''

			# original author seems to prefer static to shared libraries
			for (pat, kind) in ((conf.env.staticlib_PATTERN, 'STATIC'), (conf.env.shlib_PATTERN, '')):

				conf.check_message_1('Checking for %s %s' % (lib, kind))

				for ext in ['', '4']:
					path = os.path.join(qtlibs, pat % (lib + d + ext))
					if os.path.exists(path):
						env.append_unique(kind + 'LIB_' + uselib, lib + d + ext)
						conf.check_message_2('ok ' + path, 'GREEN')
						break
					path = os.path.join(qtbin, pat % (lib + d + ext))
					if os.path.exists(path):
						env.append_unique(kind + 'LIB_' + uselib, lib + d + ext)
						conf.check_message_2('ok ' + path, 'GREEN')
						break
				else:
					conf.check_message_2('not found', 'YELLOW')
					continue
				break

			env.append_unique('LIBPATH_' + uselib, qtlibs)
			env.append_unique('CPPPATH_' + uselib, qtincludes)
			env.append_unique('CPPPATH_' + uselib, qtincludes + os.sep + lib)
	else:
		# pkg-config available: let it provide cflags/libs per module
		for i in vars_debug+vars:
			try:
				conf.check_cfg(package=i, args='--cflags --libs --silence-errors', path=conf.env.pkgconfig)
			except ValueError:
				pass

	# the libpaths are set nicely, unfortunately they make really long command-lines
	# remove the qtcore ones from qtgui, etc
	def process_lib(vars_, coreval):
		for d in vars_:
			var = d.upper()
			if var == 'QTCORE': continue

			value = env['LIBPATH_'+var]
			if value:
				core = env[coreval]
				accu = []
				for lib in value:
					if lib in core: continue
					accu.append(lib)
				env['LIBPATH_'+var] = accu

	process_lib(vars, 'LIBPATH_QTCORE')
	process_lib(vars_debug, 'LIBPATH_QTCORE_DEBUG')

	# rpath if wanted
	want_rpath = getattr(Options.options, 'want_rpath', 1)
	if want_rpath:
		def process_rpath(vars_, coreval):
			# mirror the LIBPATH de-duplication for the rpath flags
			for d in vars_:
				var = d.upper()
				value = env['LIBPATH_'+var]
				if value:
					core = env[coreval]
					accu = []
					for lib in value:
						if var != 'QTCORE':
							if lib in core:
								continue
						accu.append('-Wl,--rpath='+lib)
					env['RPATH_'+var] = accu
		process_rpath(vars, 'LIBPATH_QTCORE')
		process_rpath(vars_debug, 'LIBPATH_QTCORE_DEBUG')

	env['QTLOCALE'] = str(env['PREFIX'])+'/share/locale'
def detect(conf):
	"""Configuration entry point for this tool: delegate to detect_qt4."""
	detect_qt4(conf)
def set_options(opt):
	"""Register the Qt4 command-line options (rpath, qt paths, translations)."""
	opt.add_option('--want-rpath', type='int', default=1, dest='want_rpath', help='set rpath to 1 or 0 [Default 1]')

	opt.add_option('--header-ext',
		type='string',
		default='',
		help='header extension for moc files',
		dest='qt_header_ext')

	# --qtdir, --qtbin, --qtlibs override the autodetection in detect_qt4
	for i in 'qtdir qtbin qtlibs'.split():
		opt.add_option('--'+i, type='string', default='', dest=i)

	if sys.platform == "darwin":
		opt.add_option('--no-qt4-framework', action="store_false", help='do not use the framework version of Qt4 in OS X', dest='use_qt4_osxframework',default=True)

	opt.add_option('--translate', action="store_true", help="collect translation strings", dest="trans_qt4", default=False)

120
tools/wafadmin/Tools/ruby.py

@ -1,120 +0,0 @@
#!/usr/bin/env python
# encoding: utf-8
# daniel.svensson at purplescout.se 2008
import os
import Task, Options, Utils
from TaskGen import before, feature, after
from Configure import conf
@feature('rubyext')
@before('apply_incpaths', 'apply_type_vars', 'apply_lib_vars', 'apply_bundle')
@after('default_cc', 'vars_target_cshlib')
def init_rubyext(self):
	"""Prepare a ruby-extension task generator: default install path and
	the RUBY/RUBYEXT uselib variables."""
	self.default_install_path = '${ARCHDIR_RUBY}'
	self.uselib = self.to_list(getattr(self, 'uselib', ''))
	for lib in ('RUBY', 'RUBYEXT'):
		if lib not in self.uselib:
			self.uselib.append(lib)
@feature('rubyext')
@before('apply_link')
def apply_ruby_so_name(self):
	"""Name the shared object according to the ruby extension pattern (DLEXT)."""
	env = self.env
	env['shlib_PATTERN'] = env['rubyext_PATTERN']
@conf
def check_ruby_version(conf, minver=()):
	"""
	Checks if ruby is installed.
	If installed the variable RUBY will be set in environment.
	Ruby binary can be overridden by --with-ruby-binary config variable

	minver is an optional tuple, e.g. (1, 8, 0); configuration fails
	when the detected interpreter is older.
	"""
	if Options.options.rubybinary:
		conf.env.RUBY = Options.options.rubybinary
	else:
		conf.find_program("ruby", var="RUBY", mandatory=True)

	ruby = conf.env.RUBY
	try:
		version = Utils.cmd_output([ruby, '-e', 'puts defined?(VERSION) ? VERSION : RUBY_VERSION']).strip()
	# was a bare 'except:', which also swallowed SystemExit/KeyboardInterrupt
	except Exception:
		conf.fatal('could not determine ruby version')
	conf.env.RUBY_VERSION = version

	try:
		ver = tuple(map(int, version.split(".")))
	# only a malformed version string can fail here
	except ValueError:
		conf.fatal('unsupported ruby version %r' % version)

	cver = ''
	if minver:
		if ver < minver:
			conf.fatal('ruby is too old')
		cver = ".".join(str(x) for x in minver)
	conf.check_message('ruby', cver, True, version)
@conf
def check_ruby_ext_devel(conf):
	"""Check that the ruby development headers/libraries are usable and fill
	the RUBYEXT build variables (include paths, link flags, install dirs).
	Requires check_ruby_version and a C/C++ compiler to have run first."""
	if not conf.env.RUBY:
		conf.fatal('ruby detection is required first')

	if not conf.env.CC_NAME and not conf.env.CXX_NAME:
		conf.fatal('load a c/c++ compiler first')

	version = tuple(map(int, conf.env.RUBY_VERSION.split(".")))

	def read_out(cmd):
		# run a one-liner in the detected ruby and split its output
		return Utils.to_list(Utils.cmd_output([conf.env.RUBY, '-rrbconfig', '-e', cmd]))

	def read_config(key):
		return read_out('puts Config::CONFIG[%r]' % key)

	ruby = conf.env['RUBY']
	archdir = read_config('archdir')
	cpppath = archdir
	# ruby >= 1.9 moved the headers to rubyhdrdir (+ per-arch subdirectory)
	if version >= (1, 9, 0):
		ruby_hdrdir = read_config('rubyhdrdir')
		cpppath += ruby_hdrdir
		cpppath += [os.path.join(ruby_hdrdir[0], read_config('arch')[0])]

	conf.check(header_name='ruby.h', includes=cpppath, mandatory=True, errmsg='could not find ruby header file')

	conf.env.LIBPATH_RUBYEXT = read_config('libdir')
	conf.env.LIBPATH_RUBYEXT += archdir
	conf.env.CPPPATH_RUBYEXT = cpppath
	conf.env.CCFLAGS_RUBYEXT = read_config("CCDLFLAGS")
	conf.env.rubyext_PATTERN = '%s.' + read_config('DLEXT')[0]

	# ok this is really stupid, but the command and flags are combined.
	# so we try to find the first argument...
	flags = read_config('LDSHARED')
	while flags and flags[0][0] != '-':
		flags = flags[1:]

	# we also want to strip out the deprecated ppc flags
	if len(flags) > 1 and flags[1] == "ppc":
		flags = flags[2:]

	conf.env.LINKFLAGS_RUBYEXT = flags
	conf.env.LINKFLAGS_RUBYEXT += read_config("LIBS")
	conf.env.LINKFLAGS_RUBYEXT += read_config("LIBRUBYARG_SHARED")

	# install locations, overridable from the command line
	if Options.options.rubyarchdir:
		conf.env.ARCHDIR_RUBY = Options.options.rubyarchdir
	else:
		conf.env.ARCHDIR_RUBY = read_config('sitearchdir')[0]

	if Options.options.rubylibdir:
		conf.env.LIBDIR_RUBY = Options.options.rubylibdir
	else:
		conf.env.LIBDIR_RUBY = read_config('sitelibdir')[0]
def set_options(opt):
	"""Register the ruby-related command-line options."""
	opt.add_option('--with-ruby-archdir', type='string', dest='rubyarchdir', help='Specify directory where to install arch specific files')
	opt.add_option('--with-ruby-libdir', type='string', dest='rubylibdir', help='Specify alternate ruby library path')
	opt.add_option('--with-ruby-binary', type='string', dest='rubybinary', help='Specify alternate ruby binary')

227
tools/wafadmin/Tools/tex.py

@ -1,227 +0,0 @@
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2006 (ita)
"TeX/LaTeX/PDFLaTeX support"
import os, re
import Utils, TaskGen, Task, Runner, Build
from TaskGen import feature, before
from Logs import error, warn, debug
re_tex = re.compile(r'\\(?P<type>include|input|import|bringin){(?P<file>[^{}]*)}', re.M)
def scan(self):
	"""Dependency scanner for (pdf)latex tasks: parse the .tex source for
	\\include/\\input-style directives and resolve them against the current
	directory, trying the '', '.tex' and '.ltx' extensions.
	Returns (nodes, names): resolved nodes and unresolved raw names."""
	node = self.inputs[0]
	env = self.env

	nodes = []
	names = []
	if not node: return (nodes, names)

	code = Utils.readf(node.abspath(env))

	curdirnode = self.curdirnode
	abs = curdirnode.abspath()
	for match in re_tex.finditer(code):
		path = match.group('file')
		if path:
			for k in ['', '.tex', '.ltx']:
				# add another loop for the tex include paths?
				debug('tex: trying %s%s' % (path, k))
				try:
					os.stat(abs+os.sep+path+k)
				except OSError:
					continue
				found = path+k
				node = curdirnode.find_resource(found)
				if node:
					nodes.append(node)
				else:
					# NOTE(review): the file exists on disk but is unknown to
					# the node tree; recorded as a raw name only — confirm the
					# original else-attachment, the scraped source lost indentation
					debug('tex: could not find %s' % path)
					names.append(path)

	debug("tex: found the following : %s and names %s" % (nodes, names))
	return (nodes, names)
g_bibtex_re = re.compile('bibdata', re.M)
def tex_build(task, command='LATEX'):
	"""Run the (pdf)latex toolchain on the task input: first pass, optional
	bibtex and makeindex, then repeated passes until the .aux file stabilizes
	(at most 10 iterations). Returns 0 on success, the failing command's
	return code otherwise."""
	env = task.env
	bld = task.generator.bld

	com = '%s %s' % (env[command], env.get_flat(command+'FLAGS'))
	# batchmode prevents latex from stopping at an interactive prompt
	if not env['PROMPT_LATEX']: com = "%s %s" % (com, '-interaction=batchmode')

	node = task.inputs[0]
	reldir = node.bld_dir(env)
	srcfile = node.srcpath(env)

	# build a relative path from the build directory back to the source file
	lst = []
	for c in Utils.split_path(reldir):
		if c: lst.append('..')
	sr = os.path.join(*(lst + [srcfile]))
	sr2 = os.path.join(*(lst + [node.parent.srcpath(env)]))

	aux_node = node.change_ext('.aux')
	idx_node = node.change_ext('.idx')

	hash = ''
	old_hash = ''

	nm = aux_node.name
	docuname = nm[ : len(nm) - 4 ] # 4 is the size of ".aux"

	latex_compile_cmd = 'cd %s && TEXINPUTS=%s:$TEXINPUTS %s %s' % (reldir, sr2, com, sr)
	warn('first pass on %s' % command)
	ret = bld.exec_command(latex_compile_cmd)
	if ret: return ret

	# look in the .aux file if there is a bibfile to process
	try:
		ct = Utils.readf(aux_node.abspath(env))
	except (OSError, IOError):
		error('error bibtex scan')
	else:
		fo = g_bibtex_re.findall(ct)

		# yes, there is a .aux file to process
		if fo:
			bibtex_compile_cmd = 'cd %s && BIBINPUTS=%s:$BIBINPUTS %s %s' % (reldir, sr2, env['BIBTEX'], docuname)

			warn('calling bibtex')
			ret = bld.exec_command(bibtex_compile_cmd)
			if ret:
				error('error when calling bibtex %s' % bibtex_compile_cmd)
				return ret

	# look on the filesystem if there is a .idx file to process
	try:
		idx_path = idx_node.abspath(env)
		os.stat(idx_path)
	except OSError:
		error('error file.idx scan')
	else:
		makeindex_compile_cmd = 'cd %s && %s %s' % (reldir, env['MAKEINDEX'], idx_path)
		warn('calling makeindex')
		ret = bld.exec_command(makeindex_compile_cmd)
		if ret:
			error('error when calling makeindex %s' % makeindex_compile_cmd)
			return ret

	i = 0
	while i < 10:
		# prevent against infinite loops - one never knows
		i += 1

		# watch the contents of file.aux
		old_hash = hash
		try:
			hash = Utils.h_file(aux_node.abspath(env))
		except KeyError:
			error('could not read aux.h -> %s' % aux_node.abspath(env))
			pass

		# debug
		#print "hash is, ", hash, " ", old_hash

		# stop if file.aux does not change anymore
		if hash and hash == old_hash: break

		# run the command
		warn('calling %s' % command)
		ret = bld.exec_command(latex_compile_cmd)
		if ret:
			error('error when calling %s %s' % (command, latex_compile_cmd))
			return ret

	# 0 means no error
	return 0
# env variables that trigger a rebuild of latex tasks when they change
latex_vardeps = ['LATEX', 'LATEXFLAGS']
def latex_build(task):
	"""Build a .dvi from a .tex input using the LATEX program."""
	return tex_build(task, 'LATEX')

# env variables that trigger a rebuild of pdflatex tasks when they change
pdflatex_vardeps = ['PDFLATEX', 'PDFLATEXFLAGS']
def pdflatex_build(task):
	"""Build a .pdf from a .tex input using the PDFLATEX program."""
	return tex_build(task, 'PDFLATEX')
class tex_taskgen(TaskGen.task_gen):
	# plain task generator subclass registered for 'tex' targets
	def __init__(self, *k, **kw):
		TaskGen.task_gen.__init__(self, *k, **kw)
@feature('tex')
@before('apply_core')
def apply_tex(self):
	"""Create latex/pdflatex tasks for each source file, wire the manual
	dependencies declared via 'deps', and add dvips/dvipdf/pdf2ps conversion
	tasks according to the 'outs' attribute."""
	if not getattr(self, 'type', None) in ['latex', 'pdflatex']:
		self.type = 'pdflatex'

	tree = self.bld
	outs = Utils.to_list(getattr(self, 'outs', []))

	# prompt for incomplete files (else the batchmode is used)
	self.env['PROMPT_LATEX'] = getattr(self, 'prompt', 1)

	deps_lst = []

	if getattr(self, 'deps', None):
		deps = self.to_list(self.deps)
		for filename in deps:
			n = self.path.find_resource(filename)
			if not n in deps_lst: deps_lst.append(n)

	self.source = self.to_list(self.source)
	for filename in self.source:
		base, ext = os.path.splitext(filename)

		node = self.path.find_resource(filename)
		if not node: raise Utils.WafError('cannot find %s' % filename)

		if self.type == 'latex':
			task = self.create_task('latex', node, node.change_ext('.dvi'))
		elif self.type == 'pdflatex':
			task = self.create_task('pdflatex', node, node.change_ext('.pdf'))

		task.env = self.env
		task.curdirnode = self.path

		# add the manual dependencies
		if deps_lst:
			# NOTE(review): 'variant' is computed but never used below
			variant = node.variant(self.env)
			try:
				lst = tree.node_deps[task.unique_id()]
				for n in deps_lst:
					if not n in lst:
						lst.append(n)
			except KeyError:
				tree.node_deps[task.unique_id()] = deps_lst

		# post-processing: convert the dvi/pdf output as requested by 'outs'
		if self.type == 'latex':
			if 'ps' in outs:
				tsk = self.create_task('dvips', task.outputs, node.change_ext('.ps'))
				tsk.env.env = {'TEXINPUTS' : node.parent.abspath() + ':' + self.path.abspath() + ':' + self.path.abspath(self.env)}
			if 'pdf' in outs:
				tsk = self.create_task('dvipdf', task.outputs, node.change_ext('.pdf'))
				tsk.env.env = {'TEXINPUTS' : node.parent.abspath() + ':' + self.path.abspath() + ':' + self.path.abspath(self.env)}
		elif self.type == 'pdflatex':
			if 'ps' in outs:
				self.create_task('pdf2ps', task.outputs, node.change_ext('.ps'))
	# the sources have been consumed; prevent apply_core from processing them
	self.source = []
def detect(conf):
	"""Locate the TeX programs and initialize their FLAGS variables."""
	env = conf.env
	for tool in 'tex latex pdflatex bibtex dvips dvipdf ps2pdf makeindex pdf2ps'.split():
		conf.find_program(tool, var=tool.upper())
		env[tool.upper() + 'FLAGS'] = ''
	# dvips needs pdf-friendly defaults
	env['DVIPSFLAGS'] = '-Ppdf'
# Task type registrations: simple command-string tasks for the converters,
# function-based tasks (with the .tex dependency scanner) for latex/pdflatex.
b = Task.simple_task_type
b('tex', '${TEX} ${TEXFLAGS} ${SRC}', color='BLUE', shell=False)
b('bibtex', '${BIBTEX} ${BIBTEXFLAGS} ${SRC}', color='BLUE', shell=False)
b('dvips', '${DVIPS} ${DVIPSFLAGS} ${SRC} -o ${TGT}', color='BLUE', after="latex pdflatex tex bibtex", shell=False)
b('dvipdf', '${DVIPDF} ${DVIPDFFLAGS} ${SRC} ${TGT}', color='BLUE', after="latex pdflatex tex bibtex", shell=False)
b('pdf2ps', '${PDF2PS} ${PDF2PSFLAGS} ${SRC} ${TGT}', color='BLUE', after="dvipdf pdflatex", shell=False)
b = Task.task_type_from_func
cls = b('latex', latex_build, vars=latex_vardeps)
cls.scan = scan
cls = b('pdflatex', pdflatex_build, vars=pdflatex_vardeps)
cls.scan = scan

307
tools/wafadmin/Tools/vala.py

@ -1,307 +0,0 @@
#!/usr/bin/env python
# encoding: utf-8
# Ali Sabil, 2007
import os.path, shutil
import Task, Runner, Utils, Logs, Build, Node, Options
from TaskGen import extension, after, before
EXT_VALA = ['.vala', '.gs']
class valac_task(Task.Task):
	"""Compile all .vala/.gs inputs of a target to C in a single valac run,
	and install the generated headers/vapi/gir files for libraries."""

	vars = ("VALAC", "VALAC_VERSION", "VALAFLAGS")
	before = ("cc", "cxx")

	def run(self):
		# assemble the valac command line; option availability depends on
		# the detected VALAC_VERSION
		env = self.env
		inputs = [a.srcpath(env) for a in self.inputs]
		valac = env['VALAC']
		vala_flags = env.get_flat('VALAFLAGS')
		top_src = self.generator.bld.srcnode.abspath()
		top_bld = self.generator.bld.srcnode.abspath(env)

		if env['VALAC_VERSION'] > (0, 1, 6):
			cmd = [valac, '-C', '--quiet', vala_flags]
		else:
			cmd = [valac, '-C', vala_flags]

		if self.threading:
			cmd.append('--thread')

		if self.profile:
			cmd.append('--profile=%s' % self.profile)

		if self.target_glib:
			cmd.append('--target-glib=%s' % self.target_glib)

		features = self.generator.features

		# libraries also get a .h/.vapi (and possibly .gir) generated
		if 'cshlib' in features or 'cstaticlib' in features:
			output_dir = self.outputs[0].bld_dir(env)
			cmd.append('--library ' + self.target)
			if env['VALAC_VERSION'] >= (0, 7, 0):
				cmd.append('--header ' + os.path.join(output_dir, self.target + '.h'))
				self.outputs.append(self.generator.path.find_or_declare(self.target + '.h'))
			cmd.append('--basedir ' + top_src)
			cmd.append('-d ' + top_bld)
			if env['VALAC_VERSION'] > (0, 7, 2) and hasattr(self, 'gir'):
				cmd.append('--gir=%s.gir' % self.gir)
		else:
			output_dir = self.outputs[0].bld_dir(env)
			cmd.append('-d %s' % output_dir)

		for vapi_dir in self.vapi_dirs:
			cmd.append('--vapidir=%s' % vapi_dir)

		for package in self.packages:
			cmd.append('--pkg %s' % package)

		for package in self.packages_private:
			cmd.append('--pkg %s' % package)

		cmd.append(" ".join(inputs))

		result = self.generator.bld.exec_command(" ".join(cmd))

		if not 'cprogram' in features:
			# generate the .deps file
			if self.packages:
				filename = os.path.join(self.generator.path.abspath(env), "%s.deps" % self.target)
				deps = open(filename, 'w')
				for package in self.packages:
					deps.write(package + '\n')
				deps.close()

			# handle vala 0.1.6 who doesn't honor --directory for the generated .vapi
			self._fix_output("../%s.vapi" % self.target)
			# handle vala >= 0.1.7 who has a weid definition for --directory
			self._fix_output("%s.vapi" % self.target)
			# handle vala >= 0.2.0 who doesn't honor --directory for the generated .gidl
			self._fix_output("%s.gidl" % self.target)
			# handle vala >= 0.3.6 who doesn't honor --directory for the generated .gir
			self._fix_output("%s.gir" % self.target)
			if hasattr(self, 'gir'):
				self._fix_output("%s.gir" % self.gir)

			# move stray outputs next to their declared nodes
			first = None
			for node in self.outputs:
				if not first:
					first = node
				else:
					if first.parent.id != node.parent.id:
						# issue #483
						if env['VALAC_VERSION'] < (0, 7, 0):
							shutil.move(first.parent.abspath(self.env) + os.sep + node.name, node.abspath(self.env))
		return result

	def install(self):
		# install headers, .vapi/.deps and .gir files for library targets
		bld = self.generator.bld
		features = self.generator.features

		if self.attr("install_path") and ("cshlib" in features or "cstaticlib" in features):
			headers_list = [o for o in self.outputs if o.suffix() == ".h"]
			vapi_list = [o for o in self.outputs if (o.suffix() in (".vapi", ".deps"))]
			gir_list = [o for o in self.outputs if o.suffix() == ".gir"]

			for header in headers_list:
				top_src = self.generator.bld.srcnode
				package = self.env['PACKAGE']
				try:
					api_version = Utils.g_module.API_VERSION
				except AttributeError:
					# derive an API version from VERSION: 0.x -> '0.x', y.* -> 'y.0'
					version = Utils.g_module.VERSION.split(".")
					if version[0] == "0":
						api_version = "0." + version[1]
					else:
						api_version = version[0] + ".0"
				install_path = '${INCLUDEDIR}/%s-%s/%s' % (package, api_version, header.relpath_gen(top_src))
				bld.install_as(install_path, header, self.env)
			bld.install_files('${DATAROOTDIR}/vala/vapi', vapi_list, self.env)
			bld.install_files('${DATAROOTDIR}/gir-1.0', gir_list, self.env)

	def _fix_output(self, output):
		# best-effort move of a misplaced valac output into the target directory
		top_bld = self.generator.bld.srcnode.abspath(self.env)
		try:
			src = os.path.join(top_bld, output)
			dst = self.generator.path.abspath (self.env)
			shutil.move(src, dst)
		# NOTE(review): bare except is deliberate best-effort here (the file may
		# not exist for this valac version), but it also hides real errors
		except:
			pass
@extension(EXT_VALA)
def vala_file(self, node):
	"""Extension hook for .vala/.gs sources: create (once) the single valac
	task for the target, resolve uselib_local vala dependencies, and register
	the generated .c/.h/.vapi/.gir/.deps outputs."""
	valatask = getattr(self, "valatask", None)
	# there is only one vala task and it compiles all vala files .. :-/
	if not valatask:
		valatask = self.create_task('valac')
		self.valatask = valatask
		self.includes = Utils.to_list(getattr(self, 'includes', []))
		self.uselib = self.to_list(self.uselib)
		valatask.packages = []
		valatask.packages_private = Utils.to_list(getattr(self, 'packages_private', []))
		valatask.vapi_dirs = []
		valatask.target = self.target
		valatask.threading = False
		valatask.install_path = self.install_path
		valatask.profile = getattr (self, 'profile', 'gobject')
		valatask.target_glib = None #Deprecated

		packages = Utils.to_list(getattr(self, 'packages', []))
		vapi_dirs = Utils.to_list(getattr(self, 'vapi_dirs', []))
		includes = []

		if hasattr(self, 'uselib_local'):
			# walk the uselib_local graph transitively, collecting the
			# .vapi producers this target depends on
			local_packages = Utils.to_list(self.uselib_local)
			seen = []
			while len(local_packages) > 0:
				package = local_packages.pop()
				if package in seen:
					continue
				seen.append(package)

				# check if the package exists
				package_obj = self.name_to_obj(package)
				if not package_obj:
					raise Utils.WafError("object '%s' was not found in uselib_local (required by '%s')" % (package, self.name))

				package_name = package_obj.target
				package_node = package_obj.path
				package_dir = package_node.relpath_gen(self.path)

				for task in package_obj.tasks:
					for output in task.outputs:
						if output.name == package_name + ".vapi":
							# schedule after the producer of the .vapi
							valatask.set_run_after(task)
							if package_name not in packages:
								packages.append(package_name)
							if package_dir not in vapi_dirs:
								vapi_dirs.append(package_dir)
							if package_dir not in includes:
								includes.append(package_dir)

				if hasattr(package_obj, 'uselib_local'):
					lst = self.to_list(package_obj.uselib_local)
					lst.reverse()
					local_packages = [pkg for pkg in lst if pkg not in seen] + local_packages

		valatask.packages = packages
		# record both the source and build variants of each vapi directory
		for vapi_dir in vapi_dirs:
			try:
				valatask.vapi_dirs.append(self.path.find_dir(vapi_dir).abspath())
				valatask.vapi_dirs.append(self.path.find_dir(vapi_dir).abspath(self.env))
			except AttributeError:
				Logs.warn("Unable to locate Vala API directory: '%s'" % vapi_dir)

		self.includes.append(node.bld.srcnode.abspath())
		self.includes.append(node.bld.srcnode.abspath(self.env))
		for include in includes:
			try:
				self.includes.append(self.path.find_dir(include).abspath())
				self.includes.append(self.path.find_dir(include).abspath(self.env))
			except AttributeError:
				Logs.warn("Unable to locate include directory: '%s'" % include)

		if valatask.profile == 'gobject':
			if hasattr(self, 'target_glib'):
				Logs.warn ('target_glib on vala tasks is deprecated --vala-target-glib=MAJOR.MINOR from the vala tool options')

			if getattr(Options.options, 'vala_target_glib', None):
				valatask.target_glib = Options.options.vala_target_glib

			if not 'GOBJECT' in self.uselib:
				self.uselib.append('GOBJECT')

		if hasattr(self, 'threading'):
			if valatask.profile == 'gobject':
				valatask.threading = self.threading
				if not 'GTHREAD' in self.uselib:
					self.uselib.append('GTHREAD')
			else:
				#Vala doesn't have threading support for dova nor posix
				Logs.warn("Profile %s does not have threading support" % valatask.profile)

		if hasattr(self, 'gir'):
			valatask.gir = self.gir

	# per-file part: every vala source yields a generated .c to compile
	env = valatask.env

	output_nodes = []

	c_node = node.change_ext('.c')
	output_nodes.append(c_node)
	self.allnodes.append(c_node)

	# header/vapi/gir outputs depend on the valac version and target kind
	if env['VALAC_VERSION'] < (0, 7, 0):
		output_nodes.append(node.change_ext('.h'))
	else:
		if not 'cprogram' in self.features:
			output_nodes.append(self.path.find_or_declare('%s.h' % self.target))

	if not 'cprogram' in self.features:
		output_nodes.append(self.path.find_or_declare('%s.vapi' % self.target))
		if env['VALAC_VERSION'] > (0, 7, 2):
			if hasattr(self, 'gir'):
				output_nodes.append(self.path.find_or_declare('%s.gir' % self.gir))
		elif env['VALAC_VERSION'] > (0, 3, 5):
			output_nodes.append(self.path.find_or_declare('%s.gir' % self.target))
		elif env['VALAC_VERSION'] > (0, 1, 7):
			output_nodes.append(self.path.find_or_declare('%s.gidl' % self.target))
		if valatask.packages:
			output_nodes.append(self.path.find_or_declare('%s.deps' % self.target))

	valatask.inputs.append(node)
	valatask.outputs.extend(output_nodes)
def detect(conf):
	"""Find valac, check the GOBJECT/GTHREAD pkg-config packages, and record
	the valac version (fails when older than 0.1.6)."""
	min_version = (0, 1, 6)
	min_version_str = "%d.%d.%d" % min_version

	valac = conf.find_program('valac', var='VALAC', mandatory=True)

	if not conf.env["HAVE_GOBJECT"]:
		pkg_args = {'package': 'gobject-2.0',
			'uselib_store': 'GOBJECT',
			'args': '--cflags --libs'}
		if getattr(Options.options, 'vala_target_glib', None):
			pkg_args['atleast_version'] = Options.options.vala_target_glib
		conf.check_cfg(**pkg_args)

	if not conf.env["HAVE_GTHREAD"]:
		pkg_args = {'package': 'gthread-2.0',
			'uselib_store': 'GTHREAD',
			'args': '--cflags --libs'}
		if getattr(Options.options, 'vala_target_glib', None):
			pkg_args['atleast_version'] = Options.options.vala_target_glib
		conf.check_cfg(**pkg_args)

	# parse "Vala x.y.z" from 'valac --version'; fall back to (0, 0, 0)
	try:
		output = Utils.cmd_output(valac + " --version", silent=True)
		version = output.split(' ', 1)[-1].strip().split(".")[0:3]
		version = [int(x) for x in version]
		valac_version = tuple(version)
	except Exception:
		valac_version = (0, 0, 0)

	conf.check_message('program version',
		'valac >= ' + min_version_str,
		valac_version >= min_version,
		"%d.%d.%d" % valac_version)

	conf.check_tool('gnu_dirs')

	if valac_version < min_version:
		conf.fatal("valac version too old to be used with this tool")
		# NOTE(review): unreachable if conf.fatal raises (it does in waf);
		# kept as a defensive no-op
		return

	conf.env['VALAC_VERSION'] = valac_version
	conf.env['VALAFLAGS'] = ''
def set_options (opt):
	"""Register the vala command-line options."""
	valaopts = opt.add_option_group('Vala Compiler Options')
	valaopts.add_option ('--vala-target-glib', default=None,
		dest='vala_target_glib', metavar='MAJOR.MINOR',
		help='Target version of glib for Vala GObject code generation')
Loading…
Cancel
Save