Browse Source

gyp: update to bebdcea

v0.10.22-release
Timothy J Fontaine 11 years ago
parent
commit
2010985354
  1. 4
      Makefile
  2. 2
      configure
  3. 2
      tools/gyp/AUTHORS
  4. 2
      tools/gyp/DEPS
  5. 21
      tools/gyp/MANIFEST
  6. 1
      tools/gyp/PRESUBMIT.py
  7. 148
      tools/gyp/buildbot/buildbot_run.py
  8. 19
      tools/gyp/gyp
  9. 2
      tools/gyp/gyp.bat
  10. 18
      tools/gyp/gyp_main.py
  11. 11
      tools/gyp/gyptest.py
  12. 5
      tools/gyp/pylib/gyp/MSVSNew.py
  13. 7
      tools/gyp/pylib/gyp/MSVSSettings.py
  14. 81
      tools/gyp/pylib/gyp/MSVSUtil.py
  15. 43
      tools/gyp/pylib/gyp/MSVSVersion.py
  16. 199
      tools/gyp/pylib/gyp/SCons.py
  17. 105
      tools/gyp/pylib/gyp/__init__.py
  18. 26
      tools/gyp/pylib/gyp/common.py
  19. 12
      tools/gyp/pylib/gyp/flock_tool.py
  20. 96
      tools/gyp/pylib/gyp/generator/android.py
  21. 14
      tools/gyp/pylib/gyp/generator/dump_dependency_json.py
  22. 47
      tools/gyp/pylib/gyp/generator/eclipse.py
  23. 79
      tools/gyp/pylib/gyp/generator/make.py
  24. 175
      tools/gyp/pylib/gyp/generator/msvs.py
  25. 803
      tools/gyp/pylib/gyp/generator/ninja.py
  26. 8
      tools/gyp/pylib/gyp/generator/ninja_test.py
  27. 1072
      tools/gyp/pylib/gyp/generator/scons.py
  28. 73
      tools/gyp/pylib/gyp/generator/xcode.py
  29. 23
      tools/gyp/pylib/gyp/generator/xcode_test.py
  30. 242
      tools/gyp/pylib/gyp/input.py
  31. 90
      tools/gyp/pylib/gyp/input_test.py
  32. 73
      tools/gyp/pylib/gyp/mac_tool.py
  33. 143
      tools/gyp/pylib/gyp/msvs_emulation.py
  34. 18
      tools/gyp/pylib/gyp/ninja_syntax.py
  35. 37
      tools/gyp/pylib/gyp/win_tool.py
  36. 307
      tools/gyp/pylib/gyp/xcode_emulation.py
  37. 28
      tools/gyp/pylib/gyp/xcodeproj_file.py
  38. 11
      tools/gyp/setup.py
  39. 1
      tools/gyp/tools/emacs/gyp.el
  40. 0
      tools/gyp_node.py

4
Makefile

@ -46,9 +46,9 @@ endif
out/Makefile: common.gypi deps/uv/uv.gyp deps/http_parser/http_parser.gyp deps/zlib/zlib.gyp deps/v8/build/common.gypi deps/v8/tools/gyp/v8.gyp node.gyp config.gypi
ifeq ($(USE_NINJA),1)
touch out/Makefile
$(PYTHON) tools/gyp_node -f ninja
$(PYTHON) tools/gyp_node.py -f ninja
else
$(PYTHON) tools/gyp_node -f make
$(PYTHON) tools/gyp_node.py -f make
endif
config.gypi: configure

2
configure

@ -704,4 +704,4 @@ elif flavor == 'win':
else:
gyp_args = ['-f', 'make-' + flavor]
subprocess.call([sys.executable, 'tools/gyp_node'] + gyp_args)
subprocess.call([sys.executable, 'tools/gyp_node.py'] + gyp_args)

2
tools/gyp/AUTHORS

@ -3,6 +3,8 @@
Google Inc.
Bloomberg Finance L.P.
Yandex LLC
Steven Knight <knight@baldmt.com>
Ryan Norton <rnorton10@gmail.com>
Eric N. Vander Weele <ericvw@gmail.com>

2
tools/gyp/DEPS

@ -8,8 +8,6 @@ vars = {
}
deps = {
"scons":
Var("chrome_trunk") + "/src/third_party/scons@44099",
}
deps_os = {

21
tools/gyp/MANIFEST

@ -1,21 +0,0 @@
setup.py
gyp
LICENSE
AUTHORS
pylib/gyp/MSVSNew.py
pylib/gyp/MSVSProject.py
pylib/gyp/MSVSToolFile.py
pylib/gyp/MSVSUserFile.py
pylib/gyp/MSVSVersion.py
pylib/gyp/SCons.py
pylib/gyp/__init__.py
pylib/gyp/common.py
pylib/gyp/input.py
pylib/gyp/xcodeproj_file.py
pylib/gyp/generator/__init__.py
pylib/gyp/generator/gypd.py
pylib/gyp/generator/gypsh.py
pylib/gyp/generator/make.py
pylib/gyp/generator/msvs.py
pylib/gyp/generator/scons.py
pylib/gyp/generator/xcode.py

1
tools/gyp/PRESUBMIT.py

@ -17,7 +17,6 @@ PYLINT_BLACKLIST = [
'test/lib/TestCommon.py',
'test/lib/TestGyp.py',
# Needs style fix.
'pylib/gyp/generator/scons.py',
'pylib/gyp/generator/xcode.py',
]

148
tools/gyp/buildbot/buildbot_run.py

@ -0,0 +1,148 @@
#!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""Argument-less script to select what to run on the buildbots."""

import os
import shutil
import subprocess
import sys

# Windows/cygwin executables carry an '.exe' suffix; other platforms use none.
if sys.platform in ['win32', 'cygwin']:
  EXE_SUFFIX = '.exe'
else:
  EXE_SUFFIX = ''

# Directory layout: this script lives in <trunk>/buildbot/, so walk upwards
# to find the gyp trunk and the enclosing checkout root.
BUILDBOT_DIR = os.path.dirname(os.path.abspath(__file__))
TRUNK_DIR = os.path.dirname(BUILDBOT_DIR)
ROOT_DIR = os.path.dirname(TRUNK_DIR)
# Sibling directory holding the (large) Android source checkout.
ANDROID_DIR = os.path.join(ROOT_DIR, 'android')
# Build output directory, wiped at the start of every build.
OUT_DIR = os.path.join(TRUNK_DIR, 'out')
def CallSubProcess(*args, **kwargs):
"""Wrapper around subprocess.call which treats errors as build exceptions."""
retcode = subprocess.call(*args, **kwargs)
if retcode != 0:
print '@@@STEP_EXCEPTION@@@'
sys.exit(1)
def PrepareAndroidTree():
"""Prepare an Android tree to run 'android' format tests."""
if os.environ['BUILDBOT_CLOBBER'] == '1':
print '@@@BUILD_STEP Clobber Android checkout@@@'
shutil.rmtree(ANDROID_DIR)
# The release of Android we use is static, so there's no need to do anything
# if the directory already exists.
if os.path.isdir(ANDROID_DIR):
return
print '@@@BUILD_STEP Initialize Android checkout@@@'
os.mkdir(ANDROID_DIR)
CallSubProcess(['git', 'config', '--global', 'user.name', 'trybot'])
CallSubProcess(['git', 'config', '--global',
'user.email', 'chrome-bot@google.com'])
CallSubProcess(['git', 'config', '--global', 'color.ui', 'false'])
CallSubProcess(
['repo', 'init',
'-u', 'https://android.googlesource.com/platform/manifest',
'-b', 'android-4.2.1_r1',
'-g', 'all,-notdefault,-device,-darwin,-mips,-x86'],
cwd=ANDROID_DIR)
print '@@@BUILD_STEP Sync Android@@@'
CallSubProcess(['repo', 'sync', '-j4'], cwd=ANDROID_DIR)
print '@@@BUILD_STEP Build Android@@@'
CallSubProcess(
['/bin/bash',
'-c', 'source build/envsetup.sh && lunch full-eng && make -j4'],
cwd=ANDROID_DIR)
def GypTestFormat(title, format=None, msvs_version=None):
"""Run the gyp tests for a given format, emitting annotator tags.
See annotator docs at:
https://sites.google.com/a/chromium.org/dev/developers/testing/chromium-build-infrastructure/buildbot-annotations
Args:
format: gyp format to test.
Returns:
0 for sucesss, 1 for failure.
"""
if not format:
format = title
print '@@@BUILD_STEP ' + title + '@@@'
sys.stdout.flush()
env = os.environ.copy()
if msvs_version:
env['GYP_MSVS_VERSION'] = msvs_version
command = ' '.join(
[sys.executable, 'trunk/gyptest.py',
'--all',
'--passed',
'--format', format,
'--chdir', 'trunk'])
if format == 'android':
# gyptest needs the environment setup from envsetup/lunch in order to build
# using the 'android' backend, so this is done in a single shell.
retcode = subprocess.call(
['/bin/bash',
'-c', 'source build/envsetup.sh && lunch full-eng && cd %s && %s'
% (ROOT_DIR, command)],
cwd=ANDROID_DIR, env=env)
else:
retcode = subprocess.call(command, cwd=ROOT_DIR, env=env, shell=True)
if retcode:
# Emit failure tag, and keep going.
print '@@@STEP_FAILURE@@@'
return 1
return 0
def GypBuild():
# Dump out/ directory.
print '@@@BUILD_STEP cleanup@@@'
print 'Removing %s...' % OUT_DIR
shutil.rmtree(OUT_DIR, ignore_errors=True)
print 'Done.'
retcode = 0
# The Android gyp bot runs on linux so this must be tested first.
if os.environ['BUILDBOT_BUILDERNAME'] == 'gyp-android':
PrepareAndroidTree()
retcode += GypTestFormat('android')
elif sys.platform.startswith('linux'):
retcode += GypTestFormat('ninja')
retcode += GypTestFormat('make')
elif sys.platform == 'darwin':
retcode += GypTestFormat('ninja')
retcode += GypTestFormat('xcode')
retcode += GypTestFormat('make')
elif sys.platform == 'win32':
retcode += GypTestFormat('ninja')
if os.environ['BUILDBOT_BUILDERNAME'] == 'gyp-win64':
retcode += GypTestFormat('msvs-2010', format='msvs', msvs_version='2010')
retcode += GypTestFormat('msvs-2012', format='msvs', msvs_version='2012')
else:
raise Exception('Unknown platform')
if retcode:
# TODO(bradnelson): once the annotator supports a postscript (section for
# after the build proper that could be used for cumulative failures),
# use that instead of this. This isolates the final return value so
# that it isn't misattributed to the last stage.
print '@@@BUILD_STEP failures@@@'
sys.exit(retcode)
if __name__ == '__main__':
GypBuild()

19
tools/gyp/gyp

@ -1,18 +1,7 @@
#!/usr/bin/env python
# Copyright (c) 2009 Google Inc. All rights reserved.
#!/bin/bash
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import sys
# TODO(mark): sys.path manipulation is some temporary testing stuff.
try:
import gyp
except ImportError, e:
import os.path
sys.path.append(os.path.join(os.path.dirname(sys.argv[0]), 'pylib'))
import gyp
if __name__ == '__main__':
sys.exit(gyp.main(sys.argv[1:]))
BASE=`dirname $0`
python $BASE/gyp_main.py "$@"

2
tools/gyp/gyp.bat

@ -2,4 +2,4 @@
@rem Use of this source code is governed by a BSD-style license that can be
@rem found in the LICENSE file.
@python "%~dp0/gyp" %*
@python "%~dp0gyp_main.py" %*

18
tools/gyp/gyp_main.py

@ -0,0 +1,18 @@
#!/usr/bin/env python
# Copyright (c) 2009 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import sys
# TODO(mark): sys.path manipulation is some temporary testing stuff.
try:
import gyp
except ImportError, e:
import os.path
sys.path.append(os.path.join(os.path.dirname(sys.argv[0]), 'pylib'))
import gyp
if __name__ == '__main__':
sys.exit(gyp.script_main())

11
tools/gyp/gyptest.py

@ -130,13 +130,16 @@ sys.stdout = Unbuffered(sys.stdout)
sys.stderr = Unbuffered(sys.stderr)
def is_test_name(f):
return f.startswith('gyptest') and f.endswith('.py')
def find_all_gyptest_files(directory):
result = []
for root, dirs, files in os.walk(directory):
if '.svn' in dirs:
dirs.remove('.svn')
result.extend([ os.path.join(root, f) for f in files
if f.startswith('gyptest') and f.endswith('.py') ])
result.extend([ os.path.join(root, f) for f in files if is_test_name(f) ])
result.sort()
return result
@ -186,6 +189,9 @@ def main(argv=None):
if os.path.isdir(arg):
tests.extend(find_all_gyptest_files(os.path.normpath(arg)))
else:
if not is_test_name(os.path.basename(arg)):
print >>sys.stderr, arg, 'is not a valid gyp test name.'
sys.exit(1)
tests.append(arg)
if opts.list:
@ -210,6 +216,7 @@ def main(argv=None):
else:
# TODO: not duplicate this mapping from pylib/gyp/__init__.py
format_list = {
'aix5': ['make'],
'freebsd7': ['make'],
'freebsd8': ['make'],
'openbsd5': ['make'],

5
tools/gyp/pylib/gyp/MSVSNew.py

@ -2,7 +2,7 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""New implementation of Visual Studio project generation for SCons."""
"""New implementation of Visual Studio project generation."""
import os
import random
@ -325,7 +325,8 @@ class MSVSSolution:
f.write('\tEndGlobalSection\r\n')
# Folder mappings
# TODO(rspangler): Should omit this section if there are no folders
# Omit this section if there are no folders
if any([e.entries for e in all_entries if isinstance(e, MSVSFolder)]):
f.write('\tGlobalSection(NestedProjects) = preSolution\r\n')
for e in all_entries:
if not isinstance(e, MSVSFolder):

7
tools/gyp/pylib/gyp/MSVSSettings.py

@ -834,8 +834,13 @@ _Moved(_link, 'UseLibraryDependencyInputs', 'ProjectReference', _boolean)
# MSVS options not found in MSBuild.
_MSVSOnly(_link, 'OptimizeForWindows98', _newly_boolean)
_MSVSOnly(_link, 'UseUnicodeResponseFiles', _boolean)
# TODO(jeanluc) I don't think these are genuine settings but byproducts of Gyp.
# These settings generate correctly in the MSVS output files when using
# e.g. DelayLoadDLLs! or AdditionalDependencies! to exclude files from
# configuration entries, but result in spurious artifacts which can be
# safely ignored here. See crbug.com/246570
_MSVSOnly(_link, 'AdditionalLibraryDirectories_excluded', _folder_list)
_MSVSOnly(_link, 'DelayLoadDLLs_excluded', _file_list)
_MSVSOnly(_link, 'AdditionalDependencies_excluded', _file_list)
# MSBuild options not found in MSVS.
_MSBuildOnly(_link, 'BuildingInIDE', _boolean)

81
tools/gyp/pylib/gyp/MSVSUtil.py

@ -10,7 +10,8 @@ import os
_TARGET_TYPE_EXT = {
'executable': '.exe',
'shared_library': '.dll'
'loadable_module': '.dll',
'shared_library': '.dll',
}
@ -121,6 +122,46 @@ def ShardTargets(target_list, target_dicts):
return (new_target_list, new_target_dicts)
def _GetPdbPath(target_dict, config_name, vars):
"""Returns the path to the PDB file that will be generated by a given
configuration.
The lookup proceeds as follows:
- Look for an explicit path in the VCLinkerTool configuration block.
- Look for an 'msvs_large_pdb_path' variable.
- Use '<(PRODUCT_DIR)/<(product_name).(exe|dll).pdb' if 'product_name' is
specified.
- Use '<(PRODUCT_DIR)/<(target_name).(exe|dll).pdb'.
Arguments:
target_dict: The target dictionary to be searched.
config_name: The name of the configuration of interest.
vars: A dictionary of common GYP variables with generator-specific values.
Returns:
The path of the corresponding PDB file.
"""
config = target_dict['configurations'][config_name]
msvs = config.setdefault('msvs_settings', {})
linker = msvs.get('VCLinkerTool', {})
pdb_path = linker.get('ProgramDatabaseFile')
if pdb_path:
return pdb_path
variables = target_dict.get('variables', {})
pdb_path = variables.get('msvs_large_pdb_path', None)
if pdb_path:
return pdb_path
pdb_base = target_dict.get('product_name', target_dict['target_name'])
pdb_base = '%s%s.pdb' % (pdb_base, _TARGET_TYPE_EXT[target_dict['type']])
pdb_path = vars['PRODUCT_DIR'] + '/' + pdb_base
return pdb_path
def InsertLargePdbShims(target_list, target_dicts, vars):
"""Insert a shim target that forces the linker to use 4KB pagesize PDBs.
@ -138,6 +179,7 @@ def InsertLargePdbShims(target_list, target_dicts, vars):
targets_to_shim = []
for t in target_dicts:
target_dict = target_dicts[t]
# We only want to shim targets that have msvs_large_pdb enabled.
if not int(target_dict.get('msvs_large_pdb', 0)):
continue
@ -162,7 +204,7 @@ def InsertLargePdbShims(target_list, target_dicts, vars):
# GYP and the project may be on different drives), and Ninja hates absolute
# paths (it ends up generating the .obj and .obj.d alongside the source
# file, polluting GYPs tree).
copy_suffix = '_large_pdb_copy'
copy_suffix = 'large_pdb_copy'
copy_target_name = target_name + '_' + copy_suffix
full_copy_target_name = _SuffixName(t, copy_suffix)
shim_cc_basename = os.path.basename(large_pdb_shim_cc)
@ -179,7 +221,7 @@ def InsertLargePdbShims(target_list, target_dicts, vars):
# This is the dict for the PDB generating shim target. It depends on the
# copy target.
shim_suffix = '_large_pdb_shim'
shim_suffix = 'large_pdb_shim'
shim_target_name = target_name + '_' + shim_suffix
full_shim_target_name = _SuffixName(t, shim_suffix)
shim_dict = copy.deepcopy(base_dict)
@ -190,19 +232,32 @@ def InsertLargePdbShims(target_list, target_dicts, vars):
# Set up the shim to output its PDB to the same location as the final linker
# target.
for config in shim_dict.get('configurations').itervalues():
msvs = config.setdefault('msvs_settings')
for config_name, config in shim_dict.get('configurations').iteritems():
pdb_path = _GetPdbPath(target_dict, config_name, vars)
linker = msvs.pop('VCLinkerTool') # We want to clear this dict.
pdb_path = linker.get('ProgramDatabaseFile')
# A few keys that we don't want to propagate.
for key in ['msvs_precompiled_header', 'msvs_precompiled_source', 'test']:
config.pop(key, None)
compiler = msvs.setdefault('VCCLCompilerTool', {})
compiler.setdefault('DebugInformationFormat', '3')
compiler.setdefault('ProgramDataBaseFileName', pdb_path)
msvs = config.setdefault('msvs_settings', {})
# Add the new targets.
target_list.append(full_copy_target_name)
target_list.append(full_shim_target_name)
# Update the compiler directives in the shim target.
compiler = msvs.setdefault('VCCLCompilerTool', {})
compiler['DebugInformationFormat'] = '3'
compiler['ProgramDataBaseFileName'] = pdb_path
# Set the explicit PDB path in the appropriate configuration of the
# original target.
config = target_dict['configurations'][config_name]
msvs = config.setdefault('msvs_settings', {})
linker = msvs.setdefault('VCLinkerTool', {})
linker['GenerateDebugInformation'] = 'true'
linker['ProgramDatabaseFile'] = pdb_path
# Add the new targets. They must go to the beginning of the list so that
# the dependency generation works as expected in ninja.
target_list.insert(0, full_copy_target_name)
target_list.insert(0, full_shim_target_name)
target_dicts[full_copy_target_name] = copy_dict
target_dicts[full_shim_target_name] = shim_dict

43
tools/gyp/pylib/gyp/MSVSVersion.py

@ -83,6 +83,13 @@ class VisualStudioVersion(object):
# vcvars32, which it can only find if VS??COMNTOOLS is set, which it
# isn't always.
if target_arch == 'x86':
if self.short_name == '2013' and (
os.environ.get('PROCESSOR_ARCHITECTURE') == 'AMD64' or
os.environ.get('PROCESSOR_ARCHITEW6432') == 'AMD64'):
# VS2013 non-Express has a x64-x86 cross that we want to prefer.
return [os.path.normpath(
os.path.join(self.path, 'VC/vcvarsall.bat')), 'amd64_x86']
# Otherwise, the standard x86 compiler.
return [os.path.normpath(
os.path.join(self.path, 'Common7/Tools/vsvars32.bat'))]
else:
@ -197,6 +204,24 @@ def _CreateVersion(name, path, sdk_based=False):
if path:
path = os.path.normpath(path)
versions = {
'2013': VisualStudioVersion('2013',
'Visual Studio 2013',
solution_version='13.00',
project_version='12.0',
flat_sln=False,
uses_vcxproj=True,
path=path,
sdk_based=sdk_based,
default_toolset='v120'),
'2013e': VisualStudioVersion('2013e',
'Visual Studio 2013',
solution_version='13.00',
project_version='12.0',
flat_sln=True,
uses_vcxproj=True,
path=path,
sdk_based=sdk_based,
default_toolset='v120'),
'2012': VisualStudioVersion('2012',
'Visual Studio 2012',
solution_version='12.00',
@ -224,7 +249,7 @@ def _CreateVersion(name, path, sdk_based=False):
path=path,
sdk_based=sdk_based),
'2010e': VisualStudioVersion('2010e',
'Visual Studio 2010',
'Visual C++ Express 2010',
solution_version='11.00',
project_version='4.0',
flat_sln=True,
@ -288,10 +313,16 @@ def _DetectVisualStudioVersions(versions_to_check, force_express):
2008(e) - Visual Studio 2008 (9)
2010(e) - Visual Studio 2010 (10)
2012(e) - Visual Studio 2012 (11)
2013(e) - Visual Studio 2013 (11)
Where (e) is e for express editions of MSVS and blank otherwise.
"""
version_to_year = {
'8.0': '2005', '9.0': '2008', '10.0': '2010', '11.0': '2012'}
'8.0': '2005',
'9.0': '2008',
'10.0': '2010',
'11.0': '2012',
'12.0': '2013',
}
versions = []
for version in versions_to_check:
# Old method of searching for which VS version is installed
@ -345,7 +376,7 @@ def SelectVisualStudioVersion(version='auto'):
if version == 'auto':
version = os.environ.get('GYP_MSVS_VERSION', 'auto')
version_map = {
'auto': ('10.0', '9.0', '8.0', '11.0'),
'auto': ('10.0', '12.0', '9.0', '8.0', '11.0'),
'2005': ('8.0',),
'2005e': ('8.0',),
'2008': ('9.0',),
@ -354,13 +385,15 @@ def SelectVisualStudioVersion(version='auto'):
'2010e': ('10.0',),
'2012': ('11.0',),
'2012e': ('11.0',),
'2013': ('12.0',),
'2013e': ('12.0',),
}
override_path = os.environ.get('GYP_MSVS_OVERRIDE_PATH')
if override_path:
msvs_version = os.environ.get('GYP_MSVS_VERSION')
if not msvs_version or 'e' not in msvs_version:
if not msvs_version:
raise ValueError('GYP_MSVS_OVERRIDE_PATH requires GYP_MSVS_VERSION to be '
'set to an "e" version (e.g. 2010e)')
'set to a particular version (e.g. 2010e).')
return _CreateVersion(msvs_version, override_path, sdk_based=True)
version = str(version)
versions = _DetectVisualStudioVersions(version_map[version], 'e' in version)

199
tools/gyp/pylib/gyp/SCons.py

@ -1,199 +0,0 @@
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
SCons generator.
This contains class definitions and supporting functions for generating
pieces of SCons files for the different types of GYP targets.
"""
import os
def WriteList(fp, list, prefix='',
              separator=',\n ',
              preamble=None,
              postamble=None):
  """Write list items to fp, each prefixed, joined by separator.

  An empty preamble/postamble writes nothing; a falsy separator falls back
  to a single space.  (The parameter name 'list' shadows the builtin but is
  kept for interface compatibility.)
  """
  fp.write(preamble or '')
  fp.write((separator or ' ').join([prefix + l for l in list]))
  fp.write(postamble or '')
class TargetBase(object):
  """
  Base class for a SCons representation of a GYP target.

  Subclasses provide builder_name, target_prefix/target_suffix and out_dir;
  the base class handles product naming and the generated SConscript text.
  """
  is_ignored = False
  target_prefix = ''
  target_suffix = ''

  def __init__(self, spec):
    self.spec = spec

  def full_product_name(self):
    """
    Returns the full name of the product being built:

      * Uses 'product_name' if it's set, else prefix + 'target_name'.
      * Prepends 'product_dir' if set.
      * Appends SCons suffix variables for the target type (or
        product_extension).
    """
    suffix = self.target_suffix
    product_extension = self.spec.get('product_extension')
    if product_extension:
      suffix = '.' + product_extension
    prefix = self.spec.get('product_prefix', self.target_prefix)
    name = self.spec['target_name']
    name = prefix + self.spec.get('product_name', name) + suffix
    product_dir = self.spec.get('product_dir')
    if product_dir:
      name = os.path.join(product_dir, name)
    else:
      name = os.path.join(self.out_dir, name)
    return name

  def write_input_files(self, fp):
    """
    Writes the definition of the input files (sources).
    """
    sources = self.spec.get('sources')
    if not sources:
      fp.write('\ninput_files = []\n')
      return
    preamble = '\ninput_files = [\n '
    postamble = ',\n]\n'
    WriteList(fp, map(repr, sources), preamble=preamble, postamble=postamble)

  def builder_call(self):
    """
    Returns the actual SCons builder call to build this target.
    """
    name = self.full_product_name()
    return 'env.%s(env.File(%r), input_files)' % (self.builder_name, name)

  def write_target(self, fp, src_dir='', pre=''):
    """
    Writes the lines necessary to build this target.
    """
    fp.write('\n' + pre)
    fp.write('_outputs = %s\n' % self.builder_call())
    fp.write('target_files.extend(_outputs)\n')
class NoneTarget(TargetBase):
  """
  A GYP target type of 'none', implicitly or explicitly.

  Builds nothing: the target's outputs are simply its inputs.
  """
  def write_target(self, fp, src_dir='', pre=''):
    fp.write('\ntarget_files.extend(input_files)\n')
class SettingsTarget(TargetBase):
  """
  A GYP target type of 'settings'.

  Contributes build settings only, so it is skipped during generation.
  """
  is_ignored = True
# Template of Python code written into the generated build file by
# CompilableSourcesTargetBase.write_target; the %(src_dir)r and %(name)s
# placeholders are filled from its 'variables' dict.  It maps each
# compilable input file to an explicit object path under ${OBJ_DIR} (with
# '..' path elements rewritten to '__' to keep objects below the build
# directory).
# NOTE(review): the indentation inside this string appears to have been
# lost in transit; the emitted code depends on that indentation, so it
# should be restored from the upstream gyp sources — TODO confirm.
compilable_sources_template = """
_result = []
for infile in input_files:
if env.compilable(infile):
if (type(infile) == type('')
and (infile.startswith(%(src_dir)r)
or not os.path.isabs(env.subst(infile)))):
# Force files below the build directory by replacing all '..'
# elements in the path with '__':
base, ext = os.path.splitext(os.path.normpath(infile))
base = [d == '..' and '__' or d for d in base.split('/')]
base = os.path.join(*base)
object = '${OBJ_DIR}/${COMPONENT_NAME}/${TARGET_NAME}/' + base
if not infile.startswith(%(src_dir)r):
infile = %(src_dir)r + infile
infile = env.%(name)s(object, infile)[0]
else:
infile = env.%(name)s(infile)[0]
_result.append(infile)
input_files = _result
"""
class CompilableSourcesTargetBase(TargetBase):
  """
  An abstract base class for targets that compile their source files.

  We explicitly transform compilable files into object files,
  even though SCons could infer that for us, because we want
  to control where the object file ends up.  (The implicit rules
  in SCons always put the object file next to the source file.)
  """
  # Subclasses must set this to the SCons object builder to use
  # (e.g. 'StaticObject' or 'SharedObject').
  intermediate_builder_name = None

  def write_target(self, fp, src_dir='', pre=''):
    if self.intermediate_builder_name is None:
      raise NotImplementedError
    if src_dir and not src_dir.endswith('/'):
      src_dir += '/'
    variables = {
        'src_dir': src_dir,
        'name': self.intermediate_builder_name,
    }
    # Emit the source->object mapping, then the normal builder call.
    fp.write(compilable_sources_template % variables)
    super(CompilableSourcesTargetBase, self).write_target(fp)
class ProgramTarget(CompilableSourcesTargetBase):
  """
  A GYP target type of 'executable'.
  """
  builder_name = 'GypProgram'
  intermediate_builder_name = 'StaticObject'
  target_prefix = '${PROGPREFIX}'
  target_suffix = '${PROGSUFFIX}'
  out_dir = '${TOP_BUILDDIR}'
class StaticLibraryTarget(CompilableSourcesTargetBase):
  """
  A GYP target type of 'static_library'.
  """
  builder_name = 'GypStaticLibrary'
  intermediate_builder_name = 'StaticObject'
  target_prefix = '${LIBPREFIX}'
  target_suffix = '${LIBSUFFIX}'
  out_dir = '${LIB_DIR}'
class SharedLibraryTarget(CompilableSourcesTargetBase):
  """
  A GYP target type of 'shared_library'.
  """
  builder_name = 'GypSharedLibrary'
  intermediate_builder_name = 'SharedObject'
  target_prefix = '${SHLIBPREFIX}'
  target_suffix = '${SHLIBSUFFIX}'
  out_dir = '${LIB_DIR}'
class LoadableModuleTarget(CompilableSourcesTargetBase):
  """
  A GYP target type of 'loadable_module'.

  Built like a shared library, but placed in the top build directory.
  """
  builder_name = 'GypLoadableModule'
  intermediate_builder_name = 'SharedObject'
  target_prefix = '${SHLIBPREFIX}'
  target_suffix = '${SHLIBSUFFIX}'
  out_dir = '${TOP_BUILDDIR}'
# Maps a GYP 'type' value to the class that generates it.  A missing/None
# type is treated the same as an explicit 'none'.
TargetMap = {
  None : NoneTarget,
  'none' : NoneTarget,
  'settings' : SettingsTarget,
  'executable' : ProgramTarget,
  'static_library' : StaticLibraryTarget,
  'shared_library' : SharedLibraryTarget,
  'loadable_module' : LoadableModuleTarget,
}
def Target(spec):
  """Instantiate the Target subclass matching spec's 'type' (KeyError if
  the type is unknown to TargetMap)."""
  return TargetMap[spec.get('type')](spec)

105
tools/gyp/pylib/gyp/__init__.py

@ -106,10 +106,6 @@ def Load(build_files, format, default_variables={},
# so we can default things and the generators only have to provide what
# they need.
generator_input_info = {
'generator_wants_absolute_build_file_paths':
getattr(generator, 'generator_wants_absolute_build_file_paths', False),
'generator_handles_variants':
getattr(generator, 'generator_handles_variants', False),
'non_configuration_keys':
getattr(generator, 'generator_additional_non_configuration_keys', []),
'path_sections':
@ -123,12 +119,14 @@ def Load(build_files, format, default_variables={},
'generator_wants_static_library_dependencies_adjusted', True),
'generator_wants_sorted_dependencies':
getattr(generator, 'generator_wants_sorted_dependencies', False),
'generator_filelist_paths':
getattr(generator, 'generator_filelist_paths', None),
}
# Process the input specific to this generator.
result = gyp.input.Load(build_files, default_variables, includes[:],
depth, generator_input_info, check, circular_check,
params['parallel'])
params['parallel'], params['root_targets'])
return [generator] + result
def NameValueListToDict(name_value_list):
@ -283,26 +281,26 @@ def gyp_main(args):
parser = RegeneratableOptionParser()
usage = 'usage: %s [options ...] [build_file ...]'
parser.set_usage(usage.replace('%s', '%prog'))
parser.add_option('--build', dest='configs', action='append',
help='configuration for build after project generation')
parser.add_option('--check', dest='check', action='store_true',
help='check format of gyp files')
parser.add_option('--config-dir', dest='config_dir', action='store',
env_name='GYP_CONFIG_DIR', default=None,
help='The location for configuration files like '
'include.gypi.')
parser.add_option('-d', '--debug', dest='debug', metavar='DEBUGMODE',
action='append', default=[], help='turn on a debugging '
'mode for debugging GYP. Supported modes are "variables", '
'"includes" and "general" or "all" for all of them.')
parser.add_option('-D', dest='defines', action='append', metavar='VAR=VAL',
env_name='GYP_DEFINES',
help='sets variable VAR to value VAL')
parser.add_option('--depth', dest='depth', metavar='PATH', type='path',
help='set DEPTH gyp variable to a relative path to PATH')
parser.add_option('-f', '--format', dest='formats', action='append',
env_name='GYP_GENERATORS', regenerate=False,
help='output formats to generate')
parser.add_option('--msvs-version', dest='msvs_version',
regenerate=False,
help='Deprecated; use -G msvs_version=MSVS_VERSION instead')
parser.add_option('-I', '--include', dest='includes', action='append',
metavar='INCLUDE', type='path',
help='files to include in all loaded .gyp files')
parser.add_option('--depth', dest='depth', metavar='PATH', type='path',
help='set DEPTH gyp variable to a relative path to PATH')
parser.add_option('-d', '--debug', dest='debug', metavar='DEBUGMODE',
action='append', default=[], help='turn on a debugging '
'mode for debugging GYP. Supported modes are "variables", '
'"includes" and "general" or "all" for all of them.')
parser.add_option('-S', '--suffix', dest='suffix', default='',
help='suffix to add to generated files')
parser.add_option('-G', dest='generator_flags', action='append', default=[],
metavar='FLAG=VAL', env_name='GYP_GENERATOR_FLAGS',
help='sets generator flag FLAG to VAL')
@ -313,16 +311,9 @@ def gyp_main(args):
parser.add_option('--ignore-environment', dest='use_environment',
action='store_false', default=True, regenerate=False,
help='do not read options from environment variables')
parser.add_option('--check', dest='check', action='store_true',
help='check format of gyp files')
parser.add_option('--parallel', action='store_true',
env_name='GYP_PARALLEL',
help='Use multiprocessing for speed (experimental)')
parser.add_option('--toplevel-dir', dest='toplevel_dir', action='store',
default=None, metavar='DIR', type='path',
help='directory to use as the root of the source tree')
parser.add_option('--build', dest='configs', action='append',
help='configuration for build after project generation')
parser.add_option('-I', '--include', dest='includes', action='append',
metavar='INCLUDE', type='path',
help='files to include in all loaded .gyp files')
# --no-circular-check disables the check for circular relationships between
# .gyp files. These relationships should not exist, but they've only been
# observed to be harmful with the Xcode generator. Chromium's .gyp files
@ -333,13 +324,33 @@ def gyp_main(args):
parser.add_option('--no-circular-check', dest='circular_check',
action='store_false', default=True, regenerate=False,
help="don't check for circular relationships between files")
parser.add_option('--no-parallel', action='store_true', default=False,
help='Disable multiprocessing')
parser.add_option('-S', '--suffix', dest='suffix', default='',
help='suffix to add to generated files')
parser.add_option('--toplevel-dir', dest='toplevel_dir', action='store',
default=None, metavar='DIR', type='path',
help='directory to use as the root of the source tree')
parser.add_option('-R', '--root-target', dest='root_targets',
action='append', metavar='TARGET',
help='include only TARGET and its deep dependencies')
options, build_files_arg = parser.parse_args(args)
build_files = build_files_arg
# We read a few things from ~/.gyp, so set up a var for that.
# Set up the configuration directory (defaults to ~/.gyp)
if not options.config_dir:
home = None
home_dot_gyp = None
if options.use_environment:
home_dot_gyp = os.environ.get('GYP_CONFIG_DIR', None)
if home_dot_gyp:
home_dot_gyp = os.path.expanduser(home_dot_gyp)
if not home_dot_gyp:
home_vars = ['HOME']
if sys.platform in ('cygwin', 'win32'):
home_vars.append('USERPROFILE')
home = None
home_dot_gyp = None
for home_var in home_vars:
home = os.getenv(home_var)
if home != None:
@ -348,11 +359,11 @@ def gyp_main(args):
home_dot_gyp = None
else:
break
else:
home_dot_gyp = os.path.expanduser(options.config_dir)
# TODO(thomasvl): add support for ~/.gyp/defaults
options, build_files_arg = parser.parse_args(args)
build_files = build_files_arg
if home_dot_gyp and not os.path.exists(home_dot_gyp):
home_dot_gyp = None
if not options.formats:
# If no format was given on the command line, then check the env variable.
@ -377,9 +388,7 @@ def gyp_main(args):
if g_o:
options.generator_output = g_o
if not options.parallel and options.use_environment:
p = os.environ.get('GYP_PARALLEL')
options.parallel = bool(p and p != '0')
options.parallel = not options.no_parallel
for mode in options.debug:
gyp.debug[mode] = 1
@ -473,15 +482,6 @@ def gyp_main(args):
if DEBUG_GENERAL in gyp.debug.keys():
DebugOutput(DEBUG_GENERAL, "generator_flags: %s", generator_flags)
# TODO: Remove this and the option after we've gotten folks to move to the
# generator flag.
if options.msvs_version:
print >>sys.stderr, \
'DEPRECATED: Use generator flag (-G msvs_version=' + \
options.msvs_version + ') instead of --msvs-version=' + \
options.msvs_version
generator_flags['msvs_version'] = options.msvs_version
# Generate all requested formats (use a set in case we got one format request
# twice)
for format in set(options.formats):
@ -492,7 +492,8 @@ def gyp_main(args):
'build_files_arg': build_files_arg,
'gyp_binary': sys.argv[0],
'home_dot_gyp': home_dot_gyp,
'parallel': options.parallel}
'parallel': options.parallel,
'root_targets': options.root_targets}
# Start with the default variables from the command line.
[generator, flat_list, targets, data] = Load(build_files, format,
@ -528,5 +529,9 @@ def main(args):
sys.stderr.write("gyp: %s\n" % e)
return 1
# NOTE: setuptools generated console_scripts calls function with no arguments
def script_main():
return main(sys.argv[1:])
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
sys.exit(script_main())

26
tools/gyp/pylib/gyp/common.py

@ -44,6 +44,14 @@ def ExceptionAppend(e, msg):
e.args = (str(e.args[0]) + ' ' + msg,) + e.args[1:]
def FindQualifiedTargets(target, qualified_list):
"""
Given a list of qualified targets, return the qualified targets for the
specified |target|.
"""
return [t for t in qualified_list if ParseQualifiedTarget(t)[1] == target]
def ParseQualifiedTarget(target):
# Splits a qualified target into a build file, target name and toolset.
@ -131,6 +139,13 @@ def RelativePath(path, relative_to):
path = os.path.realpath(path)
relative_to = os.path.realpath(relative_to)
# On Windows, we can't create a relative path to a different drive, so just
# use the absolute path.
if sys.platform == 'win32':
if (os.path.splitdrive(path)[0].lower() !=
os.path.splitdrive(relative_to)[0].lower()):
return path
# Split the paths into components.
path_split = path.split(os.path.sep)
relative_to_split = relative_to.split(os.path.sep)
@ -401,9 +416,16 @@ def GetFlavor(params):
def CopyTool(flavor, out_path):
"""Finds (mac|sun|win)_tool.gyp in the gyp directory and copies it
"""Finds (flock|mac|win)_tool.gyp in the gyp directory and copies it
to |out_path|."""
prefix = { 'solaris': 'sun', 'mac': 'mac', 'win': 'win' }.get(flavor, None)
# aix and solaris just need flock emulation. mac and win use more complicated
# support scripts.
prefix = {
'aix': 'flock',
'solaris': 'flock',
'mac': 'mac',
'win': 'win'
}.get(flavor, None)
if not prefix:
return

12
tools/gyp/pylib/gyp/sun_tool.py → tools/gyp/pylib/gyp/flock_tool.py

@ -3,8 +3,8 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""These functions are executed via gyp-sun-tool when using the Makefile
generator."""
"""These functions are executed via gyp-flock-tool when using the Makefile
generator. Used on systems that don't have a built-in flock."""
import fcntl
import os
@ -14,14 +14,12 @@ import sys
def main(args):
executor = SunTool()
executor = FlockTool()
executor.Dispatch(args)
class SunTool(object):
"""This class performs all the SunOS tooling steps. The methods can either be
executed directly, or dispatched from an argument list."""
class FlockTool(object):
"""This class emulates the 'flock' command."""
def Dispatch(self, args):
"""Dispatches a string command to a method."""
if len(args) < 1:

96
tools/gyp/pylib/gyp/generator/android.py

@ -39,7 +39,7 @@ generator_default_variables = {
'RULE_INPUT_PATH': '$(RULE_SOURCES)',
'RULE_INPUT_EXT': '$(suffix $<)',
'RULE_INPUT_NAME': '$(notdir $<)',
'CONFIGURATION_NAME': '$(GYP_DEFAULT_CONFIGURATION)',
'CONFIGURATION_NAME': '$(GYP_CONFIGURATION)',
}
# Make supports multiple toolsets
@ -292,11 +292,9 @@ class AndroidMkWriter(object):
main_output = make.QuoteSpaces(self.LocalPathify(outputs[0]))
self.WriteLn('%s: gyp_local_path := $(LOCAL_PATH)' % main_output)
self.WriteLn('%s: gyp_intermediate_dir := '
'$(GYP_ABS_ANDROID_TOP_DIR)/$(gyp_intermediate_dir)' %
main_output)
'$(abspath $(gyp_intermediate_dir))' % main_output)
self.WriteLn('%s: gyp_shared_intermediate_dir := '
'$(GYP_ABS_ANDROID_TOP_DIR)/$(gyp_shared_intermediate_dir)' %
main_output)
'$(abspath $(gyp_shared_intermediate_dir))' % main_output)
# Android's envsetup.sh adds a number of directories to the path including
# the built host binary directory. This causes actions/rules invoked by
@ -394,11 +392,9 @@ class AndroidMkWriter(object):
main_output = outputs[0]
self.WriteLn('%s: gyp_local_path := $(LOCAL_PATH)' % main_output)
self.WriteLn('%s: gyp_intermediate_dir := '
'$(GYP_ABS_ANDROID_TOP_DIR)/$(gyp_intermediate_dir)'
% main_output)
'$(abspath $(gyp_intermediate_dir))' % main_output)
self.WriteLn('%s: gyp_shared_intermediate_dir := '
'$(GYP_ABS_ANDROID_TOP_DIR)/$(gyp_shared_intermediate_dir)'
% main_output)
'$(abspath $(gyp_shared_intermediate_dir))' % main_output)
# See explanation in WriteActions.
self.WriteLn('%s: export PATH := '
@ -413,7 +409,9 @@ class AndroidMkWriter(object):
(main_output, main_output_deps))
self.WriteLn('\t%s\n' % command)
for output in outputs[1:]:
self.WriteLn('%s: %s' % (output, main_output))
# Make each output depend on the main output, with an empty command
# to force make to notice that the mtime has changed.
self.WriteLn('%s: %s ;' % (output, main_output))
self.WriteLn('.PHONY: %s' % (rule_trigger))
self.WriteLn('%s: %s' % (rule_trigger, main_output))
self.WriteLn('')
@ -470,42 +468,39 @@ class AndroidMkWriter(object):
Args:
spec, configs: input from gyp.
"""
config = configs[spec['default_configuration']]
for configname, config in sorted(configs.iteritems()):
extracted_includes = []
self.WriteLn('\n# Flags passed to both C and C++ files.')
cflags, includes_from_cflags = self.ExtractIncludesFromCFlags(
config.get('cflags'))
config.get('cflags', []) + config.get('cflags_c', []))
extracted_includes.extend(includes_from_cflags)
self.WriteList(cflags, 'MY_CFLAGS')
self.WriteList(cflags, 'MY_CFLAGS_%s' % configname)
cflags_c, includes_from_cflags_c = self.ExtractIncludesFromCFlags(
config.get('cflags_c'))
extracted_includes.extend(includes_from_cflags_c)
self.WriteList(cflags_c, 'MY_CFLAGS_C')
self.WriteList(config.get('defines'), 'MY_DEFS', prefix='-D',
quoter=make.EscapeCppDefine)
self.WriteLn('LOCAL_CFLAGS := $(MY_CFLAGS_C) $(MY_CFLAGS) $(MY_DEFS)')
# Undefine ANDROID for host modules
# TODO: the source code should not use macro ANDROID to tell if it's host or
# target module.
if self.toolset == 'host':
self.WriteLn('# Undefine ANDROID for host modules')
self.WriteLn('LOCAL_CFLAGS += -UANDROID')
self.WriteList(config.get('defines'), 'MY_DEFS_%s' % configname,
prefix='-D', quoter=make.EscapeCppDefine)
self.WriteLn('\n# Include paths placed before CFLAGS/CPPFLAGS')
includes = list(config.get('include_dirs', []))
includes.extend(extracted_includes)
includes = map(Sourceify, map(self.LocalPathify, includes))
includes = self.NormalizeIncludePaths(includes)
self.WriteList(includes, 'LOCAL_C_INCLUDES')
self.WriteLn('LOCAL_C_INCLUDES := $(GYP_COPIED_SOURCE_ORIGIN_DIRS) '
'$(LOCAL_C_INCLUDES)')
self.WriteList(includes, 'LOCAL_C_INCLUDES_%s' % configname)
self.WriteLn('\n# Flags passed to only C++ (and not C) files.')
self.WriteList(config.get('cflags_cc'), 'LOCAL_CPPFLAGS')
self.WriteList(config.get('cflags_cc'), 'LOCAL_CPPFLAGS_%s' % configname)
self.WriteLn('\nLOCAL_CFLAGS := $(MY_CFLAGS_$(GYP_CONFIGURATION)) '
'$(MY_DEFS_$(GYP_CONFIGURATION))')
# Undefine ANDROID for host modules
# TODO: the source code should not use macro ANDROID to tell if it's host
# or target module.
if self.toolset == 'host':
self.WriteLn('# Undefine ANDROID for host modules')
self.WriteLn('LOCAL_CFLAGS += -UANDROID')
self.WriteLn('LOCAL_C_INCLUDES := $(GYP_COPIED_SOURCE_ORIGIN_DIRS) '
'$(LOCAL_C_INCLUDES_$(GYP_CONFIGURATION))')
self.WriteLn('LOCAL_CPPFLAGS := $(LOCAL_CPPFLAGS_$(GYP_CONFIGURATION))')
def WriteSources(self, spec, configs, extra_sources):
@ -698,24 +693,6 @@ class AndroidMkWriter(object):
assert spec.get('product_dir') is None # TODO: not supported?
return os.path.join(path, self.ComputeOutputBasename(spec))
def NormalizeLdFlags(self, ld_flags):
""" Clean up ldflags from gyp file.
Remove any ldflags that contain android_top_dir.
Args:
ld_flags: ldflags from gyp files.
Returns:
clean ldflags
"""
clean_ldflags = []
for flag in ld_flags:
if self.android_top_dir in flag:
continue
clean_ldflags.append(flag)
return clean_ldflags
def NormalizeIncludePaths(self, include_paths):
""" Normalize include_paths.
Convert absolute paths to relative to the Android top directory;
@ -747,7 +724,6 @@ class AndroidMkWriter(object):
"""
clean_cflags = []
include_paths = []
if cflags:
for flag in cflags:
if flag.startswith('-I'):
include_paths.append(flag[2:])
@ -816,14 +792,11 @@ class AndroidMkWriter(object):
spec, configs: input from gyp.
link_deps: link dependency list; see ComputeDeps()
"""
config = configs[spec['default_configuration']]
# LDFLAGS
for configname, config in sorted(configs.iteritems()):
ldflags = list(config.get('ldflags', []))
static_flags, dynamic_flags = self.ComputeAndroidLibraryModuleNames(
ldflags)
self.WriteLn('')
self.WriteList(self.NormalizeLdFlags(ldflags), 'LOCAL_LDFLAGS')
self.WriteList(ldflags, 'LOCAL_LDFLAGS_%s' % configname)
self.WriteLn('\nLOCAL_LDFLAGS := $(LOCAL_LDFLAGS_$(GYP_CONFIGURATION))')
# Libraries (i.e. -lfoo)
libraries = gyp.common.uniquer(spec.get('libraries', []))
@ -834,12 +807,12 @@ class AndroidMkWriter(object):
static_link_deps = [x[1] for x in link_deps if x[0] == 'static']
shared_link_deps = [x[1] for x in link_deps if x[0] == 'shared']
self.WriteLn('')
self.WriteList(static_flags + static_libs + static_link_deps,
self.WriteList(static_libs + static_link_deps,
'LOCAL_STATIC_LIBRARIES')
self.WriteLn('# Enable grouping to fix circular references')
self.WriteLn('LOCAL_GROUP_STATIC_LIBRARIES := true')
self.WriteLn('')
self.WriteList(dynamic_flags + dynamic_libs + shared_link_deps,
self.WriteList(dynamic_libs + shared_link_deps,
'LOCAL_SHARED_LIBRARIES')
@ -1083,10 +1056,7 @@ def GenerateOutput(target_list, target_dicts, data, params):
os.path.dirname(makefile_path))
include_list.add(mkfile_rel_path)
# Some tools need to know the absolute path of the top directory.
root_makefile.write('GYP_ABS_ANDROID_TOP_DIR := $(shell pwd)\n')
root_makefile.write('GYP_DEFAULT_CONFIGURATION := %s\n' %
default_configuration)
root_makefile.write('GYP_CONFIGURATION ?= %s\n' % default_configuration)
# Write out the sorted list of includes.
root_makefile.write('\n')

14
tools/gyp/pylib/gyp/generator/dump_dependency_json.py

@ -45,19 +45,7 @@ def CalculateVariables(default_variables, params):
generator_additional_path_sections = getattr(msvs_generator,
'generator_additional_path_sections', [])
# Set a variable so conditions can be based on msvs_version.
msvs_version = gyp.msvs_emulation.GetVSVersion(generator_flags)
default_variables['MSVS_VERSION'] = msvs_version.ShortName()
# To determine processor word size on Windows, in addition to checking
# PROCESSOR_ARCHITECTURE (which reflects the word size of the current
# process), it is also necessary to check PROCESSOR_ARCHITEW6432 (which
# contains the actual word size of the system when running thru WOW64).
if ('64' in os.environ.get('PROCESSOR_ARCHITECTURE', '') or
'64' in os.environ.get('PROCESSOR_ARCHITEW6432', '')):
default_variables['MSVS_OS_BITS'] = 64
else:
default_variables['MSVS_OS_BITS'] = 32
gyp.msvs_emulation.CalculateCommonVariables(default_variables, params)
def CalculateGeneratorInputInfo(params):

47
tools/gyp/pylib/gyp/generator/eclipse.py

@ -22,6 +22,7 @@ import os.path
import subprocess
import gyp
import gyp.common
import gyp.msvs_emulation
import shlex
generator_wants_static_library_dependencies_adjusted = False
@ -52,7 +53,18 @@ def CalculateVariables(default_variables, params):
generator_flags = params.get('generator_flags', {})
for key, val in generator_flags.items():
default_variables.setdefault(key, val)
default_variables.setdefault('OS', gyp.common.GetFlavor(params))
flavor = gyp.common.GetFlavor(params)
default_variables.setdefault('OS', flavor)
if flavor == 'win':
# Copy additional generator configuration data from VS, which is shared
# by the Eclipse generator.
import gyp.generator.msvs as msvs_generator
generator_additional_non_configuration_keys = getattr(msvs_generator,
'generator_additional_non_configuration_keys', [])
generator_additional_path_sections = getattr(msvs_generator,
'generator_additional_path_sections', [])
gyp.msvs_emulation.CalculateCommonVariables(default_variables, params)
def CalculateGeneratorInputInfo(params):
@ -65,7 +77,7 @@ def CalculateGeneratorInputInfo(params):
def GetAllIncludeDirectories(target_list, target_dicts,
shared_intermediate_dirs, config_name):
shared_intermediate_dirs, config_name, params):
"""Calculate the set of include directories to be used.
Returns:
@ -76,6 +88,9 @@ def GetAllIncludeDirectories(target_list, target_dicts,
gyp_includes_set = set()
compiler_includes_list = []
flavor = gyp.common.GetFlavor(params)
if flavor == 'win':
generator_flags = params.get('generator_flags', {})
for target_name in target_list:
target = target_dicts[target_name]
if config_name in target['configurations']:
@ -85,6 +100,10 @@ def GetAllIncludeDirectories(target_list, target_dicts,
# may be done in gyp files to force certain includes to come at the end.
# TODO(jgreenwald): Change the gyp files to not abuse cflags for this, and
# remove this.
if flavor == 'win':
msvs_settings = gyp.msvs_emulation.MsvsSettings(target, generator_flags)
cflags = msvs_settings.GetCflags(config_name)
else:
cflags = config['cflags']
for cflag in cflags:
include_dir = ''
@ -146,7 +165,7 @@ def GetCompilerPath(target_list, target_dicts, data):
return 'gcc'
def GetAllDefines(target_list, target_dicts, data, config_name):
def GetAllDefines(target_list, target_dicts, data, config_name, params):
"""Calculate the defines for a project.
Returns:
@ -156,22 +175,33 @@ def GetAllDefines(target_list, target_dicts, data, config_name):
# Get defines declared in the gyp files.
all_defines = {}
flavor = gyp.common.GetFlavor(params)
if flavor == 'win':
generator_flags = params.get('generator_flags', {})
for target_name in target_list:
target = target_dicts[target_name]
if flavor == 'win':
msvs_settings = gyp.msvs_emulation.MsvsSettings(target, generator_flags)
extra_defines = msvs_settings.GetComputedDefines(config_name)
else:
extra_defines = []
if config_name in target['configurations']:
config = target['configurations'][config_name]
for define in config['defines']:
target_defines = config['defines']
else:
target_defines = []
for define in target_defines + extra_defines:
split_define = define.split('=', 1)
if len(split_define) == 1:
split_define.append('1')
if split_define[0].strip() in all_defines:
# Already defined
continue
all_defines[split_define[0].strip()] = split_define[1].strip()
# Get default compiler defines (if possible).
if flavor == 'win':
return all_defines # Default defines already processed in the loop above.
cc_target = GetCompilerPath(target_list, target_dicts, data)
if cc_target:
command = shlex.split(cc_target)
@ -250,9 +280,10 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params,
eclipse_langs = ['C++ Source File', 'C Source File', 'Assembly Source File',
'GNU C++', 'GNU C', 'Assembly']
include_dirs = GetAllIncludeDirectories(target_list, target_dicts,
shared_intermediate_dirs, config_name)
shared_intermediate_dirs, config_name,
params)
WriteIncludePaths(out, eclipse_langs, include_dirs)
defines = GetAllDefines(target_list, target_dicts, data, config_name)
defines = GetAllDefines(target_list, target_dicts, data, config_name, params)
WriteMacros(out, eclipse_langs, defines)
out.write('</cdtprojectproperties>\n')

79
tools/gyp/pylib/gyp/generator/make.py

@ -1,4 +1,4 @@
# Copyright (c) 2012 Google Inc. All rights reserved.
# Copyright (c) 2013 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
@ -166,15 +166,11 @@ cmd_alink = rm -f $@ && ./gyp-mac-tool filter-libtool libtool $(GYP_LIBTOOLFLAGS
quiet_cmd_link = LINK($(TOOLSET)) $@
cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS)
# TODO(thakis): Find out and document the difference between shared_library and
# loadable_module on mac.
quiet_cmd_solink = SOLINK($(TOOLSET)) $@
cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS)
# TODO(thakis): The solink_module rule is likely wrong. Xcode seems to pass
# -bundle -single_module here (for osmesa.so).
quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@
cmd_solink_module = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS)
cmd_solink_module = $(LINK.$(TOOLSET)) -bundle $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS)
"""
LINK_COMMANDS_ANDROID = """\
@ -205,6 +201,24 @@ cmd_solink_module_host = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(T
"""
LINK_COMMANDS_AIX = """\
quiet_cmd_alink = AR($(TOOLSET)) $@
cmd_alink = rm -f $@ && $(AR.$(TOOLSET)) crs $@ $(filter %.o,$^)
quiet_cmd_alink_thin = AR($(TOOLSET)) $@
cmd_alink_thin = rm -f $@ && $(AR.$(TOOLSET)) crs $@ $(filter %.o,$^)
quiet_cmd_link = LINK($(TOOLSET)) $@
cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(LD_INPUTS) $(LIBS)
quiet_cmd_solink = SOLINK($(TOOLSET)) $@
cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(LD_INPUTS) $(LIBS)
quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@
cmd_solink_module = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS)
"""
# Header of toplevel Makefile.
# This should go into the build tree, but it's easier to keep it here for now.
SHARED_HEADER = ("""\
@ -250,6 +264,14 @@ all_deps :=
%(make_global_settings)s
CC.target ?= %(CC.target)s
CFLAGS.target ?= $(CFLAGS)
CXX.target ?= %(CXX.target)s
CXXFLAGS.target ?= $(CXXFLAGS)
LINK.target ?= %(LINK.target)s
LDFLAGS.target ?= $(LDFLAGS)
AR.target ?= $(AR)
# C++ apps need to be linked with g++.
#
# Note: flock is used to serialize linking. Linking is a memory-intensive
@ -261,14 +283,6 @@ all_deps :=
# This will allow make to invoke N linker processes as specified in -jN.
LINK ?= %(flock)s $(builddir)/linker.lock $(CXX.target)
CC.target ?= %(CC.target)s
CFLAGS.target ?= $(CFLAGS)
CXX.target ?= %(CXX.target)s
CXXFLAGS.target ?= $(CXXFLAGS)
LINK.target ?= %(LINK.target)s
LDFLAGS.target ?= $(LDFLAGS)
AR.target ?= $(AR)
# TODO(evan): move all cross-compilation logic to gyp-time so we don't need
# to replicate this environment fallback in make as well.
CC.host ?= %(CC.host)s
@ -483,14 +497,6 @@ quiet_cmd_infoplist = INFOPLIST $@
cmd_infoplist = $(CC.$(TOOLSET)) -E -P -Wno-trigraphs -x c $(INFOPLIST_DEFINES) "$<" -o "$@"
"""
SHARED_HEADER_SUN_COMMANDS = """
# gyp-sun-tool is written next to the root Makefile by gyp.
# Use $(4) for the command, since $(2) and $(3) are used as flag by do_cmd
# already.
quiet_cmd_sun_tool = SUNTOOL $(4) $<
cmd_sun_tool = ./gyp-sun-tool $(4) $< "$@"
"""
def WriteRootHeaderSuffixRules(writer):
extensions = sorted(COMPILABLE_EXTENSIONS.keys(), key=str.lower)
@ -978,7 +984,13 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
self.WriteLn('%s: obj := $(abs_obj)' % outputs[0])
self.WriteLn('%s: builddir := $(abs_builddir)' % outputs[0])
self.WriteMakeRule(outputs, inputs + ['FORCE_DO_CMD'], actions)
# Spaces in rule filenames are not supported, but rule variables have
# spaces in them (e.g. RULE_INPUT_PATH expands to '$(abspath $<)').
# The spaces within the variables are valid, so remove the variables
# before checking.
variables_with_spaces = re.compile(r'\$\([^ ]* \$<\)')
for output in outputs:
output = re.sub(variables_with_spaces, '', output)
assert ' ' not in output, (
"Spaces in rule filenames not yet supported (%s)" % output)
self.WriteLn('all_deps += %s' % ' '.join(outputs))
@ -1402,7 +1414,7 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
# TARGET_POSTBUILDS_$(BUILDTYPE) is added to postbuilds later on.
gyp_to_build = gyp.common.InvertRelativePath(self.path)
target_postbuild = self.xcode_settings.GetTargetPostbuilds(
target_postbuild = self.xcode_settings.AddImplicitPostbuilds(
configname,
QuoteSpaces(os.path.normpath(os.path.join(gyp_to_build,
self.output))),
@ -1413,12 +1425,14 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
else:
ldflags = config.get('ldflags', [])
# Compute an rpath for this output if needed.
if any(dep.endswith('.so') for dep in deps):
if any(dep.endswith('.so') or '.so.' in dep for dep in deps):
# We want to get the literal string "$ORIGIN" into the link command,
# so we need lots of escaping.
ldflags.append(r'-Wl,-rpath=\$$ORIGIN/lib.%s/' % self.toolset)
ldflags.append(r'-Wl,-rpath-link=\$(builddir)/lib.%s/' %
self.toolset)
library_dirs = config.get('library_dirs', [])
ldflags += [('-L%s' % library_dir) for library_dir in library_dirs]
self.WriteList(ldflags, 'LDFLAGS_%s' % configname)
if self.flavor == 'mac':
self.WriteList(self.xcode_settings.GetLibtoolflags(configname),
@ -1881,13 +1895,15 @@ def WriteAutoRegenerationRule(params, root_makefile, makefile_name,
options = params['options']
build_files_args = [gyp.common.RelativePath(filename, options.toplevel_dir)
for filename in params['build_files_arg']]
gyp_binary = gyp.common.FixIfRelativePath(params['gyp_binary'],
options.toplevel_dir)
if not gyp_binary.startswith(os.sep):
gyp_binary = os.path.join('.', gyp_binary)
root_makefile.write(
"quiet_cmd_regen_makefile = ACTION Regenerating $@\n"
"cmd_regen_makefile = %(cmd)s\n"
"cmd_regen_makefile = cd $(srcdir); %(cmd)s\n"
"%(makefile_name)s: %(deps)s\n"
"\t$(call do_cmd,regen_makefile)\n\n" % {
'makefile_name': makefile_name,
@ -1980,25 +1996,30 @@ def GenerateOutput(target_list, target_dicts, data, params):
})
elif flavor == 'solaris':
header_params.update({
'flock': './gyp-sun-tool flock',
'flock': './gyp-flock-tool flock',
'flock_index': 2,
'extra_commands': SHARED_HEADER_SUN_COMMANDS,
})
elif flavor == 'freebsd':
# Note: OpenBSD has sysutils/flock. lockf seems to be FreeBSD specific.
header_params.update({
'flock': 'lockf',
})
elif flavor == 'aix':
header_params.update({
'link_commands': LINK_COMMANDS_AIX,
'flock': './gyp-flock-tool flock',
'flock_index': 2,
})
header_params.update({
'CC.target': GetEnvironFallback(('CC_target', 'CC'), '$(CC)'),
'AR.target': GetEnvironFallback(('AR_target', 'AR'), '$(AR)'),
'CXX.target': GetEnvironFallback(('CXX_target', 'CXX'), '$(CXX)'),
'LINK.target': GetEnvironFallback(('LD_target', 'LD'), '$(LINK)'),
'LINK.target': GetEnvironFallback(('LINK_target', 'LINK'), '$(LINK)'),
'CC.host': GetEnvironFallback(('CC_host',), 'gcc'),
'AR.host': GetEnvironFallback(('AR_host',), 'ar'),
'CXX.host': GetEnvironFallback(('CXX_host',), 'g++'),
'LINK.host': GetEnvironFallback(('LD_host',), 'g++'),
'LINK.host': GetEnvironFallback(('LINK_host',), '$(CXX.host)'),
})
build_file, _, _ = gyp.common.ParseQualifiedTarget(target_list[0])

175
tools/gyp/pylib/gyp/generator/msvs.py

@ -66,6 +66,10 @@ generator_additional_non_configuration_keys = [
'msvs_cygwin_shell',
'msvs_large_pdb',
'msvs_shard',
'msvs_external_builder',
'msvs_external_builder_out_dir',
'msvs_external_builder_build_cmd',
'msvs_external_builder_clean_cmd',
]
@ -221,7 +225,7 @@ def _ToolSetOrAppend(tools, tool_name, setting, value, only_if_unset=False):
tool = tools[tool_name]
if tool.get(setting):
if only_if_unset: return
if type(tool[setting]) == list:
if type(tool[setting]) == list and type(value) == list:
tool[setting] += value
else:
raise TypeError(
@ -459,8 +463,7 @@ def _FindRuleTriggerFiles(rule, sources):
Returns:
The list of sources that trigger a particular rule.
"""
rule_ext = rule['extension']
return [s for s in sources if s.endswith('.' + rule_ext)]
return rule.get('rule_sources', [])
def _RuleInputsAndOutputs(rule, trigger_file):
@ -784,10 +787,10 @@ def _GenerateRulesForMSVS(p, output_dir, options, spec,
if rules_external:
_GenerateExternalRules(rules_external, output_dir, spec,
sources, options, actions_to_add)
_AdjustSourcesForRules(rules, sources, excluded_sources)
_AdjustSourcesForRules(spec, rules, sources, excluded_sources)
def _AdjustSourcesForRules(rules, sources, excluded_sources):
def _AdjustSourcesForRules(spec, rules, sources, excluded_sources):
# Add outputs generated by each rule (if applicable).
for rule in rules:
# Done if not processing outputs as sources.
@ -800,6 +803,7 @@ def _AdjustSourcesForRules(rules, sources, excluded_sources):
outputs = set(_FixPaths(outputs))
inputs.remove(_FixPath(trigger_file))
sources.update(inputs)
if not spec.get('msvs_external_builder'):
excluded_sources.update(inputs)
sources.update(outputs)
@ -1027,12 +1031,13 @@ def _AddConfigurationToMSVSProject(p, spec, config_type, config_name, config):
spec: The target dictionary containing the properties of the target.
config_type: The configuration type, a number as defined by Microsoft.
config_name: The name of the configuration.
config: The dictionnary that defines the special processing to be done
config: The dictionary that defines the special processing to be done
for this configuration.
"""
# Get the information for this configuration
include_dirs, resource_include_dirs = _GetIncludeDirs(config)
libraries = _GetLibraries(spec)
library_dirs = _GetLibraryDirs(config)
out_file, vc_tool, _ = _GetOutputFilePathAndTool(spec, msbuild=False)
defines = _GetDefines(config)
defines = [_EscapeCppDefineForMSVS(d) for d in defines]
@ -1062,6 +1067,8 @@ def _AddConfigurationToMSVSProject(p, spec, config_type, config_name, config):
'AdditionalIncludeDirectories', resource_include_dirs)
# Add in libraries.
_ToolAppend(tools, 'VCLinkerTool', 'AdditionalDependencies', libraries)
_ToolAppend(tools, 'VCLinkerTool', 'AdditionalLibraryDirectories',
library_dirs)
if out_file:
_ToolAppend(tools, vc_tool, 'OutputFile', out_file, only_if_unset=True)
# Add defines.
@ -1101,7 +1108,7 @@ def _GetIncludeDirs(config):
"""Returns the list of directories to be used for #include directives.
Arguments:
config: The dictionnary that defines the special processing to be done
config: The dictionary that defines the special processing to be done
for this configuration.
Returns:
The list of directory paths.
@ -1117,6 +1124,21 @@ def _GetIncludeDirs(config):
return include_dirs, resource_include_dirs
def _GetLibraryDirs(config):
"""Returns the list of directories to be used for library search paths.
Arguments:
config: The dictionary that defines the special processing to be done
for this configuration.
Returns:
The list of directory paths.
"""
library_dirs = config.get('library_dirs', [])
library_dirs = _FixPaths(library_dirs)
return library_dirs
def _GetLibraries(spec):
"""Returns the list of libraries for this configuration.
@ -1181,11 +1203,29 @@ def _GetOutputFilePathAndTool(spec, msbuild):
return out_file, vc_tool, msbuild_tool
def _GetOutputTargetExt(spec):
"""Returns the extension for this target, including the dot
If product_extension is specified, set target_extension to this to avoid
MSB8012, returns None otherwise. Ignores any target_extension settings in
the input files.
Arguments:
spec: The target dictionary containing the properties of the target.
Returns:
A string with the extension, or None
"""
target_extension = spec.get('product_extension')
if target_extension:
return '.' + target_extension
return None
def _GetDefines(config):
"""Returns the list of preprocessor definitions for this configuation.
Arguments:
config: The dictionnary that defines the special processing to be done
config: The dictionary that defines the special processing to be done
for this configuration.
Returns:
The list of preprocessor definitions.
@ -1222,7 +1262,7 @@ def _ConvertToolsToExpectedForm(tools):
"""Convert tools to a form expected by Visual Studio.
Arguments:
tools: A dictionnary of settings; the tool name is the key.
tools: A dictionary of settings; the tool name is the key.
Returns:
A list of Tool objects.
"""
@ -1251,8 +1291,8 @@ def _AddConfigurationToMSVS(p, spec, tools, config, config_type, config_name):
Arguments:
p: The target project being generated.
spec: the target project dict.
tools: A dictionnary of settings; the tool name is the key.
config: The dictionnary that defines the special processing to be done
tools: A dictionary of settings; the tool name is the key.
config: The dictionary that defines the special processing to be done
for this configuration.
config_type: The configuration type, a number as defined by Microsoft.
config_name: The name of the configuration.
@ -1324,6 +1364,7 @@ def _PrepareListOfSources(spec, generator_flags, gyp_file):
# Add all inputs to sources and excluded sources.
inputs = set(inputs)
sources.update(inputs)
if not spec.get('msvs_external_builder'):
excluded_sources.update(inputs)
if int(a.get('process_outputs_as_sources', False)):
_AddNormalizedSources(sources, a.get('outputs', []))
@ -1695,14 +1736,58 @@ def _CreateProjectObjects(target_list, target_dicts, options, msvs_version):
obj.set_msbuild_toolset(
_GetMsbuildToolsetOfProject(proj_path, spec, msvs_version))
projects[qualified_target] = obj
# Set all the dependencies
# Set all the dependencies, but not if we are using an external builder like
# ninja
for project in projects.values():
if not project.spec.get('msvs_external_builder'):
deps = project.spec.get('dependencies', [])
deps = [projects[d] for d in deps]
project.set_dependencies(deps)
return projects
def _InitNinjaFlavor(options, target_list, target_dicts):
"""Initialize targets for the ninja flavor.
This sets up the necessary variables in the targets to generate msvs projects
that use ninja as an external builder. The variables in the spec are only set
if they have not been set. This allows individual specs to override the
default values initialized here.
Arguments:
options: Options provided to the generator.
target_list: List of target pairs: 'base/base.gyp:base'.
target_dicts: Dict of target properties keyed on target pair.
"""
for qualified_target in target_list:
spec = target_dicts[qualified_target]
if spec.get('msvs_external_builder'):
# The spec explicitly defined an external builder, so don't change it.
continue
path_to_ninja = spec.get('msvs_path_to_ninja', 'ninja.exe')
spec['msvs_external_builder'] = 'ninja'
if not spec.get('msvs_external_builder_out_dir'):
spec['msvs_external_builder_out_dir'] = \
options.depth + '/out/$(Configuration)'
if not spec.get('msvs_external_builder_build_cmd'):
spec['msvs_external_builder_build_cmd'] = [
path_to_ninja,
'-C',
'$(OutDir)',
'$(ProjectName)',
]
if not spec.get('msvs_external_builder_clean_cmd'):
spec['msvs_external_builder_clean_cmd'] = [
path_to_ninja,
'-C',
'$(OutDir)',
'-t',
'clean',
'$(ProjectName)',
]
def CalculateVariables(default_variables, params):
"""Generated variables that require params to be known."""
@ -1727,6 +1812,9 @@ def CalculateVariables(default_variables, params):
else:
default_variables['MSVS_OS_BITS'] = 32
if gyp.common.GetFlavor(params) == 'ninja':
default_variables['SHARED_INTERMEDIATE_DIR'] = '$(OutDir)gen'
def PerformBuild(data, configurations, params):
options = params['options']
@ -1774,6 +1862,10 @@ def GenerateOutput(target_list, target_dicts, data, params):
(target_list, target_dicts) = MSVSUtil.InsertLargePdbShims(
target_list, target_dicts, generator_default_variables)
# Optionally configure each spec to use ninja as the external builder.
if params.get('flavor') == 'ninja':
_InitNinjaFlavor(options, target_list, target_dicts)
# Prepare the set of configurations.
configs = set()
for qualified_target in target_list:
@ -1964,7 +2056,7 @@ def _GenerateRulesForMSBuild(output_dir, options, spec,
if rules_external:
_GenerateExternalRules(rules_external, output_dir, spec,
sources, options, actions_to_add)
_AdjustSourcesForRules(rules, sources, excluded_sources)
_AdjustSourcesForRules(spec, rules, sources, excluded_sources)
class MSBuildRule(object):
@ -2560,6 +2652,10 @@ def _GetMSBuildAttributes(spec, config, build_file):
target_name = prefix + product_name
msbuild_attributes['TargetName'] = target_name
if spec.get('msvs_external_builder'):
external_out_dir = spec.get('msvs_external_builder_out_dir', '.')
msbuild_attributes['OutputDirectory'] = _FixPath(external_out_dir) + '\\'
# Make sure that 'TargetPath' matches 'Lib.OutputFile' or 'Link.OutputFile'
# (depending on the tool used) to avoid MSB8012 warning.
msbuild_tool_map = {
@ -2574,6 +2670,9 @@ def _GetMSBuildAttributes(spec, config, build_file):
out_file = msbuild_settings[msbuild_tool].get('OutputFile')
if out_file:
msbuild_attributes['TargetPath'] = _FixPath(out_file)
target_ext = msbuild_settings[msbuild_tool].get('TargetExt')
if target_ext:
msbuild_attributes['TargetExt'] = target_ext
return msbuild_attributes
@ -2609,6 +2708,9 @@ def _GetMSBuildConfigurationGlobalProperties(spec, configurations, build_file):
if attributes.get('TargetPath'):
_AddConditionalProperty(properties, condition, 'TargetPath',
attributes['TargetPath'])
if attributes.get('TargetExt'):
_AddConditionalProperty(properties, condition, 'TargetExt',
attributes['TargetExt'])
if new_paths:
_AddConditionalProperty(properties, condition, 'ExecutablePath',
@ -2727,7 +2829,9 @@ def _FinalizeMSBuildSettings(spec, configuration):
msbuild_settings = MSVSSettings.ConvertToMSBuildSettings(msvs_settings)
include_dirs, resource_include_dirs = _GetIncludeDirs(configuration)
libraries = _GetLibraries(spec)
library_dirs = _GetLibraryDirs(configuration)
out_file, _, msbuild_tool = _GetOutputFilePathAndTool(spec, msbuild=True)
target_ext = _GetOutputTargetExt(spec)
defines = _GetDefines(configuration)
if converted:
# Visual Studio 2010 has TR1
@ -2760,9 +2864,14 @@ def _FinalizeMSBuildSettings(spec, configuration):
# set, to prevent inheriting default libraries from the environment.
_ToolSetOrAppend(msbuild_settings, 'Link', 'AdditionalDependencies',
libraries)
_ToolAppend(msbuild_settings, 'Link', 'AdditionalLibraryDirectories',
library_dirs)
if out_file:
_ToolAppend(msbuild_settings, msbuild_tool, 'OutputFile', out_file,
only_if_unset=True)
if target_ext:
_ToolAppend(msbuild_settings, msbuild_tool, 'TargetExt', target_ext,
only_if_unset=True)
# Add defines.
_ToolAppend(msbuild_settings, 'ClCompile',
'PreprocessorDefinitions', defines)
@ -2778,7 +2887,7 @@ def _FinalizeMSBuildSettings(spec, configuration):
_ToolAppend(msbuild_settings, 'ClCompile',
'PrecompiledHeaderFile', precompiled_header)
_ToolAppend(msbuild_settings, 'ClCompile',
'ForcedIncludeFiles', precompiled_header)
'ForcedIncludeFiles', [precompiled_header])
# Loadable modules don't generate import libraries;
# tell dependent projects to not expect one.
if spec['type'] == 'loadable_module':
@ -2958,15 +3067,25 @@ def _GenerateMSBuildProject(project, options, version, generator_flags):
targets_files_of_rules = set()
extension_to_rule_name = {}
list_excluded = generator_flags.get('msvs_list_excluded_files', True)
# Don't generate rules if we are using an external builder like ninja.
if not spec.get('msvs_external_builder'):
_GenerateRulesForMSBuild(project_dir, options, spec,
sources, excluded_sources,
props_files_of_rules, targets_files_of_rules,
actions_to_add, extension_to_rule_name)
else:
rules = spec.get('rules', [])
_AdjustSourcesForRules(spec, rules, sources, excluded_sources)
sources, excluded_sources, excluded_idl = (
_AdjustSourcesAndConvertToFilterHierarchy(spec, options,
project_dir, sources,
excluded_sources,
list_excluded))
# Don't add actions if we are using an external builder like ninja.
if not spec.get('msvs_external_builder'):
_AddActions(actions_to_add, spec, project.build_file)
_AddCopies(actions_to_add, spec)
@ -3022,6 +3141,9 @@ def _GenerateMSBuildProject(project, options, version, generator_flags):
content += import_cpp_targets_section
content += _GetMSBuildExtensionTargets(targets_files_of_rules)
if spec.get('msvs_external_builder'):
content += _GetMSBuildExternalBuilderTargets(spec)
# TODO(jeanluc) File a bug to get rid of runas. We had in MSVS:
# has_run_as = _WriteMSVSUserFile(project.path, version, spec)
@ -3030,6 +3152,31 @@ def _GenerateMSBuildProject(project, options, version, generator_flags):
return missing_sources
def _GetMSBuildExternalBuilderTargets(spec):
  """Builds the MSBuild targets that delegate to an external builder.

  Only a "Build" and a "Clean" target are emitted; each simply shells out
  to the command line configured on the spec.

  Arguments:
    spec: The gyp target spec.
  Returns:
    List of MSBuild 'Target' specs.
  """
  external_targets = []
  for target_name, cmd_key in (('Build', 'msvs_external_builder_build_cmd'),
                               ('Clean', 'msvs_external_builder_clean_cmd')):
    command = _BuildCommandLineForRuleRaw(spec, spec[cmd_key],
                                          False, False, False, False)
    external_targets.append(['Target', {'Name': target_name},
                             ['Exec', {'Command': command}]])
  return external_targets
def _GetMSBuildExtensions(props_files_of_rules):
extensions = ['ImportGroup', {'Label': 'ExtensionSettings'}]
for props_file in props_files_of_rules:

803
tools/gyp/pylib/gyp/generator/ninja.py

File diff suppressed because it is too large

8
tools/gyp/pylib/gyp/generator/ninja_test.py

@ -14,9 +14,9 @@ import TestCommon
class TestPrefixesAndSuffixes(unittest.TestCase):
if sys.platform in ('win32', 'cygwin'):
def test_BinaryNamesWindows(self):
writer = ninja.NinjaWriter('foo', 'wee', '.', '.', 'ninja.build', 'win')
writer = ninja.NinjaWriter('foo', 'wee', '.', '.', 'build.ninja', '.',
'build.ninja', 'win')
spec = { 'target_name': 'wee' }
self.assertTrue(writer.ComputeOutputFileName(spec, 'executable').
endswith('.exe'))
@ -25,9 +25,9 @@ class TestPrefixesAndSuffixes(unittest.TestCase):
self.assertTrue(writer.ComputeOutputFileName(spec, 'static_library').
endswith('.lib'))
if sys.platform == 'linux2':
def test_BinaryNamesLinux(self):
writer = ninja.NinjaWriter('foo', 'wee', '.', '.', 'ninja.build', 'linux')
writer = ninja.NinjaWriter('foo', 'wee', '.', '.', 'build.ninja', '.',
'build.ninja', 'linux')
spec = { 'target_name': 'wee' }
self.assertTrue('.' not in writer.ComputeOutputFileName(spec,
'executable'))

1072
tools/gyp/pylib/gyp/generator/scons.py

File diff suppressed because it is too large

73
tools/gyp/pylib/gyp/generator/xcode.py

@ -72,6 +72,7 @@ generator_additional_non_configuration_keys = [
'mac_bundle_resources',
'mac_framework_headers',
'mac_framework_private_headers',
'mac_xctest_bundle',
'xcode_create_dependents_test_runner',
]
@ -480,39 +481,6 @@ sys.exit(subprocess.call(sys.argv[1:]))" """
raise
cached_xcode_version = None
def InstalledXcodeVersion():
  """Fetches the installed version of Xcode; returns an empty string if it
  is unable to figure it out.  The result (including failure) is cached in
  |cached_xcode_version| so xcodebuild is invoked at most once."""
  global cached_xcode_version
  # Idiomatic identity test (was `not ... is None`).
  if cached_xcode_version is not None:
    return cached_xcode_version

  # Default to an empty string so a failure below is cached as well.
  cached_xcode_version = ''

  # Collect xcodebuild's version information.
  try:
    import subprocess
    cmd = ['/usr/bin/xcodebuild', '-version']
    proc = subprocess.Popen(cmd, stdout=subprocess.PIPE)
    xcodebuild_version_info = proc.communicate()[0]
    # Any error, return empty string.
    if proc.returncode:
      xcodebuild_version_info = ''
  except OSError:
    # We failed to launch the tool.
    xcodebuild_version_info = ''

  # Pull out the Xcode version itself.
  match_line = re.search('^Xcode (.*)$', xcodebuild_version_info, re.MULTILINE)
  if match_line:
    cached_xcode_version = match_line.group(1)
  # Done!
  return cached_xcode_version
def AddSourceToTarget(source, type, pbxp, xct):
# TODO(mark): Perhaps source_extensions and library_extensions can be made a
# little bit fancier.
@ -579,13 +547,13 @@ def ExpandXcodeVariables(string, expansions):
return string
def EscapeXCodeArgument(s):
  """Quote an argument for Xcode.

  Backslash and double-quote literals are escaped and the whole value is
  wrapped in double quotes so Xcode does not split it on spaces.
  """
  escaped = s.replace('\\', '\\\\').replace('"', '\\"')
  return '"%s"' % escaped
# Characters that need a backslash escape inside an Xcode define:
# backslash, double quote, single quote, and space.
_xcode_define_re = re.compile(r'([\\\"\' ])')
def EscapeXcodeDefine(s):
  """Escape a preprocessor define for Xcode.

  Each backslash, quote, and space gains a backslash escape so Xcode does
  not split on spaces or misread the literals.  The value is deliberately
  NOT wrapped in quotes: quoting would make Xcode interpret variables such
  as $(inherited) incorrectly.
  """
  return _xcode_define_re.sub(r'\\\1', s)
def PerformBuild(data, configurations, params):
@ -675,6 +643,7 @@ def GenerateOutput(target_list, target_dicts, data, params):
'static_library': 'com.apple.product-type.library.static',
'executable+bundle': 'com.apple.product-type.application',
'loadable_module+bundle': 'com.apple.product-type.bundle',
'loadable_module+xctest': 'com.apple.product-type.bundle.unit-test',
'shared_library+bundle': 'com.apple.product-type.framework',
}
@ -684,11 +653,18 @@ def GenerateOutput(target_list, target_dicts, data, params):
}
type = spec['type']
is_bundle = int(spec.get('mac_bundle', 0))
is_xctest = int(spec.get('mac_xctest_bundle', 0))
is_bundle = int(spec.get('mac_bundle', 0)) or is_xctest
if type != 'none':
type_bundle_key = type
if is_bundle:
if is_xctest:
type_bundle_key += '+xctest'
assert type == 'loadable_module', (
'mac_xctest_bundle targets must have type loadable_module '
'(target %s)' % target_name)
elif is_bundle:
type_bundle_key += '+bundle'
xctarget_type = gyp.xcodeproj_file.PBXNativeTarget
try:
target_properties['productType'] = _types[type_bundle_key]
@ -701,6 +677,9 @@ def GenerateOutput(target_list, target_dicts, data, params):
assert not is_bundle, (
'mac_bundle targets cannot have type none (target "%s")' %
target_name)
assert not is_xctest, (
'mac_xctest_bundle targets cannot have type none (target "%s")' %
target_name)
target_product_name = spec.get('product_name')
if target_product_name is not None:
@ -1053,7 +1032,7 @@ def GenerateOutput(target_list, target_dicts, data, params):
if [ "${JOB_COUNT}" -gt 4 ]; then
JOB_COUNT=4
fi
exec "${DEVELOPER_BIN_DIR}/make" -f "${PROJECT_FILE_PATH}/%s" -j "${JOB_COUNT}"
exec xcrun make -f "${PROJECT_FILE_PATH}/%s" -j "${JOB_COUNT}"
exit 1
""" % makefile_name
ssbp = gyp.xcodeproj_file.PBXShellScriptBuildPhase({
@ -1211,9 +1190,15 @@ exit 1
xcbc.AppendBuildSetting('FRAMEWORK_SEARCH_PATHS', include_dir)
for include_dir in configuration.get('include_dirs', []):
xcbc.AppendBuildSetting('HEADER_SEARCH_PATHS', include_dir)
for library_dir in configuration.get('library_dirs', []):
if library_dir not in xcode_standard_library_dirs and (
not xcbc.HasBuildSetting(_library_search_paths_var) or
library_dir not in xcbc.GetBuildSetting(_library_search_paths_var)):
xcbc.AppendBuildSetting(_library_search_paths_var, library_dir)
if 'defines' in configuration:
for define in configuration['defines']:
set_define = EscapeXCodeArgument(define)
set_define = EscapeXcodeDefine(define)
xcbc.AppendBuildSetting('GCC_PREPROCESSOR_DEFINITIONS', set_define)
if 'xcode_settings' in configuration:
for xck, xcv in configuration['xcode_settings'].iteritems():

23
tools/gyp/pylib/gyp/generator/xcode_test.py

@ -0,0 +1,23 @@
#!/usr/bin/env python
# Copyright (c) 2013 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
""" Unit tests for the xcode.py file. """
import gyp.generator.xcode as xcode
import unittest
import sys
class TestEscapeXcodeDefine(unittest.TestCase):
  """Tests for EscapeXcodeDefine; only active on a darwin host."""
  if sys.platform == 'darwin':
    def test_InheritedRemainsUnescaped(self):
      # Contains no space/quote/backslash, so it passes through untouched;
      # this is what keeps $(inherited) usable in defines.
      self.assertEqual('$(inherited)', xcode.EscapeXcodeDefine('$(inherited)'))

    def test_Escaping(self):
      # Space, double quote, and backslash each gain a backslash escape.
      self.assertEqual('a\\ b\\"c\\\\', xcode.EscapeXcodeDefine('a b"c\\'))
if __name__ == '__main__':
unittest.main()

242
tools/gyp/pylib/gyp/input.py

@ -22,6 +22,7 @@ import subprocess
import sys
import threading
import time
import traceback
from gyp.common import GypError
@ -57,7 +58,7 @@ def IsPathSection(section):
section = section[:-1]
return section in path_sections or is_path_section_match_re.search(section)
# base_non_configuraiton_keys is a list of key names that belong in the target
# base_non_configuration_keys is a list of key names that belong in the target
# itself and should not be propagated into its configurations. It is merged
# with a list that can come from the generator to
# create non_configuration_keys.
@ -69,7 +70,6 @@ base_non_configuration_keys = [
'default_configuration',
'dependencies',
'dependencies_original',
'link_languages',
'libraries',
'postbuilds',
'product_dir',
@ -85,7 +85,6 @@ base_non_configuration_keys = [
'toolset',
'toolsets',
'type',
'variants',
# Sections that can be found inside targets or configurations, but that
# should not be propagated from targets into their configurations.
@ -108,12 +107,14 @@ invalid_configuration_keys = [
'type',
]
# Controls how the generator want the build file paths.
absolute_build_file_paths = False
# Controls whether or not the generator supports multiple toolsets.
multiple_toolsets = False
# Paths for converting filelist paths to output paths: {
# toplevel,
# qualified_output_dir,
# }
generator_filelist_paths = None
def GetIncludedBuildFiles(build_file_path, aux_data, included=None):
"""Return a list of all build files included into build_file_path.
@ -223,10 +224,15 @@ def LoadOneBuildFile(build_file_path, data, aux_data, variables, includes,
gyp.common.ExceptionAppend(e, 'while reading ' + build_file_path)
raise
if not isinstance(build_file_data, dict):
raise GypError("%s does not evaluate to a dictionary." % build_file_path)
data[build_file_path] = build_file_data
aux_data[build_file_path] = {}
# Scan for includes and merge them in.
if ('skip_includes' not in build_file_data or
not build_file_data['skip_includes']):
try:
if is_target:
LoadBuildFileIncludesIntoDict(build_file_data, build_file_path, data,
@ -344,10 +350,6 @@ def LoadTargetBuildFile(build_file_path, data, aux_data, variables, includes,
else:
variables['DEPTH'] = d.replace('\\', '/')
# If the generator needs absolue paths, then do so.
if absolute_build_file_paths:
build_file_path = os.path.abspath(build_file_path)
if build_file_path in data['target_build_files']:
# Already loaded.
return False
@ -445,7 +447,8 @@ def LoadTargetBuildFile(build_file_path, data, aux_data, variables, includes,
def CallLoadTargetBuildFile(global_flags,
build_file_path, data,
aux_data, variables,
includes, depth, check):
includes, depth, check,
generator_input_info):
"""Wrapper around LoadTargetBuildFile for parallel processing.
This wrapper is used when LoadTargetBuildFile is executed in
@ -463,6 +466,7 @@ def CallLoadTargetBuildFile(global_flags,
data_keys = set(data)
aux_data_keys = set(aux_data)
SetGeneratorGlobals(generator_input_info)
result = LoadTargetBuildFile(build_file_path, data,
aux_data, variables,
includes, depth, check, False)
@ -488,8 +492,12 @@ def CallLoadTargetBuildFile(global_flags,
data_out,
aux_data_out,
dependencies)
except GypError, e:
sys.stderr.write("gyp: %s\n" % e)
return None
except Exception, e:
print >>sys.stderr, 'Exception: ', e
print >>sys.stderr, 'Exception:', e
print >>sys.stderr, traceback.format_exc()
return None
@ -550,12 +558,14 @@ class ParallelState(object):
self.condition.release()
def LoadTargetBuildFileParallel(build_file_path, data, aux_data,
variables, includes, depth, check):
def LoadTargetBuildFilesParallel(build_files, data, aux_data,
variables, includes, depth, check,
generator_input_info):
parallel_state = ParallelState()
parallel_state.condition = threading.Condition()
parallel_state.dependencies = [build_file_path]
parallel_state.scheduled = set([build_file_path])
# Make copies of the build_files argument that we can modify while working.
parallel_state.dependencies = list(build_files)
parallel_state.scheduled = set(build_files)
parallel_state.pending = 0
parallel_state.data = data
parallel_state.aux_data = aux_data
@ -564,12 +574,6 @@ def LoadTargetBuildFileParallel(build_file_path, data, aux_data,
parallel_state.condition.acquire()
while parallel_state.dependencies or parallel_state.pending:
if parallel_state.error:
print >>sys.stderr, (
'\n'
'Note: an error occurred while running gyp using multiprocessing.\n'
'For more verbose output, set GYP_PARALLEL=0 in your environment.\n'
'If the error only occurs when GYP_PARALLEL=1, '
'please report a bug!')
break
if not parallel_state.dependencies:
parallel_state.condition.wait()
@ -584,7 +588,6 @@ def LoadTargetBuildFileParallel(build_file_path, data, aux_data,
global_flags = {
'path_sections': globals()['path_sections'],
'non_configuration_keys': globals()['non_configuration_keys'],
'absolute_build_file_paths': globals()['absolute_build_file_paths'],
'multiple_toolsets': globals()['multiple_toolsets']}
if not parallel_state.pool:
@ -593,16 +596,20 @@ def LoadTargetBuildFileParallel(build_file_path, data, aux_data,
CallLoadTargetBuildFile,
args = (global_flags, dependency,
data_in, aux_data_in,
variables, includes, depth, check),
variables, includes, depth, check, generator_input_info),
callback = parallel_state.LoadTargetBuildFileCallback)
except KeyboardInterrupt, e:
parallel_state.pool.terminate()
raise e
parallel_state.condition.release()
if parallel_state.error:
sys.exit()
parallel_state.pool.close()
parallel_state.pool.join()
parallel_state.pool = None
if parallel_state.error:
sys.exit(1)
# Look for the bracket that matches the first bracket seen in a
# string, and return the start and end as a tuple. For example, if
@ -785,7 +792,7 @@ def ExpandVariables(input, phase, variables, build_file):
# Find the build file's directory, so commands can be run or file lists
# generated relative to it.
build_file_dir = os.path.dirname(build_file)
if build_file_dir == '':
if build_file_dir == '' and not file_list:
# If build_file is just a leaf filename indicating a file in the
# current directory, build_file_dir might be an empty string. Set
# it to None to signal to subprocess.Popen that it should run the
@ -802,9 +809,23 @@ def ExpandVariables(input, phase, variables, build_file):
else:
contents_list = contents.split(' ')
replacement = contents_list[0]
path = replacement
if not os.path.isabs(path):
path = os.path.join(build_file_dir, path)
if os.path.isabs(replacement):
raise GypError('| cannot handle absolute paths, got "%s"' % replacement)
if not generator_filelist_paths:
path = os.path.join(build_file_dir, replacement)
else:
if os.path.isabs(build_file_dir):
toplevel = generator_filelist_paths['toplevel']
rel_build_file_dir = gyp.common.RelativePath(build_file_dir, toplevel)
else:
rel_build_file_dir = build_file_dir
qualified_out_dir = generator_filelist_paths['qualified_out_dir']
path = os.path.join(qualified_out_dir, rel_build_file_dir, replacement)
if not os.path.isdir(os.path.dirname(path)):
os.makedirs(os.path.dirname(path))
replacement = gyp.common.RelativePath(path, build_file_dir)
f = gyp.common.WriteOnDiff(path)
for i in contents_list[1:]:
f.write('%s\n' % i)
@ -843,6 +864,7 @@ def ExpandVariables(input, phase, variables, build_file):
# that don't load quickly, this can be faster than
# <!(python modulename parameters). Do this in |build_file_dir|.
oldwd = os.getcwd() # Python doesn't like os.open('.'): no fchdir.
if build_file_dir: # build_file_dir may be None (see above).
os.chdir(build_file_dir)
try:
@ -1439,6 +1461,9 @@ class DependencyGraphNode(object):
self.dependencies = []
self.dependents = []
def __repr__(self):
return '<DependencyGraphNode: %r>' % self.ref
def FlattenToList(self):
# flat_list is the sorted list of dependencies - actually, the list items
# are the "ref" attributes of DependencyGraphNodes. Every target will
@ -1481,6 +1506,27 @@ class DependencyGraphNode(object):
return flat_list
def FindCycles(self, path=None):
"""
Returns a list of cycles in the graph, where each cycle is its own list.
"""
if path is None:
path = [self]
results = []
for node in self.dependents:
if node in path:
cycle = [node]
for part in path:
cycle.append(part)
if part == node:
break
results.append(tuple(cycle))
else:
results.extend(node.FindCycles([node] + path))
return list(set(results))
def DirectDependencies(self, dependencies=None):
"""Returns a list of just direct dependencies."""
if dependencies == None:
@ -1556,7 +1602,8 @@ class DependencyGraphNode(object):
return dependencies
def LinkDependencies(self, targets, dependencies=None, initial=True):
def _LinkDependenciesInternal(self, targets, include_shared_libraries,
dependencies=None, initial=True):
"""Returns a list of dependency targets that are linked into this target.
This function has a split personality, depending on the setting of
@ -1566,6 +1613,9 @@ class DependencyGraphNode(object):
When adding a target to the list of dependencies, this function will
recurse into itself with |initial| set to False, to collect dependencies
that are linked into the linkable target for which the list is being built.
If |include_shared_libraries| is False, the resulting dependencies will not
include shared_library targets that are linked into this target.
"""
if dependencies == None:
dependencies = []
@ -1610,6 +1660,16 @@ class DependencyGraphNode(object):
if not initial and target_type in ('executable', 'loadable_module'):
return dependencies
# Shared libraries are already fully linked. They should only be included
# in |dependencies| when adjusting static library dependencies (in order to
# link against the shared_library's import lib), but should not be included
# in |dependencies| when propagating link_settings.
# The |include_shared_libraries| flag controls which of these two cases we
# are handling.
if (not initial and target_type == 'shared_library' and
not include_shared_libraries):
return dependencies
# The target is linkable, add it to the list of link dependencies.
if self.ref not in dependencies:
dependencies.append(self.ref)
@ -1619,10 +1679,32 @@ class DependencyGraphNode(object):
# this target linkable. Always look at dependencies of the initial
# target, and always look at dependencies of non-linkables.
for dependency in self.dependencies:
dependency.LinkDependencies(targets, dependencies, False)
dependency._LinkDependenciesInternal(targets,
include_shared_libraries,
dependencies, False)
return dependencies
def DependenciesForLinkSettings(self, targets):
"""
Returns a list of dependency targets whose link_settings should be merged
into this target.
"""
# TODO(sbaig) Currently, chrome depends on the bug that shared libraries'
# link_settings are propagated. So for now, we will allow it, unless the
# 'allow_sharedlib_linksettings_propagation' flag is explicitly set to
# False. Once chrome is fixed, we can remove this flag.
include_shared_libraries = \
targets[self.ref].get('allow_sharedlib_linksettings_propagation', True)
return self._LinkDependenciesInternal(targets, include_shared_libraries)
def DependenciesToLinkAgainst(self, targets):
"""
Returns a list of dependency targets that are linked into this target.
"""
return self._LinkDependenciesInternal(targets, True)
def BuildDependencyList(targets):
# Create a DependencyGraphNode for each target. Put it into a dict for easy
@ -1713,10 +1795,16 @@ def VerifyNoGYPFileCircularDependencies(targets):
for file in dependency_nodes.iterkeys():
if not file in flat_list:
bad_files.append(file)
common_path_prefix = os.path.commonprefix(dependency_nodes)
cycles = []
for cycle in root_node.FindCycles():
simplified_paths = []
for node in cycle:
assert(node.ref.startswith(common_path_prefix))
simplified_paths.append(node.ref[len(common_path_prefix):])
cycles.append('Cycle: %s' % ' -> '.join(simplified_paths))
raise DependencyGraphNode.CircularException, \
'Some files not reachable, cycle in .gyp file dependency graph ' + \
'detected involving some or all of: ' + \
' '.join(bad_files)
'Cycles in .gyp file dependency graph detected:\n' + '\n'.join(cycles)
def DoDependentSettings(key, flat_list, targets, dependency_nodes):
@ -1733,7 +1821,8 @@ def DoDependentSettings(key, flat_list, targets, dependency_nodes):
dependencies = \
dependency_nodes[target].DirectAndImportedDependencies(targets)
elif key == 'link_settings':
dependencies = dependency_nodes[target].LinkDependencies(targets)
dependencies = \
dependency_nodes[target].DependenciesForLinkSettings(targets)
else:
raise GypError("DoDependentSettings doesn't know how to determine "
'dependencies for ' + key)
@ -1806,7 +1895,8 @@ def AdjustStaticLibraryDependencies(flat_list, targets, dependency_nodes,
# target. Add them to the dependencies list if they're not already
# present.
link_dependencies = dependency_nodes[target].LinkDependencies(targets)
link_dependencies = \
dependency_nodes[target].DependenciesToLinkAgainst(targets)
for dependency in link_dependencies:
if dependency == target:
continue
@ -2379,6 +2469,8 @@ def ValidateRulesInTarget(target, target_dict, extra_sources_for_rules):
rule_names[rule_name] = rule
rule_extension = rule['extension']
if rule_extension.startswith('.'):
rule_extension = rule_extension[1:]
if rule_extension in rule_extensions:
raise GypError(('extension %s associated with multiple rules, ' +
'target %s rules %s and %s') %
@ -2393,7 +2485,6 @@ def ValidateRulesInTarget(target, target_dict, extra_sources_for_rules):
raise GypError(
'rule_sources must not exist in input, target %s rule %s' %
(target, rule_name))
extension = rule['extension']
rule_sources = []
source_keys = ['sources']
@ -2403,7 +2494,7 @@ def ValidateRulesInTarget(target, target_dict, extra_sources_for_rules):
(source_root, source_extension) = os.path.splitext(source)
if source_extension.startswith('.'):
source_extension = source_extension[1:]
if source_extension == extension:
if source_extension == rule_extension:
rule_sources.append(source)
if len(rule_sources) > 0:
@ -2490,6 +2581,41 @@ def TurnIntIntoStrInList(the_list):
TurnIntIntoStrInList(item)
def PruneUnwantedTargets(targets, flat_list, dependency_nodes, root_targets,
                         data):
  """Return only the targets that are deep dependencies of |root_targets|."""
  # Resolve each requested root name to its fully-qualified target(s).
  qualified_root_targets = []
  for root in root_targets:
    root = root.strip()
    matches = gyp.common.FindQualifiedTargets(root, flat_list)
    if not matches:
      raise GypError("Could not find target %s" % root)
    qualified_root_targets.extend(matches)

  # Keep every root plus everything it transitively depends on.
  wanted_targets = {}
  for root in qualified_root_targets:
    wanted_targets[root] = targets[root]
    for dep in dependency_nodes[root].DeepDependencies():
      wanted_targets[dep] = targets[dep]

  wanted_flat_list = [t for t in flat_list if t in wanted_targets]

  # Drop the pruned targets from each build file's data dict as well.
  for build_file in data['target_build_files']:
    if 'targets' not in data[build_file]:
      continue
    kept = []
    for target_dict in data[build_file]['targets']:
      qualified_name = gyp.common.QualifiedTarget(build_file,
                                                  target_dict['target_name'],
                                                  target_dict['toolset'])
      if qualified_name in wanted_targets:
        kept.append(target_dict)
    data[build_file]['targets'] = kept

  return wanted_targets, wanted_flat_list
def VerifyNoCollidingTargets(targets):
"""Verify that no two targets in the same directory share the same name.
@ -2517,10 +2643,9 @@ def VerifyNoCollidingTargets(targets):
used[key] = gyp
def Load(build_files, variables, includes, depth, generator_input_info, check,
circular_check, parallel):
def SetGeneratorGlobals(generator_input_info):
# Set up path_sections and non_configuration_keys with the default data plus
# the generator-specifc data.
# the generator-specific data.
global path_sections
path_sections = base_path_sections[:]
path_sections.extend(generator_input_info['path_sections'])
@ -2529,18 +2654,17 @@ def Load(build_files, variables, includes, depth, generator_input_info, check,
non_configuration_keys = base_non_configuration_keys[:]
non_configuration_keys.extend(generator_input_info['non_configuration_keys'])
# TODO(mark) handle variants if the generator doesn't want them directly.
generator_handles_variants = \
generator_input_info['generator_handles_variants']
global absolute_build_file_paths
absolute_build_file_paths = \
generator_input_info['generator_wants_absolute_build_file_paths']
global multiple_toolsets
multiple_toolsets = generator_input_info[
'generator_supports_multiple_toolsets']
global generator_filelist_paths
generator_filelist_paths = generator_input_info['generator_filelist_paths']
def Load(build_files, variables, includes, depth, generator_input_info, check,
circular_check, parallel, root_targets):
SetGeneratorGlobals(generator_input_info)
# A generator can have other lists (in addition to sources) be processed
# for rules.
extra_sources_for_rules = generator_input_info['extra_sources_for_rules']
@ -2554,16 +2678,16 @@ def Load(build_files, variables, includes, depth, generator_input_info, check,
# track of the keys corresponding to "target" files.
data = {'target_build_files': set()}
aux_data = {}
for build_file in build_files:
# Normalize paths everywhere. This is important because paths will be
# used as keys to the data dict and for references between input files.
build_file = os.path.normpath(build_file)
try:
build_files = set(map(os.path.normpath, build_files))
if parallel:
print >>sys.stderr, 'Using parallel processing.'
LoadTargetBuildFileParallel(build_file, data, aux_data,
variables, includes, depth, check)
LoadTargetBuildFilesParallel(build_files, data, aux_data,
variables, includes, depth, check,
generator_input_info)
else:
for build_file in build_files:
try:
LoadTargetBuildFile(build_file, data, aux_data,
variables, includes, depth, check, True)
except Exception, e:
@ -2607,6 +2731,12 @@ def Load(build_files, variables, includes, depth, generator_input_info, check,
[dependency_nodes, flat_list] = BuildDependencyList(targets)
if root_targets:
# Remove, from |targets| and |flat_list|, the targets that are not deep
# dependencies of the targets specified in |root_targets|.
targets, flat_list = PruneUnwantedTargets(
targets, flat_list, dependency_nodes, root_targets, data)
# Check that no two targets in the same directory have the same name.
VerifyNoCollidingTargets(flat_list)

90
tools/gyp/pylib/gyp/input_test.py

@ -0,0 +1,90 @@
#!/usr/bin/env python
# Copyright 2013 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Unit tests for the input.py file."""
import gyp.input
import unittest
import sys
class TestFindCycles(unittest.TestCase):
  """Tests DependencyGraphNode.FindCycles on small hand-built graphs."""

  def setUp(self):
    # One graph node per label; each test wires up the edges it needs.
    self.nodes = {}
    for name in ('a', 'b', 'c', 'd', 'e'):
      self.nodes[name] = gyp.input.DependencyGraphNode(name)

  def _create_dependency(self, dependent, dependency):
    # Record the edge on both endpoints, mirroring BuildDependencyList.
    dependent.dependencies.append(dependency)
    dependency.dependents.append(dependent)

  def _chain(self, labels):
    # Link labels[0] -> labels[1] -> ... as a dependency chain.
    for upper, lower in zip(labels, labels[1:]):
      self._create_dependency(self.nodes[upper], self.nodes[lower])

  def test_no_cycle_empty_graph(self):
    for node in self.nodes.values():
      self.assertEquals([], node.FindCycles())

  def test_no_cycle_line(self):
    self._chain(('a', 'b', 'c', 'd'))
    for node in self.nodes.values():
      self.assertEquals([], node.FindCycles())

  def test_no_cycle_dag(self):
    self._chain(('a', 'b', 'c'))
    self._create_dependency(self.nodes['a'], self.nodes['c'])
    for node in self.nodes.values():
      self.assertEquals([], node.FindCycles())

  def test_cycle_self_reference(self):
    self._chain(('a', 'a'))
    self.assertEquals([(self.nodes['a'], self.nodes['a'])],
                      self.nodes['a'].FindCycles())

  def test_cycle_two_nodes(self):
    self._chain(('a', 'b', 'a'))
    self.assertEquals([(self.nodes['a'], self.nodes['b'], self.nodes['a'])],
                      self.nodes['a'].FindCycles())
    self.assertEquals([(self.nodes['b'], self.nodes['a'], self.nodes['b'])],
                      self.nodes['b'].FindCycles())

  def test_two_cycles(self):
    self._chain(('a', 'b', 'a'))
    self._chain(('b', 'c', 'b'))
    cycles = self.nodes['a'].FindCycles()
    self.assertTrue(
        (self.nodes['a'], self.nodes['b'], self.nodes['a']) in cycles)
    self.assertTrue(
        (self.nodes['b'], self.nodes['c'], self.nodes['b']) in cycles)
    self.assertEquals(2, len(cycles))

  def test_big_cycle(self):
    self._chain(('a', 'b', 'c', 'd', 'e', 'a'))
    self.assertEquals([(self.nodes['a'],
                        self.nodes['b'],
                        self.nodes['c'],
                        self.nodes['d'],
                        self.nodes['e'],
                        self.nodes['a'])],
                      self.nodes['a'].FindCycles())
# Allow running this test module directly (e.g. `python input_test.py`).
if __name__ == '__main__':
  unittest.main()

73
tools/gyp/pylib/gyp/mac_tool.py

@ -9,6 +9,7 @@ These functions are executed via gyp-mac-tool when using the Makefile generator.
"""
import fcntl
import json
import os
import plistlib
import re
@ -47,22 +48,33 @@ class MacTool(object):
extension = os.path.splitext(source)[1].lower()
if os.path.isdir(source):
# Copy tree.
# TODO(thakis): This copies file attributes like mtime, while the
# single-file branch below doesn't. This should probably be changed to
# be consistent with the single-file branch.
if os.path.exists(dest):
shutil.rmtree(dest)
shutil.copytree(source, dest)
elif extension == '.xib':
return self._CopyXIBFile(source, dest)
elif extension == '.storyboard':
return self._CopyXIBFile(source, dest)
elif extension == '.strings':
self._CopyStringsFile(source, dest)
else:
shutil.copyfile(source, dest)
shutil.copy(source, dest)
def _CopyXIBFile(self, source, dest):
"""Compiles a XIB file with ibtool into a binary plist in the bundle."""
tools_dir = os.environ.get('DEVELOPER_BIN_DIR', '/usr/bin')
args = [os.path.join(tools_dir, 'ibtool'), '--errors', '--warnings',
'--notices', '--output-format', 'human-readable-text', '--compile',
dest, source]
# ibtool sometimes crashes with relative paths. See crbug.com/314728.
base = os.path.dirname(os.path.realpath(__file__))
if os.path.relpath(source):
source = os.path.join(base, source)
if os.path.relpath(dest):
dest = os.path.join(base, dest)
args = ['xcrun', 'ibtool', '--errors', '--warnings', '--notices',
'--output-format', 'human-readable-text', '--compile', dest, source]
ibtool_section_re = re.compile(r'/\*.*\*/')
ibtool_re = re.compile(r'.*note:.*is clipping its content')
ibtoolout = subprocess.Popen(args, stdout=subprocess.PIPE)
@ -87,16 +99,14 @@ class MacTool(object):
# semicolon in dictionary.
# on invalid files. Do the same kind of validation.
import CoreFoundation
s = open(source).read()
s = open(source, 'rb').read()
d = CoreFoundation.CFDataCreate(None, s, len(s))
_, error = CoreFoundation.CFPropertyListCreateFromXMLData(None, d, 0, None)
if error:
return
fp = open(dest, 'w')
args = ['/usr/bin/iconv', '--from-code', input_code, '--to-code',
'UTF-16', source]
subprocess.call(args, stdout=fp)
fp = open(dest, 'wb')
fp.write(s.decode(input_code).encode('UTF-16'))
fp.close()
def _DetectInputEncoding(self, file_name):
@ -110,28 +120,58 @@ class MacTool(object):
return None
fp.close()
if header.startswith("\xFE\xFF"):
return "UTF-16BE"
return "UTF-16"
elif header.startswith("\xFF\xFE"):
return "UTF-16LE"
return "UTF-16"
elif header.startswith("\xEF\xBB\xBF"):
return "UTF-8"
else:
return None
def ExecCopyInfoPlist(self, source, dest):
def ExecCopyInfoPlist(self, source, dest, *keys):
"""Copies the |source| Info.plist to the destination directory |dest|."""
# Read the source Info.plist into memory.
fd = open(source, 'r')
lines = fd.read()
fd.close()
# Insert synthesized key/value pairs (e.g. BuildMachineOSBuild).
plist = plistlib.readPlistFromString(lines)
if keys:
plist = dict(plist.items() + json.loads(keys[0]).items())
lines = plistlib.writePlistToString(plist)
# Go through all the environment variables and replace them as variables in
# the file.
IDENT_RE = re.compile('[/\s]')
for key in os.environ:
if key.startswith('_'):
continue
evar = '${%s}' % key
lines = string.replace(lines, evar, os.environ[key])
evalue = os.environ[key]
lines = string.replace(lines, evar, evalue)
# Xcode supports various suffices on environment variables, which are
# all undocumented. :rfc1034identifier is used in the standard project
# template these days, and :identifier was used earlier. They are used to
# convert non-url characters into things that look like valid urls --
# except that the replacement character for :identifier, '_' isn't valid
# in a URL either -- oops, hence :rfc1034identifier was born.
evar = '${%s:identifier}' % key
evalue = IDENT_RE.sub('_', os.environ[key])
lines = string.replace(lines, evar, evalue)
evar = '${%s:rfc1034identifier}' % key
evalue = IDENT_RE.sub('-', os.environ[key])
lines = string.replace(lines, evar, evalue)
# Remove any keys with values that haven't been replaced.
lines = lines.split('\n')
for i in range(len(lines)):
if lines[i].strip().startswith("<string>${"):
lines[i] = None
lines[i - 1] = None
lines = '\n'.join(filter(lambda x: x is not None, lines))
# Write out the file with variables replaced.
fd = open(dest, 'w')
@ -173,8 +213,9 @@ class MacTool(object):
return subprocess.call(cmd_list)
def ExecFilterLibtool(self, *cmd_list):
"""Calls libtool and filters out 'libtool: file: foo.o has no symbols'."""
libtool_re = re.compile(r'^libtool: file: .* has no symbols$')
"""Calls libtool and filters out '/path/to/libtool: file: foo.o has no
symbols'."""
libtool_re = re.compile(r'^.*libtool: file: .* has no symbols$')
libtoolout = subprocess.Popen(cmd_list, stderr=subprocess.PIPE)
_, err = libtoolout.communicate()
for line in err.splitlines():

143
tools/gyp/pylib/gyp/msvs_emulation.py

@ -167,12 +167,17 @@ class MsvsSettings(object):
"""Get a dict of variables mapping internal VS macro names to their gyp
equivalents."""
target_platform = 'Win32' if self.GetArch(config) == 'x86' else 'x64'
target_name = self.spec.get('product_prefix', '') + \
self.spec.get('product_name', self.spec['target_name'])
target_dir = base_to_build + '\\' if base_to_build else ''
replacements = {
'$(OutDir)\\': base_to_build + '\\' if base_to_build else '',
'$(OutDir)\\': target_dir,
'$(TargetDir)\\': target_dir,
'$(IntDir)': '$!INTERMEDIATE_DIR',
'$(InputPath)': '${source}',
'$(InputName)': '${root}',
'$(ProjectName)': self.spec['target_name'],
'$(TargetName)': target_name,
'$(PlatformName)': target_platform,
'$(ProjectDir)\\': '',
}
@ -197,7 +202,8 @@ class MsvsSettings(object):
def AdjustLibraries(self, libraries):
"""Strip -l from library if it's specified with that."""
return [lib[2:] if lib.startswith('-l') else lib for lib in libraries]
libs = [lib[2:] if lib.startswith('-l') else lib for lib in libraries]
return [lib + '.lib' if not lib.endswith('.lib') else lib for lib in libs]
def _GetAndMunge(self, field, path, default, prefix, append, map):
"""Retrieve a value from |field| at |path| or return |default|. If
@ -289,6 +295,15 @@ class MsvsSettings(object):
pdbname = expand_special(self.ConvertVSMacros(pdbname))
return pdbname
def GetMapFileName(self, config, expand_special):
"""Gets the explicitly overriden map file name for a target or returns None
if it's not set."""
config = self._TargetConfig(config)
map_file = self._Setting(('VCLinkerTool', 'MapFileName'), config)
if map_file:
map_file = expand_special(self.ConvertVSMacros(map_file, config=config))
return map_file
def GetOutputName(self, config, expand_special):
"""Gets the explicitly overridden output name for a target or returns None
if it's not overridden."""
@ -320,8 +335,11 @@ class MsvsSettings(object):
cl = self._GetWrapper(self, self.msvs_settings[config],
'VCCLCompilerTool', append=cflags)
cl('Optimization',
map={'0': 'd', '1': '1', '2': '2', '3': 'x'}, prefix='/O')
map={'0': 'd', '1': '1', '2': '2', '3': 'x'}, prefix='/O', default='2')
cl('InlineFunctionExpansion', prefix='/Ob')
cl('DisableSpecificWarnings', prefix='/wd')
cl('StringPooling', map={'true': '/GF'})
cl('EnableFiberSafeOptimizations', map={'true': '/GT'})
cl('OmitFramePointers', map={'false': '-', 'true': ''}, prefix='/Oy')
cl('EnableIntrinsicFunctions', map={'false': '-', 'true': ''}, prefix='/Oi')
cl('FavorSizeOrSpeed', map={'1': 't', '2': 's'}, prefix='/O')
@ -345,6 +363,9 @@ class MsvsSettings(object):
cl('AdditionalOptions', prefix='')
cflags.extend(['/FI' + f for f in self._Setting(
('VCCLCompilerTool', 'ForcedIncludeFiles'), config, default=[])])
if self.vs_version.short_name in ('2013', '2013e'):
# New flag required in 2013 to maintain previous PDB behavior.
cflags.append('/FS')
# ninja handles parallelism by itself, don't have the compiler do it too.
cflags = filter(lambda x: not x.startswith('/MP'), cflags)
return cflags
@ -402,16 +423,23 @@ class MsvsSettings(object):
lib('AdditionalOptions')
return libflags
def _GetDefFileAsLdflags(self, spec, ldflags, gyp_to_build_path):
""".def files get implicitly converted to a ModuleDefinitionFile for the
linker in the VS generator. Emulate that behaviour here."""
def_file = ''
def GetDefFile(self, gyp_to_build_path):
"""Returns the .def file from sources, if any. Otherwise returns None."""
spec = self.spec
if spec['type'] in ('shared_library', 'loadable_module', 'executable'):
def_files = [s for s in spec.get('sources', []) if s.endswith('.def')]
if len(def_files) == 1:
ldflags.append('/DEF:"%s"' % gyp_to_build_path(def_files[0]))
return gyp_to_build_path(def_files[0])
elif len(def_files) > 1:
raise Exception("Multiple .def files")
return None
def _GetDefFileAsLdflags(self, ldflags, gyp_to_build_path):
""".def files get implicitly converted to a ModuleDefinitionFile for the
linker in the VS generator. Emulate that behaviour here."""
def_file = self.GetDefFile(gyp_to_build_path)
if def_file:
ldflags.append('/DEF:"%s"' % def_file)
def GetLdflags(self, config, gyp_to_build_path, expand_special,
manifest_base_name, is_executable):
@ -421,7 +449,7 @@ class MsvsSettings(object):
ldflags = []
ld = self._GetWrapper(self, self.msvs_settings[config],
'VCLinkerTool', append=ldflags)
self._GetDefFileAsLdflags(self.spec, ldflags, gyp_to_build_path)
self._GetDefFileAsLdflags(ldflags, gyp_to_build_path)
ld('GenerateDebugInformation', map={'true': '/DEBUG'})
ld('TargetMachine', map={'1': 'X86', '17': 'X64'}, prefix='/MACHINE:')
ldflags.extend(self._GetAdditionalLibraryDirectories(
@ -433,6 +461,10 @@ class MsvsSettings(object):
pdb = self.GetPDBName(config, expand_special)
if pdb:
ldflags.append('/PDB:' + pdb)
map_file = self.GetMapFileName(config, expand_special)
ld('GenerateMapFile', map={'true': '/MAP:' + map_file if map_file
else '/MAP'})
ld('MapExports', map={'true': '/MAPINFO:EXPORTS'})
ld('AdditionalOptions', prefix='')
ld('SubSystem', map={'1': 'CONSOLE', '2': 'WINDOWS'}, prefix='/SUBSYSTEM:')
ld('TerminalServerAware', map={'1': ':NO', '2': ''}, prefix='/TSAWARE')
@ -480,17 +512,32 @@ class MsvsSettings(object):
def _GetLdManifestFlags(self, config, name, allow_isolation):
"""Returns the set of flags that need to be added to the link to generate
a default manifest, as well as the name of the generated file."""
# Add manifest flags that mirror the defaults in VS. Chromium dev builds
# do not currently use any non-default settings, but we could parse
# VCManifestTool blocks if Chromium or other projects need them in the
# future. Of particular note, we do not yet support EmbedManifest because
# it complicates incremental linking.
# The manifest is generated by default.
output_name = name + '.intermediate.manifest'
flags = [
'/MANIFEST',
'/ManifestFile:' + output_name,
'''/MANIFESTUAC:"level='asInvoker' uiAccess='false'"'''
]
config = self._TargetConfig(config)
enable_uac = self._Setting(('VCLinkerTool', 'EnableUAC'), config,
default='true')
if enable_uac == 'true':
execution_level = self._Setting(('VCLinkerTool', 'UACExecutionLevel'),
config, default='0')
execution_level_map = {
'0': 'asInvoker',
'1': 'highestAvailable',
'2': 'requireAdministrator'
}
ui_access = self._Setting(('VCLinkerTool', 'UACUIAccess'), config,
default='false')
flags.append('''/MANIFESTUAC:"level='%s' uiAccess='%s'"''' %
(execution_level_map[execution_level], ui_access))
else:
flags.append('/MANIFESTUAC:NO')
if allow_isolation:
flags.append('/ALLOWISOLATION')
return flags, output_name
@ -500,9 +547,6 @@ class MsvsSettings(object):
generated by the linker."""
files = self._Setting(('VCManifestTool', 'AdditionalManifestFiles'), config,
default=[])
if (self._Setting(
('VCManifestTool', 'EmbedManifest'), config, default='') == 'true'):
print 'gyp/msvs_emulation.py: "EmbedManifest: true" not yet supported.'
if isinstance(files, str):
files = files.split(';')
return [os.path.normpath(
@ -516,6 +560,18 @@ class MsvsSettings(object):
uldi = self._Setting(('VCLinkerTool', 'UseLibraryDependencyInputs'), config)
return uldi == 'true'
def IsEmbedManifest(self, config):
"""Returns whether manifest should be linked into binary."""
config = self._TargetConfig(config)
embed = self._Setting(('VCManifestTool', 'EmbedManifest'), config)
return embed == 'true'
def IsLinkIncremental(self, config):
"""Returns whether the target should be linked incrementally."""
config = self._TargetConfig(config)
link_inc = self._Setting(('VCLinkerTool', 'LinkIncremental'), config)
return link_inc != '1'
def GetRcflags(self, config, gyp_to_ninja_path):
"""Returns the flags that need to be added to invocations of the resource
compiler."""
@ -626,11 +682,12 @@ class PrecompiledHeader(object):
files."""
return os.path.split(self.settings.msvs_precompiled_header[self.config])[1]
def GetObjDependencies(self, sources, objs):
def GetObjDependencies(self, sources, objs, arch):
"""Given a list of sources files and the corresponding object files,
returns a list of the pch files that should be depended upon. The
additional wrapping in the return value is for interface compatability
with make.py on Mac, and xcode_emulation.py."""
assert arch is None
if not self._PchHeader():
return []
pch_ext = os.path.splitext(self.pch_source)[1]
@ -639,7 +696,7 @@ class PrecompiledHeader(object):
return [(None, None, self.output_obj)]
return []
def GetPchBuildCommands(self):
def GetPchBuildCommands(self, arch):
"""Not used on Windows as there are no additional build steps required
(instead, existing steps are modified in GetFlagsModifications below)."""
return []
@ -724,6 +781,14 @@ def _FormatAsEnvironmentBlock(envvar_dict):
block += nul
return block
def _ExtractCLPath(output_of_where):
"""Gets the path to cl.exe based on the output of calling the environment
setup batch file, followed by the equivalent of `where`."""
# Take the first line, as that's the first found in the PATH.
for line in output_of_where.strip().splitlines():
if line.startswith('LOC:'):
return line[len('LOC:'):].strip()
def GenerateEnvironmentFiles(toplevel_build_dir, generator_flags, open_out):
"""It's not sufficient to have the absolute path to the compiler, linker,
etc. on Windows, as those tools rely on .dlls being in the PATH. We also
@ -739,10 +804,16 @@ def GenerateEnvironmentFiles(toplevel_build_dir, generator_flags, open_out):
meet your requirement (e.g. for custom toolchains), you can pass
"-G ninja_use_custom_environment_files" to the gyp to suppress file
generation and use custom environment files prepared by yourself."""
archs = ('x86', 'x64')
if generator_flags.get('ninja_use_custom_environment_files', 0):
return
cl_paths = {}
for arch in archs:
cl_paths[arch] = 'cl.exe'
return cl_paths
vs = GetVSVersion(generator_flags)
for arch in ('x86', 'x64'):
cl_paths = {}
for arch in archs:
# Extract environment variables for subprocesses.
args = vs.SetupScript(arch)
args.extend(('&&', 'set'))
popen = subprocess.Popen(
@ -754,6 +825,15 @@ def GenerateEnvironmentFiles(toplevel_build_dir, generator_flags, open_out):
f.write(env_block)
f.close()
# Find cl.exe location for this architecture.
args = vs.SetupScript(arch)
args.extend(('&&',
'for', '%i', 'in', '(cl.exe)', 'do', '@echo', 'LOC:%~$PATH:i'))
popen = subprocess.Popen(args, shell=True, stdout=subprocess.PIPE)
output, _ = popen.communicate()
cl_paths[arch] = _ExtractCLPath(output)
return cl_paths
def VerifyMissingSources(sources, build_dir, generator_flags, gyp_to_ninja):
"""Emulate behavior of msvs_error_on_missing_sources present in the msvs
generator: Check that all regular source files, i.e. not created at run time,
@ -769,3 +849,22 @@ def VerifyMissingSources(sources, build_dir, generator_flags, gyp_to_ninja):
# path for a slightly less crazy looking output.
cleaned_up = [os.path.normpath(x) for x in missing]
raise Exception('Missing input files:\n%s' % '\n'.join(cleaned_up))
# Sets some values in default_variables, which are required for many
# generators, run on Windows.
def CalculateCommonVariables(default_variables, params):
generator_flags = params.get('generator_flags', {})
# Set a variable so conditions can be based on msvs_version.
msvs_version = gyp.msvs_emulation.GetVSVersion(generator_flags)
default_variables['MSVS_VERSION'] = msvs_version.ShortName()
# To determine processor word size on Windows, in addition to checking
# PROCESSOR_ARCHITECTURE (which reflects the word size of the current
# process), it is also necessary to check PROCESSOR_ARCHITEW6432 (which
# contains the actual word size of the system when running thru WOW64).
if ('64' in os.environ.get('PROCESSOR_ARCHITECTURE', '') or
'64' in os.environ.get('PROCESSOR_ARCHITEW6432', '')):
default_variables['MSVS_OS_BITS'] = 64
else:
default_variables['MSVS_OS_BITS'] = 32

18
tools/gyp/pylib/gyp/ninja_syntax.py

@ -34,8 +34,13 @@ class Writer(object):
value = ' '.join(filter(None, value)) # Filter out empty strings.
self._line('%s = %s' % (key, value), indent)
def pool(self, name, depth):
self._line('pool %s' % name)
self.variable('depth', depth, indent=1)
def rule(self, name, command, description=None, depfile=None,
generator=False, restat=False, rspfile=None, rspfile_content=None):
generator=False, pool=None, restat=False, rspfile=None,
rspfile_content=None, deps=None):
self._line('rule %s' % name)
self.variable('command', command, indent=1)
if description:
@ -44,12 +49,16 @@ class Writer(object):
self.variable('depfile', depfile, indent=1)
if generator:
self.variable('generator', '1', indent=1)
if pool:
self.variable('pool', pool, indent=1)
if restat:
self.variable('restat', '1', indent=1)
if rspfile:
self.variable('rspfile', rspfile, indent=1)
if rspfile_content:
self.variable('rspfile_content', rspfile_content, indent=1)
if deps:
self.variable('deps', deps, indent=1)
def build(self, outputs, rule, inputs=None, implicit=None, order_only=None,
variables=None):
@ -67,13 +76,12 @@ class Writer(object):
all_inputs.append('||')
all_inputs.extend(order_only)
self._line('build %s: %s %s' % (' '.join(out_outputs),
rule,
' '.join(all_inputs)))
self._line('build %s: %s' % (' '.join(out_outputs),
' '.join([rule] + all_inputs)))
if variables:
if isinstance(variables, dict):
iterator = variables.iteritems()
iterator = iter(variables.items())
else:
iterator = iter(variables)

37
tools/gyp/pylib/gyp/win_tool.py

@ -9,7 +9,6 @@
These functions are executed via gyp-win-tool when using the ninja generator.
"""
from ctypes import windll, wintypes
import os
import shutil
import subprocess
@ -25,31 +24,6 @@ def main(args):
sys.exit(exit_code)
class LinkLock(object):
"""A flock-style lock to limit the number of concurrent links to one.
Uses a session-local mutex based on the file's directory.
"""
def __enter__(self):
name = 'Local\\%s' % BASE_DIR.replace('\\', '_').replace(':', '_')
self.mutex = windll.kernel32.CreateMutexW(
wintypes.c_int(0),
wintypes.c_int(0),
wintypes.create_unicode_buffer(name))
assert self.mutex
result = windll.kernel32.WaitForSingleObject(
self.mutex, wintypes.c_int(0xFFFFFFFF))
# 0x80 means another process was killed without releasing the mutex, but
# that this process has been given ownership. This is fine for our
# purposes.
assert result in (0, 0x80), (
"%s, %s" % (result, windll.kernel32.GetLastError()))
def __exit__(self, type, value, traceback):
windll.kernel32.ReleaseMutex(self.mutex)
windll.kernel32.CloseHandle(self.mutex)
class WinTool(object):
"""This class performs all the Windows tooling steps. The methods can either
be executed directly, or dispatched from an argument list."""
@ -96,7 +70,6 @@ class WinTool(object):
' Creating library ui.dll.lib and object ui.dll.exp'
This happens when there are exports from the dll or exe.
"""
with LinkLock():
env = self._GetEnv(arch)
popen = subprocess.Popen(args, shell=True, env=env,
stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
@ -119,6 +92,16 @@ class WinTool(object):
print line
return popen.returncode
def ExecManifestToRc(self, arch, *args):
"""Creates a resource file pointing a SxS assembly manifest.
|args| is tuple containing path to resource file, path to manifest file
and resource name which can be "1" (for executables) or "2" (for DLLs)."""
manifest_path, resource_path, resource_name = args
with open(resource_path, 'wb') as output:
output.write('#include <windows.h>\n%s RT_MANIFEST "%s"' % (
resource_name,
os.path.abspath(manifest_path).replace('\\', '/')))
def ExecMidlWrapper(self, arch, outdir, tlb, h, dlldata, iid, proxy, idl,
*flags):
"""Filter noisy filenames output from MIDL compile step that isn't

307
tools/gyp/pylib/gyp/xcode_emulation.py

@ -7,6 +7,7 @@ This module contains classes that help to emulate xcodebuild behavior on top of
other build systems, such as make and ninja.
"""
import copy
import gyp.common
import os.path
import re
@ -22,9 +23,19 @@ class XcodeSettings(object):
# at class-level for efficiency.
_sdk_path_cache = {}
# Populated lazily by GetExtraPlistItems(). Shared by all XcodeSettings, so
# cached at class-level for efficiency.
_plist_cache = {}
# Populated lazily by GetIOSPostbuilds. Shared by all XcodeSettings, so
# cached at class-level for efficiency.
_codesigning_key_cache = {}
def __init__(self, spec):
self.spec = spec
self.isIOS = False
# Per-target 'xcode_settings' are pushed down into configs earlier by gyp.
# This means self.xcode_settings[config] always contains all settings
# for that config -- the per-target settings as well. Settings that are
@ -33,6 +44,10 @@ class XcodeSettings(object):
configs = spec['configurations']
for configname, config in configs.iteritems():
self.xcode_settings[configname] = config.get('xcode_settings', {})
self._ConvertConditionalKeys(configname)
if self.xcode_settings[configname].get('IPHONEOS_DEPLOYMENT_TARGET',
None):
self.isIOS = True
# This is only non-None temporarily during the execution of some methods.
self.configname = None
@ -40,6 +55,23 @@ class XcodeSettings(object):
# Used by _AdjustLibrary to match .a and .dylib entries in libraries.
self.library_re = re.compile(r'^lib([^/]+)\.(a|dylib)$')
def _ConvertConditionalKeys(self, configname):
"""Converts or warns on conditional keys. Xcode supports conditional keys,
such as CODE_SIGN_IDENTITY[sdk=iphoneos*]. This is a partial implementation
with some keys converted while the rest force a warning."""
settings = self.xcode_settings[configname]
conditional_keys = [key for key in settings if key.endswith(']')]
for key in conditional_keys:
# If you need more, speak up at http://crbug.com/122592
if key.endswith("[sdk=iphoneos*]"):
if configname.endswith("iphoneos"):
new_key = key.split("[")[0]
settings[new_key] = settings[key]
else:
print 'Warning: Conditional keys not implemented, ignoring:', \
' '.join(conditional_keys)
del settings[key]
def _Settings(self):
assert self.configname
return self.xcode_settings[self.configname]
@ -79,7 +111,7 @@ class XcodeSettings(object):
'WRAPPER_EXTENSION', default=default_wrapper_extension)
return '.' + self.spec.get('product_extension', wrapper_extension)
elif self.spec['type'] == 'executable':
return '.app'
return '.' + self.spec.get('product_extension', 'app')
else:
assert False, "Don't know extension for '%s', target '%s'" % (
self.spec['type'], self.spec['target_name'])
@ -104,6 +136,8 @@ class XcodeSettings(object):
def GetBundleContentsFolderPath(self):
"""Returns the qualified path to the bundle's contents folder. E.g.
Chromium.app/Contents or Foo.bundle/Versions/A. Only valid for bundles."""
if self.isIOS:
return self.GetWrapperName()
assert self._IsBundle()
if self.spec['type'] == 'shared_library':
return os.path.join(
@ -116,6 +150,8 @@ class XcodeSettings(object):
"""Returns the qualified path to the bundle's resource folder. E.g.
Chromium.app/Contents/Resources. Only valid for bundles."""
assert self._IsBundle()
if self.isIOS:
return self.GetBundleContentsFolderPath()
return os.path.join(self.GetBundleContentsFolderPath(), 'Resources')
def GetBundlePlistPath(self):
@ -160,7 +196,7 @@ class XcodeSettings(object):
"""Returns the name of the bundle binary of by this target.
E.g. Chromium.app/Contents/MacOS/Chromium. Only valid for bundles."""
assert self._IsBundle()
if self.spec['type'] in ('shared_library'):
if self.spec['type'] in ('shared_library') or self.isIOS:
path = self.GetBundleContentsFolderPath()
elif self.spec['type'] in ('executable', 'loadable_module'):
path = os.path.join(self.GetBundleContentsFolderPath(), 'MacOS')
@ -222,18 +258,32 @@ class XcodeSettings(object):
else:
return self._GetStandaloneBinaryPath()
def _GetSdkVersionInfoItem(self, sdk, infoitem):
job = subprocess.Popen(['xcodebuild', '-version', '-sdk', sdk, infoitem],
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT)
def GetActiveArchs(self, configname):
"""Returns the architectures this target should be built for."""
# TODO: Look at VALID_ARCHS, ONLY_ACTIVE_ARCH; possibly set
# CURRENT_ARCH / NATIVE_ARCH env vars?
return self.xcode_settings[configname].get('ARCHS', ['i386'])
def _GetStdout(self, cmdlist):
job = subprocess.Popen(cmdlist, stdout=subprocess.PIPE)
out = job.communicate()[0]
if job.returncode != 0:
sys.stderr.write(out + '\n')
raise GypError('Error %d running xcodebuild' % job.returncode)
raise GypError('Error %d running %s' % (job.returncode, cmdlist[0]))
return out.rstrip('\n')
def _SdkPath(self):
sdk_root = self.GetPerTargetSetting('SDKROOT', default='macosx')
def _GetSdkVersionInfoItem(self, sdk, infoitem):
return self._GetStdout(['xcodebuild', '-version', '-sdk', sdk, infoitem])
def _SdkRoot(self, configname):
if configname is None:
configname = self.configname
return self.GetPerConfigSetting('SDKROOT', configname, default='')
def _SdkPath(self, configname=None):
sdk_root = self._SdkRoot(configname)
if sdk_root.startswith('/'):
return sdk_root
if sdk_root not in XcodeSettings._sdk_path_cache:
XcodeSettings._sdk_path_cache[sdk_root] = self._GetSdkVersionInfoItem(
sdk_root, 'Path')
@ -251,7 +301,7 @@ class XcodeSettings(object):
self._Appendf(lst, 'IPHONEOS_DEPLOYMENT_TARGET',
'-miphoneos-version-min=%s')
def GetCflags(self, configname):
def GetCflags(self, configname, arch=None):
"""Returns flags that need to be added to .c, .cc, .m, and .mm
compilations."""
# This functions (and the similar ones below) do not offer complete
@ -298,6 +348,11 @@ class XcodeSettings(object):
else:
raise NotImplementedError('Unknown debug format %s' % dbg_format)
if self._Settings().get('GCC_STRICT_ALIASING') == 'YES':
cflags.append('-fstrict-aliasing')
elif self._Settings().get('GCC_STRICT_ALIASING') == 'NO':
cflags.append('-fno-strict-aliasing')
if self._Test('GCC_SYMBOLS_PRIVATE_EXTERN', 'YES', default='NO'):
cflags.append('-fvisibility=hidden')
@ -319,6 +374,9 @@ class XcodeSettings(object):
self._WarnUnimplemented('MACH_O_TYPE')
self._WarnUnimplemented('PRODUCT_TYPE')
if arch is not None:
archs = [arch]
else:
archs = self._Settings().get('ARCHS', ['i386'])
if len(archs) != 1:
# TODO: Supporting fat binaries will be annoying.
@ -351,6 +409,9 @@ class XcodeSettings(object):
"""Returns flags that need to be added to .c, and .m compilations."""
self.configname = configname
cflags_c = []
if self._Settings().get('GCC_C_LANGUAGE_STANDARD', '') == 'ansi':
cflags_c.append('-ansi')
else:
self._Appendf(cflags_c, 'GCC_C_LANGUAGE_STANDARD', '-std=%s')
cflags_c += self._Settings().get('OTHER_CFLAGS', [])
self.configname = None
@ -404,13 +465,22 @@ class XcodeSettings(object):
elif gc_policy == 'required':
flags.append('-fobjc-gc-only')
def _AddObjectiveCARCFlags(self, flags):
if self._Test('CLANG_ENABLE_OBJC_ARC', 'YES', default='NO'):
flags.append('-fobjc-arc')
def _AddObjectiveCMissingPropertySynthesisFlags(self, flags):
if self._Test('CLANG_WARN_OBJC_MISSING_PROPERTY_SYNTHESIS',
'YES', default='NO'):
flags.append('-Wobjc-missing-property-synthesis')
def GetCflagsObjC(self, configname):
"""Returns flags that need to be added to .m compilations."""
self.configname = configname
cflags_objc = []
self._AddObjectiveCGarbageCollectionFlags(cflags_objc)
self._AddObjectiveCARCFlags(cflags_objc)
self._AddObjectiveCMissingPropertySynthesisFlags(cflags_objc)
self.configname = None
return cflags_objc
@ -419,6 +489,8 @@ class XcodeSettings(object):
self.configname = configname
cflags_objcc = []
self._AddObjectiveCGarbageCollectionFlags(cflags_objcc)
self._AddObjectiveCARCFlags(cflags_objcc)
self._AddObjectiveCMissingPropertySynthesisFlags(cflags_objcc)
if self._Test('GCC_OBJC_CALL_CXX_CDTORS', 'YES', default='NO'):
cflags_objcc.append('-fobjc-call-cxx-cdtors')
self.configname = None
@ -513,7 +585,7 @@ class XcodeSettings(object):
ldflag = '-L' + gyp_to_build_path(ldflag[len('-L'):])
return ldflag
def GetLdflags(self, configname, product_dir, gyp_to_build_path):
def GetLdflags(self, configname, product_dir, gyp_to_build_path, arch=None):
"""Returns flags that need to be passed to the linker.
Args:
@ -555,6 +627,9 @@ class XcodeSettings(object):
'-Wl,' + gyp_to_build_path(
self._Settings()['ORDER_FILE']))
if arch is not None:
archs = [arch]
else:
archs = self._Settings().get('ARCHS', ['i386'])
if len(archs) != 1:
# TODO: Supporting fat binaries will be annoying.
@ -566,7 +641,7 @@ class XcodeSettings(object):
ldflags.append('-L' + product_dir)
install_name = self.GetInstallName()
if install_name:
if install_name and self.spec['type'] != 'loadable_module':
ldflags.append('-install_name ' + install_name.replace(' ', r'\ '))
for rpath in self._Settings().get('LD_RUNPATH_SEARCH_PATHS', []):
@ -613,19 +688,25 @@ class XcodeSettings(object):
del result[key]
return result
def GetPerConfigSetting(self, setting, configname, default=None):
if configname in self.xcode_settings:
return self.xcode_settings[configname].get(setting, default)
else:
return self.GetPerTargetSetting(setting, default)
def GetPerTargetSetting(self, setting, default=None):
"""Tries to get xcode_settings.setting from spec. Assumes that the setting
has the same value in all configurations and throws otherwise."""
first_pass = True
is_first_pass = True
result = None
for configname in sorted(self.xcode_settings.keys()):
if first_pass:
if is_first_pass:
result = self.xcode_settings[configname].get(setting, None)
first_pass = False
is_first_pass = False
else:
assert result == self.xcode_settings[configname].get(setting, None), (
"Expected per-target setting for '%s', got per-config setting "
"(target %s)" % (setting, spec['target_name']))
"(target %s)" % (setting, self.spec['target_name']))
if result is None:
return default
return result
@ -641,7 +722,7 @@ class XcodeSettings(object):
self._Test('STRIP_INSTALLED_PRODUCT', 'YES', default='NO')):
default_strip_style = 'debugging'
if self._IsBundle():
if self.spec['type'] == 'loadable_module' and self._IsBundle():
default_strip_style = 'non-global'
elif self.spec['type'] == 'executable':
default_strip_style = 'all'
@ -683,7 +764,8 @@ class XcodeSettings(object):
self.configname = None
return result
def GetTargetPostbuilds(self, configname, output, output_binary, quiet=False):
def _GetTargetPostbuilds(self, configname, output, output_binary,
quiet=False):
"""Returns a list of shell commands that contain the shell commands
to run as postbuilds for this target, before the actual postbuilds."""
# dSYMs need to build before stripping happens.
@ -691,7 +773,51 @@ class XcodeSettings(object):
self._GetDebugInfoPostbuilds(configname, output, output_binary, quiet) +
self._GetStripPostbuilds(configname, output_binary, quiet))
def _AdjustLibrary(self, library):
def _GetIOSPostbuilds(self, configname, output_binary):
  """Return a shell command to codesign the iOS output binary so it can
  be deployed to a device. This should be run as the very last step of the
  build."""
  # Only executables in iOS builds are signed; everything else gets no
  # extra postbuild step.
  if not (self.isIOS and self.spec['type'] == "executable"):
    return []
  # An empty CODE_SIGN_IDENTITY means "do not sign".
  identity = self.xcode_settings[configname].get('CODE_SIGN_IDENTITY', '')
  if identity == '':
    return []
  # Resolve the identity name to a signing key hash once per identity and
  # memoize it on the class: `security find-identity` shells out and is
  # comparatively slow.
  if identity not in XcodeSettings._codesigning_key_cache:
    proc = subprocess.Popen(['security', 'find-identity', '-p', 'codesigning',
                             '-v'], stdout=subprocess.PIPE)
    output = proc.communicate()[0].strip()
    key = None
    for item in output.split("\n"):
      # NOTE(review): matching by substring; presumably each matching line
      # has the key hash as its second space-separated token — confirm
      # against `security find-identity -v` output format.
      if identity in item:
        assert key == None, (
            "Multiple codesigning identities for identity: %s" %
            identity)
        key = item.split(' ')[1]
    XcodeSettings._codesigning_key_cache[identity] = key
  key = XcodeSettings._codesigning_key_cache[identity]
  if key:
    # Warn for any unimplemented signing xcode keys.
    unimpl = ['CODE_SIGN_RESOURCE_RULES_PATH', 'OTHER_CODE_SIGN_FLAGS',
              'CODE_SIGN_ENTITLEMENTS']
    keys = set(self.xcode_settings[configname].keys())
    unimpl = set(unimpl) & keys
    if unimpl:
      print 'Warning: Some codesign keys not implemented, ignoring:', \
          ' '.join(unimpl)
    return ['codesign --force --sign %s %s' % (key, output_binary)]
  return []
def AddImplicitPostbuilds(self, configname, output, output_binary,
                          postbuilds=None, quiet=False):
  """Returns a list of shell commands that should run before and after
  |postbuilds|.

  Args:
    configname: Name of the build configuration.
    output: Path to the build output.
    output_binary: Path to the binary within the output; must not be None.
    postbuilds: Optional list of explicit postbuild shell commands to
        sandwich between the implicit target postbuilds and the iOS
        codesigning step. Defaults to no commands.
    quiet: Passed through to the target postbuild generation.
  """
  assert output_binary is not None
  # Use a None sentinel instead of a mutable default argument ([]), which
  # would be shared across calls.
  if postbuilds is None:
    postbuilds = []
  # dSYM/strip steps must run before signing, signing must be last.
  pre = self._GetTargetPostbuilds(configname, output, output_binary, quiet)
  post = self._GetIOSPostbuilds(configname, output_binary)
  return pre + postbuilds + post
def _AdjustLibrary(self, library, config_name=None):
if library.endswith('.framework'):
l = '-framework ' + os.path.splitext(os.path.basename(library))[0]
else:
@ -700,15 +826,76 @@ class XcodeSettings(object):
l = '-l' + m.group(1)
else:
l = library
return l.replace('$(SDKROOT)', self._SdkPath())
return l.replace('$(SDKROOT)', self._SdkPath(config_name))
def AdjustLibraries(self, libraries):
def AdjustLibraries(self, libraries, config_name=None):
"""Transforms entries like 'Cocoa.framework' in libraries into entries like
'-framework Cocoa', 'libcrypto.dylib' into '-lcrypto', etc.
"""
libraries = [ self._AdjustLibrary(library) for library in libraries]
libraries = [self._AdjustLibrary(library, config_name)
for library in libraries]
return libraries
def _BuildMachineOSBuild(self):
  """Returns the build machine's OS build number (e.g. '13F34') via
  `sw_vers -buildVersion`; presumably _GetStdout returns stripped stdout —
  verify against its definition."""
  return self._GetStdout(['sw_vers', '-buildVersion'])
def _XcodeVersion(self):
# `xcodebuild -version` output looks like
# Xcode 4.6.3
# Build version 4H1503
# or like
# Xcode 3.2.6
# Component versions: DevToolsCore-1809.0; DevToolsSupport-1806.0
# BuildVersion: 10M2518
# Convert that to '0463', '4H1503'.
version_list = self._GetStdout(['xcodebuild', '-version']).splitlines()
version = version_list[0]
build = version_list[-1]
# Be careful to convert "4.2" to "0420":
version = version.split()[-1].replace('.', '')
version = (version + '0' * (3 - len(version))).zfill(4)
build = build.split()[-1]
return version, build
def _XcodeIOSDeviceFamily(self, configname):
family = self.xcode_settings[configname].get('TARGETED_DEVICE_FAMILY', '1')
return [int(x) for x in family.split(',')]
def GetExtraPlistItems(self, configname=None):
  """Returns a dictionary with extra items to insert into Info.plist."""
  # Most values are identical across targets, so they are computed once per
  # configuration and memoized on the class in _plist_cache.
  if configname not in XcodeSettings._plist_cache:
    cache = {}
    cache['BuildMachineOSBuild'] = self._BuildMachineOSBuild()
    xcode, xcode_build = self._XcodeVersion()
    cache['DTXcode'] = xcode
    cache['DTXcodeBuild'] = xcode_build
    sdk_root = self._SdkRoot(configname)
    cache['DTSDKName'] = sdk_root
    # _XcodeVersion returns a zero-padded string, so string comparison
    # works: '0430' means Xcode 4.3, which started recording the SDK's own
    # build number.
    if xcode >= '0430':
      cache['DTSDKBuild'] = self._GetSdkVersionInfoItem(
          sdk_root, 'ProductBuildVersion')
    else:
      cache['DTSDKBuild'] = cache['BuildMachineOSBuild']
    if self.isIOS:
      cache['DTPlatformName'] = cache['DTSDKName']
      # Device-build configuration names end in "iphoneos" (see
      # _AddIOSDeviceConfigurations); anything else targets the simulator.
      if configname.endswith("iphoneos"):
        cache['DTPlatformVersion'] = self._GetSdkVersionInfoItem(
            sdk_root, 'ProductVersion')
        cache['CFBundleSupportedPlatforms'] = ['iPhoneOS']
      else:
        cache['CFBundleSupportedPlatforms'] = ['iPhoneSimulator']
    XcodeSettings._plist_cache[configname] = cache
  # Include extra plist items that are per-target, not per global
  # XcodeSettings.
  items = dict(XcodeSettings._plist_cache[configname])
  if self.isIOS:
    items['UIDeviceFamily'] = self._XcodeIOSDeviceFamily(configname)
  return items
class MacPrefixHeader(object):
"""A class that helps with emulating Xcode's GCC_PREFIX_HEADER feature.
@ -760,21 +947,28 @@ class MacPrefixHeader(object):
self.header, lang)
self.header = gyp_path_to_build_path(self.header)
def GetInclude(self, lang):
def _CompiledHeader(self, lang, arch):
assert self.compile_headers
h = self.compiled_headers[lang]
if arch:
h += '.' + arch
return h
def GetInclude(self, lang, arch=None):
"""Gets the cflags to include the prefix header for language |lang|."""
if self.compile_headers and lang in self.compiled_headers:
return '-include %s' % self.compiled_headers[lang]
return '-include %s' % self._CompiledHeader(lang, arch)
elif self.header:
return '-include %s' % self.header
else:
return ''
def _Gch(self, lang):
def _Gch(self, lang, arch):
"""Returns the actual file name of the prefix header for language |lang|."""
assert self.compile_headers
return self.compiled_headers[lang] + '.gch'
return self._CompiledHeader(lang, arch) + '.gch'
def GetObjDependencies(self, sources, objs):
def GetObjDependencies(self, sources, objs, arch=None):
"""Given a list of source files and the corresponding object files, returns
a list of (source, object, gch) tuples, where |gch| is the build-directory
relative path to the gch file each object file depends on. |compilable[i]|
@ -792,20 +986,20 @@ class MacPrefixHeader(object):
'.mm': 'mm',
}.get(ext, None)
if lang:
result.append((source, obj, self._Gch(lang)))
result.append((source, obj, self._Gch(lang, arch)))
return result
def GetPchBuildCommands(self):
def GetPchBuildCommands(self, arch=None):
"""Returns [(path_to_gch, language_flag, language, header)].
|path_to_gch| and |header| are relative to the build directory.
"""
if not self.header or not self.compile_headers:
return []
return [
(self._Gch('c'), '-x c-header', 'c', self.header),
(self._Gch('cc'), '-x c++-header', 'cc', self.header),
(self._Gch('m'), '-x objective-c-header', 'm', self.header),
(self._Gch('mm'), '-x objective-c++-header', 'mm', self.header),
(self._Gch('c', arch), '-x c-header', 'c', self.header),
(self._Gch('cc', arch), '-x c++-header', 'cc', self.header),
(self._Gch('m', arch), '-x objective-c-header', 'm', self.header),
(self._Gch('mm', arch), '-x objective-c++-header', 'mm', self.header),
]
@ -871,14 +1065,17 @@ def GetMacBundleResources(product_dir, xcode_settings, resources):
output = os.path.join(output, res_parts[1])
# Compiled XIB files are referred to by .nib.
if output.endswith('.xib'):
output = output[0:-3] + 'nib'
output = os.path.splitext(output)[0] + '.nib'
# Compiled storyboard files are referred to by .storyboardc.
if output.endswith('.storyboard'):
output = os.path.splitext(output)[0] + '.storyboardc'
yield output, res
def GetMacInfoPlist(product_dir, xcode_settings, gyp_path_to_build_path):
"""Returns (info_plist, dest_plist, defines, extra_env), where:
* |info_plist| is the sourc plist path, relative to the
* |info_plist| is the source plist path, relative to the
build directory,
* |dest_plist| is the destination plist path, relative to the
build directory,
@ -957,8 +1154,8 @@ def _GetXcodeEnv(xcode_settings, built_products_dir, srcroot, configuration,
'TARGET_BUILD_DIR' : built_products_dir,
'TEMP_DIR' : '${TMPDIR}',
}
if xcode_settings.GetPerTargetSetting('SDKROOT'):
env['SDKROOT'] = xcode_settings._SdkPath()
if xcode_settings.GetPerConfigSetting('SDKROOT', configuration):
env['SDKROOT'] = xcode_settings._SdkPath(configuration)
else:
env['SDKROOT'] = ''
@ -1081,3 +1278,35 @@ def GetSpecPostbuildCommands(spec, quiet=False):
spec['target_name'], postbuild['postbuild_name']))
postbuilds.append(gyp.common.EncodePOSIXShellList(postbuild['action']))
return postbuilds
def _HasIOSTarget(targets):
"""Returns true if any target contains the iOS specific key
IPHONEOS_DEPLOYMENT_TARGET."""
for target_dict in targets.values():
for config in target_dict['configurations'].values():
if config.get('xcode_settings', {}).get('IPHONEOS_DEPLOYMENT_TARGET'):
return True
return False
def _AddIOSDeviceConfigurations(targets):
"""Clone all targets and append -iphoneos to the name. Configure these targets
to build for iOS devices."""
for target_dict in targets.values():
for config_name in target_dict['configurations'].keys():
config = target_dict['configurations'][config_name]
new_config_name = config_name + '-iphoneos'
new_config_dict = copy.deepcopy(config)
if target_dict['toolset'] == 'target':
new_config_dict['xcode_settings']['ARCHS'] = ['armv7']
new_config_dict['xcode_settings']['SDKROOT'] = 'iphoneos'
target_dict['configurations'][new_config_name] = new_config_dict
return targets
def CloneConfigurationForDeviceAndEmulator(target_dicts):
  """If |target_dicts| contains any iOS targets, automatically create
  -iphoneos targets for iOS device builds; otherwise returns the input
  unchanged."""
  if not _HasIOSTarget(target_dicts):
    return target_dicts
  return _AddIOSDeviceConfigurations(target_dicts)

28
tools/gyp/pylib/gyp/xcodeproj_file.py

@ -169,7 +169,7 @@ _quoted = re.compile('___')
# This pattern should match any character that needs to be escaped by
# XCObject._EncodeString. See that function.
_escaped = re.compile('[\\\\"]|[^ -~]')
_escaped = re.compile('[\\\\"]|[\x00-\x1f]')
# Used by SourceTreeAndPathFromPath
@ -557,9 +557,9 @@ class XCObject(object):
# 10 ^J NL is encoded as "\n"
# 13 ^M CR is encoded as "\n" rendering it indistinguishable from
# 10 ^J NL
# All other nonprintable characters within the ASCII range (0 through 127
# inclusive) are encoded as "\U001f" referring to the Unicode code point in
# hexadecimal. For example, character 14 (^N SO) is encoded as "\U000e".
# All other characters within the ASCII control character range (0 through
# 31 inclusive) are encoded as "\U001f" referring to the Unicode code point
# in hexadecimal. For example, character 14 (^N SO) is encoded as "\U000e".
# Characters above the ASCII range are passed through to the output encoded
# as UTF-8 without any escaping. These mappings are contained in the
# class' _encode_transforms list.
@ -1483,8 +1483,11 @@ class PBXFileReference(XCFileLikeElement, XCContainerPortal, XCRemoteObject):
'cpp': 'sourcecode.cpp.cpp',
'css': 'text.css',
'cxx': 'sourcecode.cpp.cpp',
'dart': 'sourcecode',
'dylib': 'compiled.mach-o.dylib',
'framework': 'wrapper.framework',
'gyp': 'sourcecode',
'gypi': 'sourcecode',
'h': 'sourcecode.c.h',
'hxx': 'sourcecode.cpp.h',
'icns': 'image.icns',
@ -1512,8 +1515,15 @@ class PBXFileReference(XCFileLikeElement, XCContainerPortal, XCRemoteObject):
'y': 'sourcecode.yacc',
}
prop_map = {
'dart': 'explicitFileType',
'gyp': 'explicitFileType',
'gypi': 'explicitFileType',
}
if is_dir:
file_type = 'folder'
prop_name = 'lastKnownFileType'
else:
basename = posixpath.basename(self._properties['path'])
(root, ext) = posixpath.splitext(basename)
@ -1528,8 +1538,9 @@ class PBXFileReference(XCFileLikeElement, XCContainerPortal, XCRemoteObject):
# for unrecognized files not containing text. Xcode seems to choose
# based on content.
file_type = extension_map.get(ext, 'text')
prop_name = prop_map.get(ext, 'lastKnownFileType')
self._properties['lastKnownFileType'] = file_type
self._properties[prop_name] = file_type
class PBXVariantGroup(PBXGroup, XCFileLikeElement):
@ -2239,6 +2250,8 @@ class PBXNativeTarget(XCTarget):
'lib', '.a'],
'com.apple.product-type.tool': ['compiled.mach-o.executable',
'', ''],
'com.apple.product-type.bundle.unit-test': ['wrapper.cfbundle',
'', '.xctest'],
'com.googlecode.gyp.xcode.bundle': ['compiled.mach-o.dylib',
'', '.so'],
}
@ -2292,6 +2305,11 @@ class PBXNativeTarget(XCTarget):
if force_extension is None:
force_extension = suffix[1:]
if self._properties['productType'] == \
'com.apple.product-type-bundle.unit.test':
if force_extension is None:
force_extension = suffix[1:]
if force_extension is not None:
# If it's a wrapper (bundle), set WRAPPER_EXTENSION.
if filetype.startswith('wrapper.'):

11
tools/gyp/setup.py

@ -4,10 +4,7 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from distutils.core import setup
from distutils.command.install import install
from distutils.command.install_lib import install_lib
from distutils.command.install_scripts import install_scripts
from setuptools import setup
setup(
name='gyp',
@ -18,9 +15,5 @@ setup(
url='http://code.google.com/p/gyp',
package_dir = {'': 'pylib'},
packages=['gyp', 'gyp.generator'],
scripts = ['gyp'],
cmdclass = {'install': install,
'install_lib': install_lib,
'install_scripts': install_scripts},
entry_points = {'console_scripts': ['gyp=gyp:script_main'] }
)

1
tools/gyp/tools/emacs/gyp.el

@ -43,6 +43,7 @@
(add-to-list 'auto-mode-alist '("\\.gyp\\'" . gyp-mode))
(add-to-list 'auto-mode-alist '("\\.gypi\\'" . gyp-mode))
(add-to-list 'auto-mode-alist '("/\\.gclient\\'" . gyp-mode))
;;; Font-lock support

0
tools/gyp_node → tools/gyp_node.py

Loading…
Cancel
Save