Browse Source

tools: update gyp to r1535

This commit contains one additional patch that makes gyp work on DragonFlyBSD,
see https://codereview.chromium.org/11348152/ for details.
v0.9.4-release
Ben Noordhuis 12 years ago
parent
commit
38c52a0575
  1. 4
      tools/gyp/gyptest.py
  2. 22
      tools/gyp/pylib/gyp/MSVSNew.py
  3. 13
      tools/gyp/pylib/gyp/MSVSVersion.py
  4. 32
      tools/gyp/pylib/gyp/__init__.py
  5. 26
      tools/gyp/pylib/gyp/common.py
  6. 28
      tools/gyp/pylib/gyp/common_test.py
  7. 27
      tools/gyp/pylib/gyp/generator/android.py
  8. 31
      tools/gyp/pylib/gyp/generator/dump_dependency_json.py
  9. 114
      tools/gyp/pylib/gyp/generator/make.py
  10. 46
      tools/gyp/pylib/gyp/generator/msvs.py
  11. 6
      tools/gyp/pylib/gyp/generator/msvs_test.py
  12. 223
      tools/gyp/pylib/gyp/generator/ninja.py
  13. 25
      tools/gyp/pylib/gyp/generator/scons.py
  14. 19
      tools/gyp/pylib/gyp/generator/xcode.py
  15. 456
      tools/gyp/pylib/gyp/input.py
  16. 5
      tools/gyp/pylib/gyp/mac_tool.py
  17. 113
      tools/gyp/pylib/gyp/msvs_emulation.py
  18. 15
      tools/gyp/pylib/gyp/ninja_syntax.py
  19. 68
      tools/gyp/pylib/gyp/system_test.py
  20. 43
      tools/gyp/pylib/gyp/win_tool.py
  21. 21
      tools/gyp/pylib/gyp/xcode_emulation.py
  22. 113
      tools/gyp/pylib/gyp/xcodeproj_file.py

4
tools/gyp/gyptest.py

@@ -171,7 +171,9 @@ def main(argv=None):
os.chdir(opts.chdir) os.chdir(opts.chdir)
if opts.path: if opts.path:
os.environ['PATH'] += ':' + ':'.join(opts.path) extra_path = [os.path.abspath(p) for p in opts.path]
extra_path = os.pathsep.join(extra_path)
os.environ['PATH'] += os.pathsep + extra_path
if not args: if not args:
if not opts.all: if not opts.all:

22
tools/gyp/pylib/gyp/MSVSNew.py

@@ -59,7 +59,13 @@ def MakeGuid(name, seed='msvs_new'):
#------------------------------------------------------------------------------ #------------------------------------------------------------------------------
class MSVSFolder(object): class MSVSSolutionEntry(object):
def __cmp__(self, other):
# Sort by name then guid (so things are in order on vs2008).
return cmp((self.name, self.get_guid()), (other.name, other.get_guid()))
class MSVSFolder(MSVSSolutionEntry):
"""Folder in a Visual Studio project or solution.""" """Folder in a Visual Studio project or solution."""
def __init__(self, path, name = None, entries = None, def __init__(self, path, name = None, entries = None,
@@ -85,7 +91,7 @@ class MSVSFolder(object):
self.guid = guid self.guid = guid
# Copy passed lists (or set to empty lists) # Copy passed lists (or set to empty lists)
self.entries = list(entries or []) self.entries = sorted(list(entries or []))
self.items = list(items or []) self.items = list(items or [])
self.entry_type_guid = ENTRY_TYPE_GUIDS['folder'] self.entry_type_guid = ENTRY_TYPE_GUIDS['folder']
@@ -100,7 +106,7 @@ class MSVSFolder(object):
#------------------------------------------------------------------------------ #------------------------------------------------------------------------------
class MSVSProject(object): class MSVSProject(MSVSSolutionEntry):
"""Visual Studio project.""" """Visual Studio project."""
def __init__(self, path, name = None, dependencies = None, guid = None, def __init__(self, path, name = None, dependencies = None, guid = None,
@@ -229,15 +235,7 @@ class MSVSSolution:
if isinstance(e, MSVSFolder): if isinstance(e, MSVSFolder):
entries_to_check += e.entries entries_to_check += e.entries
# Sort by name then guid (so things are in order on vs2008). all_entries = sorted(all_entries)
def NameThenGuid(a, b):
if a.name < b.name: return -1
if a.name > b.name: return 1
if a.get_guid() < b.get_guid(): return -1
if a.get_guid() > b.get_guid(): return 1
return 0
all_entries = sorted(all_entries, NameThenGuid)
# Open file and print header # Open file and print header
f = writer(self.path) f = writer(self.path)

13
tools/gyp/pylib/gyp/MSVSVersion.py

@@ -9,6 +9,7 @@ import os
import re import re
import subprocess import subprocess
import sys import sys
import gyp
class VisualStudioVersion(object): class VisualStudioVersion(object):
@@ -193,6 +194,8 @@ def _CreateVersion(name, path, sdk_based=False):
autodetected if GYP_MSVS_VERSION is not explicitly specified. If a version is autodetected if GYP_MSVS_VERSION is not explicitly specified. If a version is
passed in that doesn't match a value in versions python will throw a error. passed in that doesn't match a value in versions python will throw a error.
""" """
if path:
path = os.path.normpath(path)
versions = { versions = {
'2012': VisualStudioVersion('2012', '2012': VisualStudioVersion('2012',
'Visual Studio 2012', 'Visual Studio 2012',
@@ -264,6 +267,14 @@ def _CreateVersion(name, path, sdk_based=False):
return versions[str(name)] return versions[str(name)]
def _ConvertToCygpath(path):
"""Convert to cygwin path if we are using cygwin."""
if sys.platform == 'cygwin':
p = subprocess.Popen(['cygpath', path], stdout=subprocess.PIPE)
path = p.communicate()[0].strip()
return path
def _DetectVisualStudioVersions(versions_to_check, force_express): def _DetectVisualStudioVersions(versions_to_check, force_express):
"""Collect the list of installed visual studio versions. """Collect the list of installed visual studio versions.
@@ -294,6 +305,7 @@ def _DetectVisualStudioVersions(versions_to_check, force_express):
path = _RegistryGetValue(keys[index], 'InstallDir') path = _RegistryGetValue(keys[index], 'InstallDir')
if not path: if not path:
continue continue
path = _ConvertToCygpath(path)
# Check for full. # Check for full.
full_path = os.path.join(path, 'devenv.exe') full_path = os.path.join(path, 'devenv.exe')
express_path = os.path.join(path, 'vcexpress.exe') express_path = os.path.join(path, 'vcexpress.exe')
@@ -314,6 +326,7 @@ def _DetectVisualStudioVersions(versions_to_check, force_express):
path = _RegistryGetValue(keys[index], version) path = _RegistryGetValue(keys[index], version)
if not path: if not path:
continue continue
path = _ConvertToCygpath(path)
versions.append(_CreateVersion(version_to_year[version] + 'e', versions.append(_CreateVersion(version_to_year[version] + 'e',
os.path.join(path, '..'), sdk_based=True)) os.path.join(path, '..'), sdk_based=True))

32
tools/gyp/pylib/gyp/__init__.py

@@ -12,6 +12,7 @@ import re
import shlex import shlex
import sys import sys
import traceback import traceback
from gyp.common import GypError
# Default debug modes for GYP # Default debug modes for GYP
debug = {} debug = {}
@@ -44,15 +45,9 @@ def FindBuildFiles():
return build_files return build_files
class GypError(Exception):
"""Error class representing an error, which is to be presented
to the user. The main entry point will catch and display this.
"""
pass
def Load(build_files, format, default_variables={}, def Load(build_files, format, default_variables={},
includes=[], depth='.', params=None, check=False, circular_check=True): includes=[], depth='.', params=None, check=False,
circular_check=True):
""" """
Loads one or more specified build files. Loads one or more specified build files.
default_variables and includes will be copied before use. default_variables and includes will be copied before use.
@@ -130,7 +125,8 @@ def Load(build_files, format, default_variables={},
# Process the input specific to this generator. # Process the input specific to this generator.
result = gyp.input.Load(build_files, default_variables, includes[:], result = gyp.input.Load(build_files, default_variables, includes[:],
depth, generator_input_info, check, circular_check) depth, generator_input_info, check, circular_check,
params['parallel'])
return [generator] + result return [generator] + result
def NameValueListToDict(name_value_list): def NameValueListToDict(name_value_list):
@@ -317,9 +313,14 @@ def gyp_main(args):
help='do not read options from environment variables') help='do not read options from environment variables')
parser.add_option('--check', dest='check', action='store_true', parser.add_option('--check', dest='check', action='store_true',
help='check format of gyp files') help='check format of gyp files')
parser.add_option('--parallel', action='store_true',
env_name='GYP_PARALLEL',
help='Use multiprocessing for speed (experimental)')
parser.add_option('--toplevel-dir', dest='toplevel_dir', action='store', parser.add_option('--toplevel-dir', dest='toplevel_dir', action='store',
default=None, metavar='DIR', type='path', default=None, metavar='DIR', type='path',
help='directory to use as the root of the source tree') help='directory to use as the root of the source tree')
parser.add_option('--build', dest='configs', action='append',
help='configuration for build after project generation')
# --no-circular-check disables the check for circular relationships between # --no-circular-check disables the check for circular relationships between
# .gyp files. These relationships should not exist, but they've only been # .gyp files. These relationships should not exist, but they've only been
# observed to be harmful with the Xcode generator. Chromium's .gyp files # observed to be harmful with the Xcode generator. Chromium's .gyp files
@@ -374,6 +375,9 @@ def gyp_main(args):
if g_o: if g_o:
options.generator_output = g_o options.generator_output = g_o
if not options.parallel and options.use_environment:
options.parallel = bool(os.environ.get('GYP_PARALLEL'))
for mode in options.debug: for mode in options.debug:
gyp.debug[mode] = 1 gyp.debug[mode] = 1
@@ -484,7 +488,8 @@ def gyp_main(args):
'cwd': os.getcwd(), 'cwd': os.getcwd(),
'build_files_arg': build_files_arg, 'build_files_arg': build_files_arg,
'gyp_binary': sys.argv[0], 'gyp_binary': sys.argv[0],
'home_dot_gyp': home_dot_gyp} 'home_dot_gyp': home_dot_gyp,
'parallel': options.parallel}
# Start with the default variables from the command line. # Start with the default variables from the command line.
[generator, flat_list, targets, data] = Load(build_files, format, [generator, flat_list, targets, data] = Load(build_files, format,
@@ -502,6 +507,13 @@ def gyp_main(args):
# generate targets in the order specified in flat_list. # generate targets in the order specified in flat_list.
generator.GenerateOutput(flat_list, targets, data, params) generator.GenerateOutput(flat_list, targets, data, params)
if options.configs:
valid_configs = targets[flat_list[0]]['configurations'].keys()
for conf in options.configs:
if conf not in valid_configs:
raise GypError('Invalid config specified via --build: %s' % conf)
generator.PerformBuild(data, options.configs, params)
# Done # Done
return 0 return 0

26
tools/gyp/pylib/gyp/common.py

@@ -27,6 +27,13 @@ class memoize(object):
return result return result
class GypError(Exception):
"""Error class representing an error, which is to be presented
to the user. The main entry point will catch and display this.
"""
pass
def ExceptionAppend(e, msg): def ExceptionAppend(e, msg):
"""Append a message to the given exception's message.""" """Append a message to the given exception's message."""
if not e.args: if not e.args:
@@ -361,13 +368,20 @@ def GetFlavor(params):
'cygwin': 'win', 'cygwin': 'win',
'win32': 'win', 'win32': 'win',
'darwin': 'mac', 'darwin': 'mac',
'sunos5': 'solaris',
'freebsd7': 'freebsd',
'freebsd8': 'freebsd',
'freebsd9': 'freebsd',
} }
flavor = flavors.get(sys.platform, 'linux')
return params.get('flavor', flavor) if 'flavor' in params:
return params['flavor']
if sys.platform in flavors:
return flavors[sys.platform]
if sys.platform.startswith('sunos'):
return 'solaris'
if sys.platform.startswith('freebsd'):
return 'freebsd'
if sys.platform.startswith('dragonfly'):
return 'dragonflybsd'
return 'linux'
def CopyTool(flavor, out_path): def CopyTool(flavor, out_path):

28
tools/gyp/pylib/gyp/common_test.py

@@ -8,6 +8,7 @@
import gyp.common import gyp.common
import unittest import unittest
import sys
class TestTopologicallySorted(unittest.TestCase): class TestTopologicallySorted(unittest.TestCase):
@@ -40,5 +41,32 @@ class TestTopologicallySorted(unittest.TestCase):
graph.keys(), GetEdge) graph.keys(), GetEdge)
class TestGetFlavor(unittest.TestCase):
"""Test that gyp.common.GetFlavor works as intended"""
original_platform = ''
def setUp(self):
self.original_platform = sys.platform
def tearDown(self):
sys.platform = self.original_platform
def assertFlavor(self, expected, argument, param):
sys.platform = argument
self.assertEqual(expected, gyp.common.GetFlavor(param))
def test_platform_default(self):
self.assertFlavor('dragonflybsd', 'dragonfly3', {})
self.assertFlavor('freebsd' , 'freebsd9' , {})
self.assertFlavor('freebsd' , 'freebsd10' , {})
self.assertFlavor('solaris' , 'sunos5' , {});
self.assertFlavor('solaris' , 'sunos' , {});
self.assertFlavor('linux' , 'linux2' , {});
self.assertFlavor('linux' , 'linux3' , {});
def test_param(self):
self.assertFlavor('foobar', 'linux2' , {'flavor': 'foobar'})
if __name__ == '__main__': if __name__ == '__main__':
unittest.main() unittest.main()

27
tools/gyp/pylib/gyp/generator/android.py

@@ -38,12 +38,22 @@ generator_default_variables = {
'RULE_INPUT_PATH': '$(RULE_SOURCES)', 'RULE_INPUT_PATH': '$(RULE_SOURCES)',
'RULE_INPUT_EXT': '$(suffix $<)', 'RULE_INPUT_EXT': '$(suffix $<)',
'RULE_INPUT_NAME': '$(notdir $<)', 'RULE_INPUT_NAME': '$(notdir $<)',
'CONFIGURATION_NAME': 'NOT_USED_ON_ANDROID',
} }
# Make supports multiple toolsets # Make supports multiple toolsets
generator_supports_multiple_toolsets = True generator_supports_multiple_toolsets = True
# Generator-specific gyp specs.
generator_additional_non_configuration_keys = [
# Boolean to declare that this target does not want its name mangled.
'android_unmangled_name',
]
generator_additional_path_sections = []
generator_extra_sources_for_rules = []
SHARED_FOOTER = """\ SHARED_FOOTER = """\
# "gyp_all_modules" is a concatenation of the "gyp_all_modules" targets from # "gyp_all_modules" is a concatenation of the "gyp_all_modules" targets from
# all the included sub-makefiles. This is just here to clarify. # all the included sub-makefiles. This is just here to clarify.
@@ -153,7 +163,7 @@ class AndroidMkWriter(object):
extra_outputs = [] extra_outputs = []
extra_sources = [] extra_sources = []
self.android_class = MODULE_CLASSES.get(self.type, 'NONE') self.android_class = MODULE_CLASSES.get(self.type, 'GYP')
self.android_module = self.ComputeAndroidModule(spec) self.android_module = self.ComputeAndroidModule(spec)
(self.android_stem, self.android_suffix) = self.ComputeOutputParts(spec) (self.android_stem, self.android_suffix) = self.ComputeOutputParts(spec)
self.output = self.output_binary = self.ComputeOutput(spec) self.output = self.output_binary = self.ComputeOutput(spec)
@@ -576,6 +586,10 @@ class AndroidMkWriter(object):
distinguish gyp-generated module names. distinguish gyp-generated module names.
""" """
if int(spec.get('android_unmangled_name', 0)):
assert self.type != 'shared_library' or self.target.startswith('lib')
return self.target
if self.type == 'shared_library': if self.type == 'shared_library':
# For reasons of convention, the Android build system requires that all # For reasons of convention, the Android build system requires that all
# shared library modules are named 'libfoo' when generating -l flags. # shared library modules are named 'libfoo' when generating -l flags.
@@ -838,10 +852,11 @@ class AndroidMkWriter(object):
# Add an alias from the gyp target name to the Android module name. This # Add an alias from the gyp target name to the Android module name. This
# simplifies manual builds of the target, and is required by the test # simplifies manual builds of the target, and is required by the test
# framework. # framework.
self.WriteLn('# Alias gyp target name.') if self.target != self.android_module:
self.WriteLn('.PHONY: %s' % self.target) self.WriteLn('# Alias gyp target name.')
self.WriteLn('%s: %s' % (self.target, self.android_module)) self.WriteLn('.PHONY: %s' % self.target)
self.WriteLn('') self.WriteLn('%s: %s' % (self.target, self.android_module))
self.WriteLn('')
# Add the command to trigger build of the target type depending # Add the command to trigger build of the target type depending
# on the toolset. Ex: BUILD_STATIC_LIBRARY vs. BUILD_HOST_STATIC_LIBRARY # on the toolset. Ex: BUILD_STATIC_LIBRARY vs. BUILD_HOST_STATIC_LIBRARY
@@ -989,7 +1004,7 @@ def GenerateOutput(target_list, target_dicts, data, params):
default_configuration = 'Default' default_configuration = 'Default'
srcdir = '.' srcdir = '.'
makefile_name = 'GypAndroid.mk' + options.suffix makefile_name = 'GypAndroid' + options.suffix + '.mk'
makefile_path = os.path.join(options.toplevel_dir, makefile_name) makefile_path = os.path.join(options.toplevel_dir, makefile_name)
assert not options.generator_output, ( assert not options.generator_output, (
'The Android backend does not support options.generator_output.') 'The Android backend does not support options.generator_output.')

31
tools/gyp/pylib/gyp/generator/dump_dependency_json.py

@@ -1,10 +1,12 @@
# Copyright (c) 2011 Google Inc. All rights reserved. # Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be # Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file. # found in the LICENSE file.
import collections import collections
import os
import gyp import gyp
import gyp.common import gyp.common
import gyp.msvs_emulation
import json import json
import sys import sys
@@ -22,7 +24,8 @@ for unused in ['RULE_INPUT_PATH', 'RULE_INPUT_ROOT', 'RULE_INPUT_NAME',
'RULE_INPUT_DIRNAME', 'RULE_INPUT_EXT', 'RULE_INPUT_DIRNAME', 'RULE_INPUT_EXT',
'EXECUTABLE_PREFIX', 'EXECUTABLE_SUFFIX', 'EXECUTABLE_PREFIX', 'EXECUTABLE_SUFFIX',
'STATIC_LIB_PREFIX', 'STATIC_LIB_SUFFIX', 'STATIC_LIB_PREFIX', 'STATIC_LIB_SUFFIX',
'SHARED_LIB_PREFIX', 'SHARED_LIB_SUFFIX']: 'SHARED_LIB_PREFIX', 'SHARED_LIB_SUFFIX',
'CONFIGURATION_NAME']:
generator_default_variables[unused] = '' generator_default_variables[unused] = ''
@@ -32,6 +35,30 @@ def CalculateVariables(default_variables, params):
default_variables.setdefault(key, val) default_variables.setdefault(key, val)
default_variables.setdefault('OS', gyp.common.GetFlavor(params)) default_variables.setdefault('OS', gyp.common.GetFlavor(params))
flavor = gyp.common.GetFlavor(params)
if flavor == 'win':
# Copy additional generator configuration data from VS, which is shared
# by the Windows Ninja generator.
import gyp.generator.msvs as msvs_generator
generator_additional_non_configuration_keys = getattr(msvs_generator,
'generator_additional_non_configuration_keys', [])
generator_additional_path_sections = getattr(msvs_generator,
'generator_additional_path_sections', [])
# Set a variable so conditions can be based on msvs_version.
msvs_version = gyp.msvs_emulation.GetVSVersion(generator_flags)
default_variables['MSVS_VERSION'] = msvs_version.ShortName()
# To determine processor word size on Windows, in addition to checking
# PROCESSOR_ARCHITECTURE (which reflects the word size of the current
# process), it is also necessary to check PROCESSOR_ARCHITEW6432 (which
# contains the actual word size of the system when running thru WOW64).
if ('64' in os.environ.get('PROCESSOR_ARCHITECTURE', '') or
'64' in os.environ.get('PROCESSOR_ARCHITEW6432', '')):
default_variables['MSVS_OS_BITS'] = 64
else:
default_variables['MSVS_OS_BITS'] = 32
def CalculateGeneratorInputInfo(params): def CalculateGeneratorInputInfo(params):
"""Calculate the generator specific info that gets fed to input (called by """Calculate the generator specific info that gets fed to input (called by

114
tools/gyp/pylib/gyp/generator/make.py

@@ -24,9 +24,9 @@
import os import os
import re import re
import sys import sys
import subprocess
import gyp import gyp
import gyp.common import gyp.common
import gyp.system_test
import gyp.xcode_emulation import gyp.xcode_emulation
from gyp.common import GetEnvironFallback from gyp.common import GetEnvironFallback
@@ -125,7 +128,10 @@ SPACE_REPLACEMENT = '?'
LINK_COMMANDS_LINUX = """\ LINK_COMMANDS_LINUX = """\
quiet_cmd_alink = AR($(TOOLSET)) $@ quiet_cmd_alink = AR($(TOOLSET)) $@
cmd_alink = rm -f $@ && $(AR.$(TOOLSET)) $(ARFLAGS.$(TOOLSET)) $@ $(filter %.o,$^) cmd_alink = rm -f $@ && $(AR.$(TOOLSET)) crs $@ $(filter %.o,$^)
quiet_cmd_alink_thin = AR($(TOOLSET)) $@
cmd_alink_thin = rm -f $@ && $(AR.$(TOOLSET)) crsT $@ $(filter %.o,$^)
# Due to circular dependencies between libraries :(, we wrap the # Due to circular dependencies between libraries :(, we wrap the
# special "figure out circular dependencies" flags around the entire # special "figure out circular dependencies" flags around the entire
@@ -158,7 +161,7 @@ cmd_solink_module = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSE
LINK_COMMANDS_MAC = """\ LINK_COMMANDS_MAC = """\
quiet_cmd_alink = LIBTOOL-STATIC $@ quiet_cmd_alink = LIBTOOL-STATIC $@
cmd_alink = rm -f $@ && ./gyp-mac-tool filter-libtool libtool -static -o $@ $(filter %.o,$^) cmd_alink = rm -f $@ && ./gyp-mac-tool filter-libtool libtool $(GYP_LIBTOOLFLAGS) -static -o $@ $(filter %.o,$^)
quiet_cmd_link = LINK($(TOOLSET)) $@ quiet_cmd_link = LINK($(TOOLSET)) $@
cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS) cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS)
@@ -176,7 +179,10 @@ cmd_solink_module = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSE
LINK_COMMANDS_ANDROID = """\ LINK_COMMANDS_ANDROID = """\
quiet_cmd_alink = AR($(TOOLSET)) $@ quiet_cmd_alink = AR($(TOOLSET)) $@
cmd_alink = rm -f $@ && $(AR.$(TOOLSET)) $(ARFLAGS.$(TOOLSET)) $@ $(filter %.o,$^) cmd_alink = rm -f $@ && $(AR.$(TOOLSET)) crs $@ $(filter %.o,$^)
quiet_cmd_alink_thin = AR($(TOOLSET)) $@
cmd_alink_thin = rm -f $@ && $(AR.$(TOOLSET)) crsT $@ $(filter %.o,$^)
# Due to circular dependencies between libraries :(, we wrap the # Due to circular dependencies between libraries :(, we wrap the
# special "figure out circular dependencies" flags around the entire # special "figure out circular dependencies" flags around the entire
@@ -262,10 +268,7 @@ CXXFLAGS.target ?= $(CXXFLAGS)
LINK.target ?= %(LINK.target)s LINK.target ?= %(LINK.target)s
LDFLAGS.target ?= $(LDFLAGS) LDFLAGS.target ?= $(LDFLAGS)
AR.target ?= $(AR) AR.target ?= $(AR)
ARFLAGS.target ?= %(ARFLAGS.target)s
# N.B.: the logic of which commands to run should match the computation done
# in gyp's make.py where ARFLAGS.host etc. is computed.
# TODO(evan): move all cross-compilation logic to gyp-time so we don't need # TODO(evan): move all cross-compilation logic to gyp-time so we don't need
# to replicate this environment fallback in make as well. # to replicate this environment fallback in make as well.
CC.host ?= %(CC.host)s CC.host ?= %(CC.host)s
@@ -275,7 +278,6 @@ CXXFLAGS.host ?=
LINK.host ?= %(LINK.host)s LINK.host ?= %(LINK.host)s
LDFLAGS.host ?= LDFLAGS.host ?=
AR.host ?= %(AR.host)s AR.host ?= %(AR.host)s
ARFLAGS.host := %(ARFLAGS.host)s
# Define a dir function that can handle spaces. # Define a dir function that can handle spaces.
# http://www.gnu.org/software/make/manual/make.html#Syntax-of-Functions # http://www.gnu.org/software/make/manual/make.html#Syntax-of-Functions
@@ -721,9 +723,12 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
else: else:
self.output = self.output_binary = self.ComputeOutput(spec) self.output = self.output_binary = self.ComputeOutput(spec)
self.is_standalone_static_library = bool(
spec.get('standalone_static_library', 0))
self._INSTALLABLE_TARGETS = ('executable', 'loadable_module', self._INSTALLABLE_TARGETS = ('executable', 'loadable_module',
'shared_library') 'shared_library')
if self.type in self._INSTALLABLE_TARGETS: if (self.is_standalone_static_library or
self.type in self._INSTALLABLE_TARGETS):
self.alias = os.path.basename(self.output) self.alias = os.path.basename(self.output)
install_path = self._InstallableTargetInstallPath() install_path = self._InstallableTargetInstallPath()
else: else:
@@ -838,6 +843,7 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
actions) actions)
part_of_all: flag indicating this target is part of 'all' part_of_all: flag indicating this target is part of 'all'
""" """
env = self.GetSortedXcodeEnv()
for action in actions: for action in actions:
name = StringToMakefileVariable('%s_%s' % (self.qualified_target, name = StringToMakefileVariable('%s_%s' % (self.qualified_target,
action['action_name'])) action['action_name']))
@@ -858,7 +864,11 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
extra_mac_bundle_resources += outputs extra_mac_bundle_resources += outputs
# Write the actual command. # Write the actual command.
command = gyp.common.EncodePOSIXShellList(action['action']) action_commands = action['action']
if self.flavor == 'mac':
action_commands = [gyp.xcode_emulation.ExpandEnvVars(command, env)
for command in action_commands]
command = gyp.common.EncodePOSIXShellList(action_commands)
if 'message' in action: if 'message' in action:
self.WriteLn('quiet_cmd_%s = ACTION %s $@' % (name, action['message'])) self.WriteLn('quiet_cmd_%s = ACTION %s $@' % (name, action['message']))
else: else:
@@ -907,7 +917,6 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
"Spaces in action output filenames not supported (%s)" % output) "Spaces in action output filenames not supported (%s)" % output)
# See the comment in WriteCopies about expanding env vars. # See the comment in WriteCopies about expanding env vars.
env = self.GetSortedXcodeEnv()
outputs = [gyp.xcode_emulation.ExpandEnvVars(o, env) for o in outputs] outputs = [gyp.xcode_emulation.ExpandEnvVars(o, env) for o in outputs]
inputs = [gyp.xcode_emulation.ExpandEnvVars(i, env) for i in inputs] inputs = [gyp.xcode_emulation.ExpandEnvVars(i, env) for i in inputs]
@@ -933,6 +942,7 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
rules (used to make other pieces dependent on these rules) rules (used to make other pieces dependent on these rules)
part_of_all: flag indicating this target is part of 'all' part_of_all: flag indicating this target is part of 'all'
""" """
env = self.GetSortedXcodeEnv()
for rule in rules: for rule in rules:
name = StringToMakefileVariable('%s_%s' % (self.qualified_target, name = StringToMakefileVariable('%s_%s' % (self.qualified_target,
rule['rule_name'])) rule['rule_name']))
@@ -972,6 +982,10 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
# amount of pain. # amount of pain.
actions += ['@touch --no-create $@'] actions += ['@touch --no-create $@']
# See the comment in WriteCopies about expanding env vars.
outputs = [gyp.xcode_emulation.ExpandEnvVars(o, env) for o in outputs]
inputs = [gyp.xcode_emulation.ExpandEnvVars(i, env) for i in inputs]
outputs = map(self.Absolutify, outputs) outputs = map(self.Absolutify, outputs)
all_outputs += outputs all_outputs += outputs
# Only write the 'obj' and 'builddir' rules for the "primary" output # Only write the 'obj' and 'builddir' rules for the "primary" output
@@ -996,6 +1010,9 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
# action, cd_action, and mkdirs get written to a toplevel variable # action, cd_action, and mkdirs get written to a toplevel variable
# called cmd_foo. Toplevel variables can't handle things that change # called cmd_foo. Toplevel variables can't handle things that change
# per makefile like $(TARGET), so hardcode the target. # per makefile like $(TARGET), so hardcode the target.
if self.flavor == 'mac':
action = [gyp.xcode_emulation.ExpandEnvVars(command, env)
for command in action]
action = gyp.common.EncodePOSIXShellList(action) action = gyp.common.EncodePOSIXShellList(action)
action = action.replace('$(TARGET)', self.target) action = action.replace('$(TARGET)', self.target)
cd_action = cd_action.replace('$(TARGET)', self.target) cd_action = cd_action.replace('$(TARGET)', self.target)
@@ -1049,7 +1066,7 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
outputs = [] outputs = []
for copy in copies: for copy in copies:
for path in copy['files']: for path in copy['files']:
# Absolutify() calls normpath, stripping trailing slashes. # Absolutify() may call normpath, and will strip trailing slashes.
path = Sourceify(self.Absolutify(path)) path = Sourceify(self.Absolutify(path))
filename = os.path.split(path)[1] filename = os.path.split(path)[1]
output = Sourceify(self.Absolutify(os.path.join(copy['destination'], output = Sourceify(self.Absolutify(os.path.join(copy['destination'],
@@ -1419,6 +1436,9 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
ldflags.append(r'-Wl,-rpath-link=\$(builddir)/lib.%s/' % ldflags.append(r'-Wl,-rpath-link=\$(builddir)/lib.%s/' %
self.toolset) self.toolset)
self.WriteList(ldflags, 'LDFLAGS_%s' % configname) self.WriteList(ldflags, 'LDFLAGS_%s' % configname)
if self.flavor == 'mac':
self.WriteList(self.xcode_settings.GetLibtoolflags(configname),
'LIBTOOLFLAGS_%s' % configname)
libraries = spec.get('libraries') libraries = spec.get('libraries')
if libraries: if libraries:
# Remove duplicate entries # Remove duplicate entries
@@ -1430,6 +1450,10 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
QuoteSpaces(self.output_binary)) QuoteSpaces(self.output_binary))
self.WriteLn('%s: LIBS := $(LIBS)' % QuoteSpaces(self.output_binary)) self.WriteLn('%s: LIBS := $(LIBS)' % QuoteSpaces(self.output_binary))
if self.flavor == 'mac':
self.WriteLn('%s: GYP_LIBTOOLFLAGS := $(LIBTOOLFLAGS_$(BUILDTYPE))' %
QuoteSpaces(self.output_binary))
# Postbuild actions. Like actions, but implicitly depend on the target's # Postbuild actions. Like actions, but implicitly depend on the target's
# output. # output.
postbuilds = [] postbuilds = []
@@ -1517,8 +1541,13 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
for link_dep in link_deps: for link_dep in link_deps:
assert ' ' not in link_dep, ( assert ' ' not in link_dep, (
"Spaces in alink input filenames not supported (%s)" % link_dep) "Spaces in alink input filenames not supported (%s)" % link_dep)
self.WriteDoCmd([self.output_binary], link_deps, 'alink', part_of_all, if (self.flavor not in ('mac', 'win') and not
postbuilds=postbuilds) self.is_standalone_static_library):
self.WriteDoCmd([self.output_binary], link_deps, 'alink_thin',
part_of_all, postbuilds=postbuilds)
else:
self.WriteDoCmd([self.output_binary], link_deps, 'alink', part_of_all,
postbuilds=postbuilds)
elif self.type == 'shared_library': elif self.type == 'shared_library':
self.WriteLn('%s: LD_INPUTS := %s' % ( self.WriteLn('%s: LD_INPUTS := %s' % (
QuoteSpaces(self.output_binary), QuoteSpaces(self.output_binary),
@@ -1558,9 +1587,12 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
# 1) They need to install to the build dir or "product" dir. # 1) They need to install to the build dir or "product" dir.
# 2) They get shortcuts for building (e.g. "make chrome"). # 2) They get shortcuts for building (e.g. "make chrome").
# 3) They are part of "make all". # 3) They are part of "make all".
if self.type in self._INSTALLABLE_TARGETS: if (self.type in self._INSTALLABLE_TARGETS or
self.is_standalone_static_library):
if self.type == 'shared_library': if self.type == 'shared_library':
file_desc = 'shared library' file_desc = 'shared library'
elif self.type == 'static_library':
file_desc = 'static library'
else: else:
file_desc = 'executable' file_desc = 'executable'
install_path = self._InstallableTargetInstallPath() install_path = self._InstallableTargetInstallPath()
@@ -1830,9 +1862,10 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
"""Convert a subdirectory-relative path into a base-relative path. """Convert a subdirectory-relative path into a base-relative path.
Skips over paths that contain variables.""" Skips over paths that contain variables."""
if '$(' in path: if '$(' in path:
# path is no existing file in this case, but calling normpath is still # Don't call normpath in this case, as it might collapse the
# important for trimming trailing slashes. # path too aggressively if it features '..'. However it's still
return os.path.normpath(path) # important to strip trailing slashes.
return path.rstrip('/')
return os.path.normpath(os.path.join(self.path, path)) return os.path.normpath(os.path.join(self.path, path))
@ -1881,39 +1914,15 @@ def WriteAutoRegenerationRule(params, root_makefile, makefile_name,
build_files_args)}) build_files_args)})
def RunSystemTests(flavor): def PerformBuild(data, configurations, params):
"""Run tests against the system to compute default settings for commands. options = params['options']
for config in configurations:
Returns: arguments = ['make']
dictionary of settings matching the block of command-lines used in if options.toplevel_dir and options.toplevel_dir != '.':
SHARED_HEADER. E.g. the dictionary will contain a ARFLAGS.target arguments += '-C', options.toplevel_dir
key for the default ARFLAGS for the target ar command. arguments.append('BUILDTYPE=' + config)
""" print 'Building [%s]: %s' % (config, arguments)
# Compute flags used for building static archives. subprocess.check_call(arguments)
# N.B.: this fallback logic should match the logic in SHARED_HEADER.
# See comment there for more details.
ar_target = GetEnvironFallback(('AR_target', 'AR'), 'ar')
cc_target = GetEnvironFallback(('CC_target', 'CC'), 'cc')
arflags_target = 'crs'
# ar -T enables thin archives on Linux. OS X's ar supports a -T flag, but it
# does something useless (it limits filenames in the archive to 15 chars).
if flavor != 'mac' and gyp.system_test.TestArSupportsT(ar_command=ar_target,
cc_command=cc_target):
arflags_target = 'crsT'
ar_host = os.environ.get('AR_host', 'ar')
cc_host = os.environ.get('CC_host', 'gcc')
arflags_host = 'crs'
# It feels redundant to compute this again given that most builds aren't
# cross-compiles, but due to quirks of history CC_host defaults to 'gcc'
# while CC_target defaults to 'cc', so the commands really are different
# even though they're nearly guaranteed to run the same code underneath.
if flavor != 'mac' and gyp.system_test.TestArSupportsT(ar_command=ar_host,
cc_command=cc_host):
arflags_host = 'crsT'
return { 'ARFLAGS.target': arflags_target,
'ARFLAGS.host': arflags_host }
def GenerateOutput(target_list, target_dicts, data, params): def GenerateOutput(target_list, target_dicts, data, params):
@ -1991,12 +2000,11 @@ def GenerateOutput(target_list, target_dicts, data, params):
'flock_index': 2, 'flock_index': 2,
'extra_commands': SHARED_HEADER_SUN_COMMANDS, 'extra_commands': SHARED_HEADER_SUN_COMMANDS,
}) })
elif flavor == 'freebsd': elif flavor == 'freebsd' or flavor == 'dragonflybsd':
header_params.update({ header_params.update({
'flock': 'lockf', 'flock': 'lockf',
}) })
header_params.update(RunSystemTests(flavor))
header_params.update({ header_params.update({
'CC.target': GetEnvironFallback(('CC_target', 'CC'), '$(CC)'), 'CC.target': GetEnvironFallback(('CC_target', 'CC'), '$(CC)'),
'AR.target': GetEnvironFallback(('AR_target', 'AR'), '$(AR)'), 'AR.target': GetEnvironFallback(('AR_target', 'AR'), '$(AR)'),

46
tools/gyp/pylib/gyp/generator/msvs.py

@ -18,6 +18,7 @@ import gyp.MSVSSettings as MSVSSettings
import gyp.MSVSToolFile as MSVSToolFile import gyp.MSVSToolFile as MSVSToolFile
import gyp.MSVSUserFile as MSVSUserFile import gyp.MSVSUserFile as MSVSUserFile
import gyp.MSVSVersion as MSVSVersion import gyp.MSVSVersion as MSVSVersion
from gyp.common import GypError
# Regular expression for validating Visual Studio GUIDs. If the GUID # Regular expression for validating Visual Studio GUIDs. If the GUID
@ -1026,7 +1027,7 @@ def _AddConfigurationToMSVSProject(p, spec, config_type, config_name, config):
# Get the information for this configuration # Get the information for this configuration
include_dirs, resource_include_dirs = _GetIncludeDirs(config) include_dirs, resource_include_dirs = _GetIncludeDirs(config)
libraries = _GetLibraries(spec) libraries = _GetLibraries(spec)
out_file, vc_tool, _ = _GetOutputFilePathAndTool(spec) out_file, vc_tool, _ = _GetOutputFilePathAndTool(spec, msbuild=False)
defines = _GetDefines(config) defines = _GetDefines(config)
defines = [_EscapeCppDefineForMSVS(d) for d in defines] defines = [_EscapeCppDefineForMSVS(d) for d in defines]
disabled_warnings = _GetDisabledWarnings(config) disabled_warnings = _GetDisabledWarnings(config)
@ -1123,6 +1124,8 @@ def _GetLibraries(spec):
unique_libraries_list = [] unique_libraries_list = []
for entry in reversed(libraries): for entry in reversed(libraries):
library = re.sub('^\-l', '', entry) library = re.sub('^\-l', '', entry)
if not os.path.splitext(library)[1]:
library += '.lib'
if library not in found: if library not in found:
found.add(library) found.add(library)
unique_libraries_list.append(library) unique_libraries_list.append(library)
@ -1130,7 +1133,7 @@ def _GetLibraries(spec):
return unique_libraries_list return unique_libraries_list
def _GetOutputFilePathAndTool(spec): def _GetOutputFilePathAndTool(spec, msbuild):
"""Returns the path and tool to use for this target. """Returns the path and tool to use for this target.
Figures out the path of the file this spec will create and the name of Figures out the path of the file this spec will create and the name of
@ -1154,10 +1157,14 @@ def _GetOutputFilePathAndTool(spec):
output_file_props = output_file_map.get(spec['type']) output_file_props = output_file_map.get(spec['type'])
if output_file_props and int(spec.get('msvs_auto_output_file', 1)): if output_file_props and int(spec.get('msvs_auto_output_file', 1)):
vc_tool, msbuild_tool, out_dir, suffix = output_file_props vc_tool, msbuild_tool, out_dir, suffix = output_file_props
if spec.get('standalone_static_library', 0):
out_dir = '$(OutDir)'
out_dir = spec.get('product_dir', out_dir) out_dir = spec.get('product_dir', out_dir)
product_extension = spec.get('product_extension') product_extension = spec.get('product_extension')
if product_extension: if product_extension:
suffix = '.' + product_extension suffix = '.' + product_extension
elif msbuild:
suffix = '$(TargetExt)'
prefix = spec.get('product_prefix', '') prefix = spec.get('product_prefix', '')
product_name = spec.get('product_name', '$(ProjectName)') product_name = spec.get('product_name', '$(ProjectName)')
out_file = ntpath.join(out_dir, prefix + product_name + suffix) out_file = ntpath.join(out_dir, prefix + product_name + suffix)
@ -1666,7 +1673,7 @@ def _CreateProjectObjects(target_list, target_dicts, options, msvs_version):
build_file = gyp.common.BuildFile(qualified_target) build_file = gyp.common.BuildFile(qualified_target)
# Create object for this project. # Create object for this project.
obj = MSVSNew.MSVSProject( obj = MSVSNew.MSVSProject(
_FixPath(proj_path), proj_path,
name=spec['target_name'], name=spec['target_name'],
guid=guid, guid=guid,
spec=spec, spec=spec,
@ -1779,6 +1786,25 @@ def _ShardTargets(target_list, target_dicts):
return (new_target_list, new_target_dicts) return (new_target_list, new_target_dicts)
def PerformBuild(data, configurations, params):
options = params['options']
msvs_version = params['msvs_version']
devenv = os.path.join(msvs_version.path, 'Common7', 'IDE', 'devenv.com')
for build_file, build_file_dict in data.iteritems():
(build_file_root, build_file_ext) = os.path.splitext(build_file)
if build_file_ext != '.gyp':
continue
sln_path = build_file_root + options.suffix + '.sln'
if options.generator_output:
sln_path = os.path.join(options.generator_output, sln_path)
for config in configurations:
arguments = [devenv, sln_path, '/Build', config]
print 'Building [%s]: %s' % (config, arguments)
rtn = subprocess.check_call(arguments)
def GenerateOutput(target_list, target_dicts, data, params): def GenerateOutput(target_list, target_dicts, data, params):
"""Generate .sln and .vcproj files. """Generate .sln and .vcproj files.
@ -2571,13 +2597,13 @@ def _GetMSBuildAttributes(spec, config, build_file):
config_type = _GetMSVSConfigurationType(spec, build_file) config_type = _GetMSVSConfigurationType(spec, build_file)
config_type = _ConvertMSVSConfigurationType(config_type) config_type = _ConvertMSVSConfigurationType(config_type)
msbuild_attributes = config.get('msbuild_configuration_attributes', {}) msbuild_attributes = config.get('msbuild_configuration_attributes', {})
msbuild_attributes['ConfigurationType'] = config_type msbuild_attributes.setdefault('ConfigurationType', config_type)
output_dir = msbuild_attributes.get('OutputDirectory', output_dir = msbuild_attributes.get('OutputDirectory',
'$(SolutionDir)$(Configuration)\\') '$(SolutionDir)$(Configuration)')
msbuild_attributes['OutputDirectory'] = _FixPath(output_dir) msbuild_attributes['OutputDirectory'] = _FixPath(output_dir) + '\\'
if 'IntermediateDirectory' not in msbuild_attributes: if 'IntermediateDirectory' not in msbuild_attributes:
intermediate = '$(Configuration)\\' intermediate = _FixPath('$(Configuration)') + '\\'
msbuild_attributes['IntermediateDirectory'] = _FixPath(intermediate) msbuild_attributes['IntermediateDirectory'] = intermediate
if 'CharacterSet' in msbuild_attributes: if 'CharacterSet' in msbuild_attributes:
msbuild_attributes['CharacterSet'] = _ConvertMSVSCharacterSet( msbuild_attributes['CharacterSet'] = _ConvertMSVSCharacterSet(
msbuild_attributes['CharacterSet']) msbuild_attributes['CharacterSet'])
@ -2754,7 +2780,7 @@ def _FinalizeMSBuildSettings(spec, configuration):
msbuild_settings = MSVSSettings.ConvertToMSBuildSettings(msvs_settings) msbuild_settings = MSVSSettings.ConvertToMSBuildSettings(msvs_settings)
include_dirs, resource_include_dirs = _GetIncludeDirs(configuration) include_dirs, resource_include_dirs = _GetIncludeDirs(configuration)
libraries = _GetLibraries(spec) libraries = _GetLibraries(spec)
out_file, _, msbuild_tool = _GetOutputFilePathAndTool(spec) out_file, _, msbuild_tool = _GetOutputFilePathAndTool(spec, msbuild=True)
defines = _GetDefines(configuration) defines = _GetDefines(configuration)
if converted: if converted:
# Visual Studio 2010 has TR1 # Visual Studio 2010 has TR1
@ -3009,7 +3035,7 @@ def _GenerateMSBuildProject(project, options, version, generator_flags):
extension_to_rule_name) extension_to_rule_name)
missing_sources = _VerifySourcesExist(sources, project_dir) missing_sources = _VerifySourcesExist(sources, project_dir)
for (_, configuration) in configurations.iteritems(): for configuration in configurations.itervalues():
_FinalizeMSBuildSettings(spec, configuration) _FinalizeMSBuildSettings(spec, configuration)
# Add attributes to root element # Add attributes to root element

6
tools/gyp/pylib/gyp/generator/msvs_test.py

@ -1,6 +1,5 @@
#!/usr/bin/env python #!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Copyright (c) 2011 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be # Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file. # found in the LICENSE file.
@ -26,6 +25,9 @@ class TestSequenceFunctions(unittest.TestCase):
self.assertEqual( self.assertEqual(
msvs._GetLibraries({'other':'foo', 'libraries': ['a.lib']}), msvs._GetLibraries({'other':'foo', 'libraries': ['a.lib']}),
['a.lib']) ['a.lib'])
self.assertEqual(
msvs._GetLibraries({'libraries': ['-la']}),
['a.lib'])
self.assertEqual( self.assertEqual(
msvs._GetLibraries({'libraries': ['a.lib', 'b.lib', 'c.lib', '-lb.lib', msvs._GetLibraries({'libraries': ['a.lib', 'b.lib', 'c.lib', '-lb.lib',
'-lb.lib', 'd.lib', 'a.lib']}), '-lb.lib', 'd.lib', 'a.lib']}),

223
tools/gyp/pylib/gyp/generator/ninja.py

@ -4,15 +4,16 @@
import copy import copy
import hashlib import hashlib
import multiprocessing
import os.path import os.path
import re import re
import signal
import subprocess import subprocess
import sys import sys
import gyp import gyp
import gyp.common import gyp.common
import gyp.msvs_emulation import gyp.msvs_emulation
import gyp.MSVSVersion import gyp.MSVSVersion
import gyp.system_test
import gyp.xcode_emulation import gyp.xcode_emulation
from gyp.common import GetEnvironFallback from gyp.common import GetEnvironFallback
@ -354,7 +355,8 @@ class NinjaWriter:
self.ninja.newline() self.ninja.newline()
return targets[0] return targets[0]
def WriteSpec(self, spec, config_name, generator_flags): def WriteSpec(self, spec, config_name, generator_flags,
case_sensitive_filesystem):
"""The main entry point for NinjaWriter: write the build rules for a spec. """The main entry point for NinjaWriter: write the build rules for a spec.
Returns a Target object, which represents the output paths for this spec. Returns a Target object, which represents the output paths for this spec.
@ -366,6 +368,8 @@ class NinjaWriter:
self.toolset = spec['toolset'] self.toolset = spec['toolset']
config = spec['configurations'][config_name] config = spec['configurations'][config_name]
self.target = Target(spec['type']) self.target = Target(spec['type'])
self.is_standalone_static_library = bool(
spec.get('standalone_static_library', 0))
self.is_mac_bundle = gyp.xcode_emulation.IsMacBundle(self.flavor, spec) self.is_mac_bundle = gyp.xcode_emulation.IsMacBundle(self.flavor, spec)
self.xcode_settings = self.msvs_settings = None self.xcode_settings = self.msvs_settings = None
@ -374,8 +378,8 @@ class NinjaWriter:
if self.flavor == 'win': if self.flavor == 'win':
self.msvs_settings = gyp.msvs_emulation.MsvsSettings(spec, self.msvs_settings = gyp.msvs_emulation.MsvsSettings(spec,
generator_flags) generator_flags)
target_platform = self.msvs_settings.GetTargetPlatform(config_name) arch = self.msvs_settings.GetArch(config_name)
self.ninja.variable('arch', self.win_env[target_platform]) self.ninja.variable('arch', self.win_env[arch])
# Compute predepends for all rules. # Compute predepends for all rules.
# actions_depends is the dependencies this target depends on before running # actions_depends is the dependencies this target depends on before running
@ -421,6 +425,8 @@ class NinjaWriter:
if sources: if sources:
pch = None pch = None
if self.flavor == 'win': if self.flavor == 'win':
gyp.msvs_emulation.VerifyMissingSources(
sources, self.abs_build_dir, generator_flags, self.GypPathToNinja)
pch = gyp.msvs_emulation.PrecompiledHeader( pch = gyp.msvs_emulation.PrecompiledHeader(
self.msvs_settings, config_name, self.GypPathToNinja) self.msvs_settings, config_name, self.GypPathToNinja)
else: else:
@ -428,7 +434,8 @@ class NinjaWriter:
self.xcode_settings, self.GypPathToNinja, self.xcode_settings, self.GypPathToNinja,
lambda path, lang: self.GypPathToUniqueOutput(path + '-' + lang)) lambda path, lang: self.GypPathToUniqueOutput(path + '-' + lang))
link_deps = self.WriteSources( link_deps = self.WriteSources(
config_name, config, sources, compile_depends_stamp, pch) config_name, config, sources, compile_depends_stamp, pch,
case_sensitive_filesystem, spec)
# Some actions/rules output 'sources' that are already object files. # Some actions/rules output 'sources' that are already object files.
link_deps += [self.GypPathToNinja(f) link_deps += [self.GypPathToNinja(f)
for f in sources if f.endswith(self.obj_ext)] for f in sources if f.endswith(self.obj_ext)]
@ -502,7 +509,7 @@ class NinjaWriter:
outputs += self.WriteRules(spec['rules'], extra_sources, prebuild, outputs += self.WriteRules(spec['rules'], extra_sources, prebuild,
extra_mac_bundle_resources) extra_mac_bundle_resources)
if 'copies' in spec: if 'copies' in spec:
outputs += self.WriteCopies(spec['copies'], prebuild) outputs += self.WriteCopies(spec['copies'], prebuild, mac_bundle_depends)
if 'sources' in spec and self.flavor == 'win': if 'sources' in spec and self.flavor == 'win':
outputs += self.WriteWinIdlFiles(spec, prebuild) outputs += self.WriteWinIdlFiles(spec, prebuild)
@ -549,11 +556,8 @@ class NinjaWriter:
is_cygwin = (self.msvs_settings.IsRuleRunUnderCygwin(action) is_cygwin = (self.msvs_settings.IsRuleRunUnderCygwin(action)
if self.flavor == 'win' else False) if self.flavor == 'win' else False)
args = action['action'] args = action['action']
args = [self.msvs_settings.ConvertVSMacros( rule_name, _ = self.WriteNewNinjaRule(name, args, description,
arg, self.base_to_build, config=self.config_name) is_cygwin, env=env)
for arg in args] if self.flavor == 'win' else args
rule_name = self.WriteNewNinjaRule(name, args, description,
is_cygwin, env=env)
inputs = [self.GypPathToNinja(i, env) for i in action['inputs']] inputs = [self.GypPathToNinja(i, env) for i in action['inputs']]
if int(action.get('process_outputs_as_sources', False)): if int(action.get('process_outputs_as_sources', False)):
@ -573,6 +577,7 @@ class NinjaWriter:
def WriteRules(self, rules, extra_sources, prebuild, def WriteRules(self, rules, extra_sources, prebuild,
extra_mac_bundle_resources): extra_mac_bundle_resources):
env = self.GetSortedXcodeEnv()
all_outputs = [] all_outputs = []
for rule in rules: for rule in rules:
# First write out a rule for the rule action. # First write out a rule for the rule action.
@ -588,10 +593,8 @@ class NinjaWriter:
('%s ' + generator_default_variables['RULE_INPUT_PATH']) % name) ('%s ' + generator_default_variables['RULE_INPUT_PATH']) % name)
is_cygwin = (self.msvs_settings.IsRuleRunUnderCygwin(rule) is_cygwin = (self.msvs_settings.IsRuleRunUnderCygwin(rule)
if self.flavor == 'win' else False) if self.flavor == 'win' else False)
args = [self.msvs_settings.ConvertVSMacros( rule_name, args = self.WriteNewNinjaRule(
arg, self.base_to_build, config=self.config_name) name, args, description, is_cygwin, env=env)
for arg in args] if self.flavor == 'win' else args
rule_name = self.WriteNewNinjaRule(name, args, description, is_cygwin)
# TODO: if the command references the outputs directly, we should # TODO: if the command references the outputs directly, we should
# simplify it to just use $out. # simplify it to just use $out.
@ -648,10 +651,10 @@ class NinjaWriter:
else: else:
assert var == None, repr(var) assert var == None, repr(var)
inputs = map(self.GypPathToNinja, inputs) inputs = [self.GypPathToNinja(i, env) for i in inputs]
outputs = map(self.GypPathToNinja, outputs) outputs = [self.GypPathToNinja(o, env) for o in outputs]
extra_bindings.append(('unique_name', extra_bindings.append(('unique_name',
re.sub('[^a-zA-Z0-9_]', '_', outputs[0]))) hashlib.md5(outputs[0]).hexdigest()))
self.ninja.build(outputs, rule_name, self.GypPathToNinja(source), self.ninja.build(outputs, rule_name, self.GypPathToNinja(source),
implicit=inputs, implicit=inputs,
order_only=prebuild, order_only=prebuild,
@ -661,7 +664,7 @@ class NinjaWriter:
return all_outputs return all_outputs
def WriteCopies(self, copies, prebuild): def WriteCopies(self, copies, prebuild, mac_bundle_depends):
outputs = [] outputs = []
env = self.GetSortedXcodeEnv() env = self.GetSortedXcodeEnv()
for copy in copies: for copy in copies:
@ -673,6 +676,15 @@ class NinjaWriter:
dst = self.GypPathToNinja(os.path.join(copy['destination'], basename), dst = self.GypPathToNinja(os.path.join(copy['destination'], basename),
env) env)
outputs += self.ninja.build(dst, 'copy', src, order_only=prebuild) outputs += self.ninja.build(dst, 'copy', src, order_only=prebuild)
if self.is_mac_bundle:
# gyp has mac_bundle_resources to copy things into a bundle's
# Resources folder, but there's no built-in way to copy files to other
# places in the bundle. Hence, some targets use copies for this. Check
# if this file is copied into the current bundle, and if so add it to
# the bundle depends so that dependent targets get rebuilt if the copy
# input changes.
if dst.startswith(self.xcode_settings.GetBundleContentsFolderPath()):
mac_bundle_depends.append(dst)
return outputs return outputs
@ -709,7 +721,7 @@ class NinjaWriter:
bundle_depends.append(out) bundle_depends.append(out)
def WriteSources(self, config_name, config, sources, predepends, def WriteSources(self, config_name, config, sources, predepends,
precompiled_header): precompiled_header, case_sensitive_filesystem, spec):
"""Write build rules to compile all of |sources|.""" """Write build rules to compile all of |sources|."""
if self.toolset == 'host': if self.toolset == 'host':
self.ninja.variable('ar', '$ar_host') self.ninja.variable('ar', '$ar_host')
@ -781,10 +793,13 @@ class NinjaWriter:
obj_ext = self.obj_ext obj_ext = self.obj_ext
if ext in ('cc', 'cpp', 'cxx'): if ext in ('cc', 'cpp', 'cxx'):
command = 'cxx' command = 'cxx'
elif ext == 'c' or (ext in ('s', 'S') and self.flavor != 'win'): elif ext == 'c' or (ext == 'S' and self.flavor != 'win'):
command = 'cc' command = 'cc'
elif ext == 's' and self.flavor != 'win': # Doesn't generate .o.d files.
command = 'cc_s'
elif (self.flavor == 'win' and ext == 'asm' and elif (self.flavor == 'win' and ext == 'asm' and
self.msvs_settings.GetTargetPlatform(config_name) == 'Win32'): self.msvs_settings.GetArch(config_name) == 'x86' and
not self.msvs_settings.HasExplicitAsmRules(spec)):
# Asm files only get auto assembled for x86 (not x64). # Asm files only get auto assembled for x86 (not x64).
command = 'asm' command = 'asm'
# Add the _asm suffix as msvs is capable of handling .cc and # Add the _asm suffix as msvs is capable of handling .cc and
@ -802,6 +817,12 @@ class NinjaWriter:
continue continue
input = self.GypPathToNinja(source) input = self.GypPathToNinja(source)
output = self.GypPathToUniqueOutput(filename + obj_ext) output = self.GypPathToUniqueOutput(filename + obj_ext)
# Ninja's depfile handling gets confused when the case of a filename
# changes on a case-insensitive file system. To work around that, always
# convert .o filenames to lowercase on such file systems. See
# https://github.com/martine/ninja/issues/402 for details.
if not case_sensitive_filesystem:
output = output.lower()
implicit = precompiled_header.GetObjDependencies([input], [output]) implicit = precompiled_header.GetObjDependencies([input], [output])
self.ninja.build(output, command, input, self.ninja.build(output, command, input,
implicit=[gch for _, _, gch in implicit], implicit=[gch for _, _, gch in implicit],
@ -918,10 +939,12 @@ class NinjaWriter:
extra_bindings.append(('lib', extra_bindings.append(('lib',
gyp.common.EncodePOSIXShellArgument(output))) gyp.common.EncodePOSIXShellArgument(output)))
if self.flavor == 'win': if self.flavor == 'win':
self.target.import_lib = output + '.lib'
extra_bindings.append(('dll', output)) extra_bindings.append(('dll', output))
extra_bindings.append(('implib', self.target.import_lib)) if '/NOENTRY' not in ldflags:
output = [output, self.target.import_lib] self.target.import_lib = output + '.lib'
extra_bindings.append(('implibflag',
'/IMPLIB:%s' % self.target.import_lib))
output = [output, self.target.import_lib]
else: else:
output = [output, output + '.TOC'] output = [output, output + '.TOC']
@ -939,10 +962,21 @@ class NinjaWriter:
self.target.binary = compile_deps self.target.binary = compile_deps
elif spec['type'] == 'static_library': elif spec['type'] == 'static_library':
self.target.binary = self.ComputeOutput(spec) self.target.binary = self.ComputeOutput(spec)
self.ninja.build(self.target.binary, 'alink', link_deps, variables = []
order_only=compile_deps, postbuild = self.GetPostbuildCommand(
variables=[('postbuilds', self.GetPostbuildCommand( spec, self.target.binary, self.target.binary)
spec, self.target.binary, self.target.binary))]) if postbuild:
variables.append(('postbuilds', postbuild))
if self.xcode_settings:
variables.append(('libtool_flags',
self.xcode_settings.GetLibtoolflags(config_name)))
if (self.flavor not in ('mac', 'win') and not
self.is_standalone_static_library):
self.ninja.build(self.target.binary, 'alink_thin', link_deps,
order_only=compile_deps, variables=variables)
else:
self.ninja.build(self.target.binary, 'alink', link_deps,
order_only=compile_deps, variables=variables)
else: else:
self.WriteLink(spec, config_name, config, link_deps) self.WriteLink(spec, config_name, config, link_deps)
return self.target.binary return self.target.binary
@ -1126,7 +1160,7 @@ class NinjaWriter:
elif self.flavor == 'win' and self.toolset == 'target': elif self.flavor == 'win' and self.toolset == 'target':
type_in_output_root += ['shared_library'] type_in_output_root += ['shared_library']
if type in type_in_output_root: if type in type_in_output_root or self.is_standalone_static_library:
return filename return filename
elif type == 'shared_library': elif type == 'shared_library':
libdir = 'lib' libdir = 'lib'
@ -1142,10 +1176,22 @@ class NinjaWriter:
values = [] values = []
self.ninja.variable(var, ' '.join(values)) self.ninja.variable(var, ' '.join(values))
def WriteNewNinjaRule(self, name, args, description, is_cygwin, env={}): def WriteNewNinjaRule(self, name, args, description, is_cygwin, env):
"""Write out a new ninja "rule" statement for a given command. """Write out a new ninja "rule" statement for a given command.
Returns the name of the new rule.""" Returns the name of the new rule, and a copy of |args| with variables
expanded."""
if self.flavor == 'win':
args = [self.msvs_settings.ConvertVSMacros(
arg, self.base_to_build, config=self.config_name)
for arg in args]
description = self.msvs_settings.ConvertVSMacros(
description, config=self.config_name)
elif self.flavor == 'mac':
# |env| is an empty list on non-mac.
args = [gyp.xcode_emulation.ExpandEnvVars(arg, env) for arg in args]
description = gyp.xcode_emulation.ExpandEnvVars(description, env)
# TODO: we shouldn't need to qualify names; we do it because # TODO: we shouldn't need to qualify names; we do it because
# currently the ninja rule namespace is global, but it really # currently the ninja rule namespace is global, but it really
@ -1156,11 +1202,12 @@ class NinjaWriter:
rule_name += '.' + name rule_name += '.' + name
rule_name = re.sub('[^a-zA-Z0-9_]', '_', rule_name) rule_name = re.sub('[^a-zA-Z0-9_]', '_', rule_name)
args = args[:] # Remove variable references, but not if they refer to the magic rule
# variables. This is not quite right, as it also protects these for
if self.flavor == 'win': # actions, not just for rules where they are valid. Good enough.
description = self.msvs_settings.ConvertVSMacros( protect = [ '${root}', '${dirname}', '${source}', '${ext}', '${name}' ]
description, config=self.config_name) protect = '(?!' + '|'.join(map(re.escape, protect)) + ')'
description = re.sub(protect + r'\$', '_', description)
# gyp dictates that commands are run from the base directory. # gyp dictates that commands are run from the base directory.
# cd into the directory before running, and adjust paths in # cd into the directory before running, and adjust paths in
@ -1182,10 +1229,6 @@ class NinjaWriter:
else: else:
env = self.ComputeExportEnvString(env) env = self.ComputeExportEnvString(env)
command = gyp.common.EncodePOSIXShellList(args) command = gyp.common.EncodePOSIXShellList(args)
if env:
# If an environment is passed in, variables in the command should be
# read from it, instead of from ninja's internal variables.
command = ninja_syntax.escape(command)
command = 'cd %s; ' % self.build_to_base + env + command command = 'cd %s; ' % self.build_to_base + env + command
# GYP rules/actions express being no-ops by not touching their outputs. # GYP rules/actions express being no-ops by not touching their outputs.
@ -1195,7 +1238,7 @@ class NinjaWriter:
rspfile=rspfile, rspfile_content=rspfile_content) rspfile=rspfile, rspfile_content=rspfile_content)
self.ninja.newline() self.ninja.newline()
return rule_name return rule_name, args
def CalculateVariables(default_variables, params): def CalculateVariables(default_variables, params):
@ -1278,16 +1321,26 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params,
flavor = gyp.common.GetFlavor(params) flavor = gyp.common.GetFlavor(params)
generator_flags = params.get('generator_flags', {}) generator_flags = params.get('generator_flags', {})
# generator_dir: relative path from pwd to where make puts build files.
# Makes migrating from make to ninja easier, ninja doesn't put anything here.
generator_dir = os.path.relpath(params['options'].generator_output or '.')
# output_dir: relative path from generator_dir to the build directory.
output_dir = generator_flags.get('output_dir', 'out')
# build_dir: relative path from source root to our output files. # build_dir: relative path from source root to our output files.
# e.g. "out/Debug" # e.g. "out/Debug"
build_dir = os.path.join(generator_flags.get('output_dir', 'out'), build_dir = os.path.normpath(os.path.join(generator_dir,
config_name) output_dir,
config_name))
toplevel_build = os.path.join(options.toplevel_dir, build_dir) toplevel_build = os.path.join(options.toplevel_dir, build_dir)
master_ninja = ninja_syntax.Writer( master_ninja = ninja_syntax.Writer(
OpenOutput(os.path.join(toplevel_build, 'build.ninja')), OpenOutput(os.path.join(toplevel_build, 'build.ninja')),
width=120) width=120)
case_sensitive_filesystem = not os.path.exists(
os.path.join(toplevel_build, 'BUILD.NINJA'))
# Put build-time support tools in out/{config_name}. # Put build-time support tools in out/{config_name}.
gyp.common.CopyTool(flavor, toplevel_build) gyp.common.CopyTool(flavor, toplevel_build)
@ -1380,8 +1433,6 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params,
else: else:
master_ninja.variable('ld_host', flock + ' linker.lock ' + ld_host) master_ninja.variable('ld_host', flock + ' linker.lock ' + ld_host)
if flavor == 'mac':
master_ninja.variable('mac_tool', os.path.join('.', 'gyp-mac-tool'))
master_ninja.newline() master_ninja.newline()
if flavor != 'win': if flavor != 'win':
@ -1391,6 +1442,11 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params,
command=('$cc -MMD -MF $out.d $defines $includes $cflags $cflags_c ' command=('$cc -MMD -MF $out.d $defines $includes $cflags $cflags_c '
'$cflags_pch_c -c $in -o $out'), '$cflags_pch_c -c $in -o $out'),
depfile='$out.d') depfile='$out.d')
master_ninja.rule(
'cc_s',
description='CC $out',
command=('$cc $defines $includes $cflags $cflags_c '
'$cflags_pch_c -c $in -o $out'))
master_ninja.rule( master_ninja.rule(
'cxx', 'cxx',
description='CXX $out', description='CXX $out',
@ -1398,19 +1454,17 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params,
'$cflags_pch_cc -c $in -o $out'), '$cflags_pch_cc -c $in -o $out'),
depfile='$out.d') depfile='$out.d')
else: else:
# TODO(scottmg): Requires fork of ninja for dependency and linking
# support: https://github.com/sgraham/ninja
# Template for compile commands mostly shared between compiling files # Template for compile commands mostly shared between compiling files
# and generating PCH. In the case of PCH, the "output" is specified by /Fp # and generating PCH. In the case of PCH, the "output" is specified by /Fp
# rather than /Fo (for object files), but we still need to specify an /Fo # rather than /Fo (for object files), but we still need to specify an /Fo
# when compiling PCH. # when compiling PCH.
cc_template = ('ninja-deplist-helper -r . -q -f cl -o $out.dl -e $arch ' cc_template = ('ninja -t msvc -r . -o $out -e $arch '
'--command ' '-- '
'$cc /nologo /showIncludes /FC ' '$cc /nologo /showIncludes /FC '
'@$out.rsp ' '@$out.rsp '
'$cflags_pch_c /c $in %(outspec)s /Fd$pdbname ') '$cflags_pch_c /c $in %(outspec)s /Fd$pdbname ')
cxx_template = ('ninja-deplist-helper -r . -q -f cl -o $out.dl -e $arch ' cxx_template = ('ninja -t msvc -r . -o $out -e $arch '
'--command ' '-- '
'$cxx /nologo /showIncludes /FC ' '$cxx /nologo /showIncludes /FC '
'@$out.rsp ' '@$out.rsp '
'$cflags_pch_cc /c $in %(outspec)s $pchobj /Fd$pdbname ') '$cflags_pch_cc /c $in %(outspec)s $pchobj /Fd$pdbname ')
@ -1418,28 +1472,28 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params,
'cc', 'cc',
description='CC $out', description='CC $out',
command=cc_template % {'outspec': '/Fo$out'}, command=cc_template % {'outspec': '/Fo$out'},
depfile='$out.dl', depfile='$out.d',
rspfile='$out.rsp', rspfile='$out.rsp',
rspfile_content='$defines $includes $cflags $cflags_c') rspfile_content='$defines $includes $cflags $cflags_c')
master_ninja.rule( master_ninja.rule(
'cc_pch', 'cc_pch',
description='CC PCH $out', description='CC PCH $out',
command=cc_template % {'outspec': '/Fp$out /Fo$out.obj'}, command=cc_template % {'outspec': '/Fp$out /Fo$out.obj'},
depfile='$out.dl', depfile='$out.d',
rspfile='$out.rsp', rspfile='$out.rsp',
rspfile_content='$defines $includes $cflags $cflags_c') rspfile_content='$defines $includes $cflags $cflags_c')
master_ninja.rule( master_ninja.rule(
'cxx', 'cxx',
description='CXX $out', description='CXX $out',
command=cxx_template % {'outspec': '/Fo$out'}, command=cxx_template % {'outspec': '/Fo$out'},
depfile='$out.dl', depfile='$out.d',
rspfile='$out.rsp', rspfile='$out.rsp',
rspfile_content='$defines $includes $cflags $cflags_cc') rspfile_content='$defines $includes $cflags $cflags_cc')
master_ninja.rule( master_ninja.rule(
'cxx_pch', 'cxx_pch',
description='CXX PCH $out', description='CXX PCH $out',
command=cxx_template % {'outspec': '/Fp$out /Fo$out.obj'}, command=cxx_template % {'outspec': '/Fp$out /Fo$out.obj'},
depfile='$out.dl', depfile='$out.d',
rspfile='$out.rsp', rspfile='$out.rsp',
rspfile_content='$defines $includes $cflags $cflags_cc') rspfile_content='$defines $includes $cflags $cflags_cc')
master_ninja.rule( master_ninja.rule(
@ -1466,6 +1520,10 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params,
master_ninja.rule( master_ninja.rule(
'alink', 'alink',
description='AR $out', description='AR $out',
command='rm -f $out && $ar rcs $out $in')
master_ninja.rule(
'alink_thin',
description='AR $out',
command='rm -f $out && $ar rcsT $out $in') command='rm -f $out && $ar rcsT $out $in')
# This allows targets that only need to depend on $lib's API to declare an # This allows targets that only need to depend on $lib's API to declare an
@ -1514,7 +1572,7 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params,
rspfile_content='$in_newline $libflags') rspfile_content='$in_newline $libflags')
dlldesc = 'LINK(DLL) $dll' dlldesc = 'LINK(DLL) $dll'
dllcmd = ('%s gyp-win-tool link-wrapper $arch ' dllcmd = ('%s gyp-win-tool link-wrapper $arch '
'$ld /nologo /IMPLIB:$implib /DLL /OUT:$dll ' '$ld /nologo $implibflag /DLL /OUT:$dll '
'/PDB:$dll.pdb @$dll.rsp' % sys.executable) '/PDB:$dll.pdb @$dll.rsp' % sys.executable)
dllcmd += (' && %s gyp-win-tool manifest-wrapper $arch ' dllcmd += (' && %s gyp-win-tool manifest-wrapper $arch '
'$mt -nologo -manifest $manifests -out:$dll.manifest' % '$mt -nologo -manifest $manifests -out:$dll.manifest' %
@ -1556,7 +1614,8 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params,
'alink', 'alink',
description='LIBTOOL-STATIC $out, POSTBUILDS', description='LIBTOOL-STATIC $out, POSTBUILDS',
command='rm -f $out && ' command='rm -f $out && '
'./gyp-mac-tool filter-libtool libtool -static -o $out $in' './gyp-mac-tool filter-libtool libtool $libtool_flags '
'-static -o $out $in'
'$postbuilds') '$postbuilds')
# Record the public interface of $lib in $lib.TOC. See the corresponding # Record the public interface of $lib in $lib.TOC. See the corresponding
@ -1607,11 +1666,11 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params,
master_ninja.rule( master_ninja.rule(
'mac_tool', 'mac_tool',
description='MACTOOL $mactool_cmd $in', description='MACTOOL $mactool_cmd $in',
command='$env $mac_tool $mactool_cmd $in $out') command='$env ./gyp-mac-tool $mactool_cmd $in $out')
master_ninja.rule( master_ninja.rule(
'package_framework', 'package_framework',
description='PACKAGE FRAMEWORK $out, POSTBUILDS', description='PACKAGE FRAMEWORK $out, POSTBUILDS',
command='$mac_tool package-framework $out $version$postbuilds ' command='./gyp-mac-tool package-framework $out $version$postbuilds '
'&& touch $out') '&& touch $out')
if flavor == 'win': if flavor == 'win':
master_ninja.rule( master_ninja.rule(
@ -1673,7 +1732,8 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params,
flavor, abs_build_dir=abs_build_dir) flavor, abs_build_dir=abs_build_dir)
master_ninja.subninja(output_file) master_ninja.subninja(output_file)
target = writer.WriteSpec(spec, config_name, generator_flags) target = writer.WriteSpec(
spec, config_name, generator_flags, case_sensitive_filesystem)
if target: if target:
if name != target.FinalOutput() and spec['toolset'] == 'target': if name != target.FinalOutput() and spec['toolset'] == 'target':
target_short_names.setdefault(name, []).append(target) target_short_names.setdefault(name, []).append(target)
@ -1694,19 +1754,46 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params,
if all_outputs: if all_outputs:
master_ninja.newline() master_ninja.newline()
master_ninja.build('all', 'phony', list(all_outputs)) master_ninja.build('all', 'phony', list(all_outputs))
master_ninja.default('all') master_ninja.default(generator_flags.get('default_target', 'all'))
def GenerateOutput(target_list, target_dicts, data, params): def PerformBuild(data, configurations, params):
if params['options'].generator_output: options = params['options']
raise NotImplementedError, "--generator_output not implemented for ninja" for config in configurations:
builddir = os.path.join(options.toplevel_dir, 'out', config)
arguments = ['ninja', '-C', builddir]
print 'Building [%s]: %s' % (config, arguments)
subprocess.check_call(arguments)
def CallGenerateOutputForConfig(arglist):
# Ignore the interrupt signal so that the parent process catches it and
# kills all multiprocessing children.
signal.signal(signal.SIGINT, signal.SIG_IGN)
(target_list, target_dicts, data, params, config_name) = arglist
GenerateOutputForConfig(target_list, target_dicts, data, params, config_name)
def GenerateOutput(target_list, target_dicts, data, params):
user_config = params.get('generator_flags', {}).get('config', None) user_config = params.get('generator_flags', {}).get('config', None)
if user_config: if user_config:
GenerateOutputForConfig(target_list, target_dicts, data, params, GenerateOutputForConfig(target_list, target_dicts, data, params,
user_config) user_config)
else: else:
config_names = target_dicts[target_list[0]]['configurations'].keys() config_names = target_dicts[target_list[0]]['configurations'].keys()
for config_name in config_names: if params['parallel']:
GenerateOutputForConfig(target_list, target_dicts, data, params, try:
config_name) pool = multiprocessing.Pool(len(config_names))
arglists = []
for config_name in config_names:
arglists.append(
(target_list, target_dicts, data, params, config_name))
pool.map(CallGenerateOutputForConfig, arglists)
except KeyboardInterrupt, e:
pool.terminate()
raise e
else:
for config_name in config_names:
GenerateOutputForConfig(target_list, target_dicts, data, params,
config_name)

25
tools/gyp/pylib/gyp/generator/scons.py

@ -8,6 +8,7 @@ import gyp.SCons as SCons
import os.path import os.path
import pprint import pprint
import re import re
import subprocess
# TODO: remove when we delete the last WriteList() call in this module # TODO: remove when we delete the last WriteList() call in this module
@ -960,6 +961,30 @@ def TargetFilename(target, build_file=None, output_suffix=''):
return output_file return output_file
def PerformBuild(data, configurations, params):
options = params['options']
# Due to the way we test gyp on the chromium typbots
# we need to look for 'scons.py' as well as the more common 'scons'
# TODO(sbc): update the trybots to have a more normal install
# of scons.
scons = 'scons'
paths = os.environ['PATH'].split(os.pathsep)
for scons_name in ['scons', 'scons.py']:
for path in paths:
test_scons = os.path.join(path, scons_name)
print 'looking for: %s' % test_scons
if os.path.exists(test_scons):
print "found scons: %s" % scons
scons = test_scons
break
for config in configurations:
arguments = [scons, '-C', options.toplevel_dir, '--mode=%s' % config]
print "Building [%s]: %s" % (config, arguments)
subprocess.check_call(arguments)
def GenerateOutput(target_list, target_dicts, data, params): def GenerateOutput(target_list, target_dicts, data, params):
""" """
Generates all the output files for the specified targets. Generates all the output files for the specified targets.

19
tools/gyp/pylib/gyp/generator/xcode.py

@ -587,6 +587,25 @@ def EscapeXCodeArgument(s):
return '"' + s + '"' return '"' + s + '"'
def PerformBuild(data, configurations, params):
options = params['options']
for build_file, build_file_dict in data.iteritems():
(build_file_root, build_file_ext) = os.path.splitext(build_file)
if build_file_ext != '.gyp':
continue
xcodeproj_path = build_file_root + options.suffix + '.xcodeproj'
if options.generator_output:
xcodeproj_path = os.path.join(options.generator_output, xcodeproj_path)
for config in configurations:
arguments = ['xcodebuild', '-project', xcodeproj_path]
arguments += ['-configuration', config]
print "Building [%s]: %s" % (config, arguments)
subprocess.check_call(arguments)
def GenerateOutput(target_list, target_dicts, data, params): def GenerateOutput(target_list, target_dicts, data, params):
options = params['options'] options = params['options']
generator_flags = params.get('generator_flags', {}) generator_flags = params.get('generator_flags', {})

456
tools/gyp/pylib/gyp/input.py

@ -12,12 +12,17 @@ from compiler.ast import Stmt
import compiler import compiler
import copy import copy
import gyp.common import gyp.common
import multiprocessing
import optparse import optparse
import os.path import os.path
import re import re
import shlex import shlex
import signal
import subprocess import subprocess
import sys import sys
import threading
import time
from gyp.common import GypError
# A list of types that are treated as linkable. # A list of types that are treated as linkable.
@ -79,6 +84,7 @@ base_non_configuration_keys = [
'rules', 'rules',
'run_as', 'run_as',
'sources', 'sources',
'standalone_static_library',
'suppress_wildcard', 'suppress_wildcard',
'target_name', 'target_name',
'toolset', 'toolset',
@ -102,6 +108,7 @@ invalid_configuration_keys = [
'libraries', 'libraries',
'link_settings', 'link_settings',
'sources', 'sources',
'standalone_static_library',
'target_name', 'target_name',
'type', 'type',
] ]
@ -175,9 +182,9 @@ def CheckNode(node, keypath):
assert isinstance(c[n], Const) assert isinstance(c[n], Const)
key = c[n].getChildren()[0] key = c[n].getChildren()[0]
if key in dict: if key in dict:
raise KeyError, "Key '" + key + "' repeated at level " + \ raise GypError("Key '" + key + "' repeated at level " +
repr(len(keypath) + 1) + " with key path '" + \ repr(len(keypath) + 1) + " with key path '" +
'.'.join(keypath) + "'" '.'.join(keypath) + "'")
kp = list(keypath) # Make a copy of the list for descending this node. kp = list(keypath) # Make a copy of the list for descending this node.
kp.append(key) kp.append(key)
dict[key] = CheckNode(c[n + 1], kp) dict[key] = CheckNode(c[n + 1], kp)
@ -205,7 +212,7 @@ def LoadOneBuildFile(build_file_path, data, aux_data, variables, includes,
if os.path.exists(build_file_path): if os.path.exists(build_file_path):
build_file_contents = open(build_file_path).read() build_file_contents = open(build_file_path).read()
else: else:
raise Exception("%s not found (cwd: %s)" % (build_file_path, os.getcwd())) raise GypError("%s not found (cwd: %s)" % (build_file_path, os.getcwd()))
build_file_data = None build_file_data = None
try: try:
@ -329,7 +336,7 @@ def ProcessToolsetsInDict(data):
# a build file that contains targets and is expected to provide a targets dict # a build file that contains targets and is expected to provide a targets dict
# that contains the targets... # that contains the targets...
def LoadTargetBuildFile(build_file_path, data, aux_data, variables, includes, def LoadTargetBuildFile(build_file_path, data, aux_data, variables, includes,
depth, check): depth, check, load_dependencies):
# If depth is set, predefine the DEPTH variable to be a relative path from # If depth is set, predefine the DEPTH variable to be a relative path from
# this build file's directory to the directory identified by depth. # this build file's directory to the directory identified by depth.
if depth: if depth:
@ -348,7 +355,7 @@ def LoadTargetBuildFile(build_file_path, data, aux_data, variables, includes,
if build_file_path in data['target_build_files']: if build_file_path in data['target_build_files']:
# Already loaded. # Already loaded.
return return False
data['target_build_files'].add(build_file_path) data['target_build_files'].add(build_file_path)
gyp.DebugOutput(gyp.DEBUG_INCLUDES, gyp.DebugOutput(gyp.DEBUG_INCLUDES,
@ -363,7 +370,7 @@ def LoadTargetBuildFile(build_file_path, data, aux_data, variables, includes,
# Set up the included_files key indicating which .gyp files contributed to # Set up the included_files key indicating which .gyp files contributed to
# this target dict. # this target dict.
if 'included_files' in build_file_data: if 'included_files' in build_file_data:
raise KeyError, build_file_path + ' must not contain included_files key' raise GypError(build_file_path + ' must not contain included_files key')
included = GetIncludedBuildFiles(build_file_path, aux_data) included = GetIncludedBuildFiles(build_file_path, aux_data)
build_file_data['included_files'] = [] build_file_data['included_files'] = []
@ -390,25 +397,25 @@ def LoadTargetBuildFile(build_file_path, data, aux_data, variables, includes,
# Look at each project's target_defaults dict, and merge settings into # Look at each project's target_defaults dict, and merge settings into
# targets. # targets.
if 'target_defaults' in build_file_data: if 'target_defaults' in build_file_data:
if 'targets' not in build_file_data:
raise GypError("Unable to find targets in build file %s" %
build_file_path)
index = 0 index = 0
if 'targets' in build_file_data: while index < len(build_file_data['targets']):
while index < len(build_file_data['targets']): # This procedure needs to give the impression that target_defaults is
# This procedure needs to give the impression that target_defaults is # used as defaults, and the individual targets inherit from that.
# used as defaults, and the individual targets inherit from that. # The individual targets need to be merged into the defaults. Make
# The individual targets need to be merged into the defaults. Make # a deep copy of the defaults for each target, merge the target dict
# a deep copy of the defaults for each target, merge the target dict # as found in the input file into that copy, and then hook up the
# as found in the input file into that copy, and then hook up the # copy with the target-specific data merged into it as the replacement
# copy with the target-specific data merged into it as the replacement # target dict.
# target dict. old_target_dict = build_file_data['targets'][index]
old_target_dict = build_file_data['targets'][index] new_target_dict = copy.deepcopy(build_file_data['target_defaults'])
new_target_dict = copy.deepcopy(build_file_data['target_defaults']) MergeDicts(new_target_dict, old_target_dict,
MergeDicts(new_target_dict, old_target_dict, build_file_path, build_file_path)
build_file_path, build_file_path) build_file_data['targets'][index] = new_target_dict
build_file_data['targets'][index] = new_target_dict index += 1
index = index + 1
else:
raise Exception, \
"Unable to find targets in build file %s" % build_file_path
# No longer needed. # No longer needed.
del build_file_data['target_defaults'] del build_file_data['target_defaults']
@ -418,22 +425,182 @@ def LoadTargetBuildFile(build_file_path, data, aux_data, variables, includes,
# in other words, you can't put a "dependencies" section inside a "post" # in other words, you can't put a "dependencies" section inside a "post"
# conditional within a target. # conditional within a target.
dependencies = []
if 'targets' in build_file_data: if 'targets' in build_file_data:
for target_dict in build_file_data['targets']: for target_dict in build_file_data['targets']:
if 'dependencies' not in target_dict: if 'dependencies' not in target_dict:
continue continue
for dependency in target_dict['dependencies']: for dependency in target_dict['dependencies']:
other_build_file = \ dependencies.append(
gyp.common.ResolveTarget(build_file_path, dependency, None)[0] gyp.common.ResolveTarget(build_file_path, dependency, None)[0])
try:
LoadTargetBuildFile(other_build_file, data, aux_data, variables, if load_dependencies:
includes, depth, check) for dependency in dependencies:
except Exception, e: try:
gyp.common.ExceptionAppend( LoadTargetBuildFile(dependency, data, aux_data, variables,
e, 'while loading dependencies of %s' % build_file_path) includes, depth, check, load_dependencies)
raise except Exception, e:
gyp.common.ExceptionAppend(
e, 'while loading dependencies of %s' % build_file_path)
raise
else:
return (build_file_path, dependencies)
def CallLoadTargetBuildFile(global_flags,
build_file_path, data,
aux_data, variables,
includes, depth, check):
"""Wrapper around LoadTargetBuildFile for parallel processing.
This wrapper is used when LoadTargetBuildFile is executed in
a worker process.
"""
try:
signal.signal(signal.SIGINT, signal.SIG_IGN)
# Apply globals so that the worker process behaves the same.
for key, value in global_flags.iteritems():
globals()[key] = value
# Save the keys so we can return data that changed.
data_keys = set(data)
aux_data_keys = set(aux_data)
result = LoadTargetBuildFile(build_file_path, data,
aux_data, variables,
includes, depth, check, False)
if not result:
return result
(build_file_path, dependencies) = result
data_out = {}
for key in data:
if key == 'target_build_files':
continue
if key not in data_keys:
data_out[key] = data[key]
aux_data_out = {}
for key in aux_data:
if key not in aux_data_keys:
aux_data_out[key] = aux_data[key]
# This gets serialized and sent back to the main process via a pipe.
# It's handled in LoadTargetBuildFileCallback.
return (build_file_path,
data_out,
aux_data_out,
dependencies)
except Exception, e:
print "Exception: ", e
return None
class ParallelProcessingError(Exception):
pass
class ParallelState(object):
"""Class to keep track of state when processing input files in parallel.
If build files are loaded in parallel, use this to keep track of
state during farming out and processing parallel jobs. It's stored
in a global so that the callback function can have access to it.
"""
def __init__(self):
# The multiprocessing pool.
self.pool = None
# The condition variable used to protect this object and notify
# the main loop when there might be more data to process.
self.condition = None
# The "data" dict that was passed to LoadTargetBuildFileParallel
self.data = None
# The "aux_data" dict that was passed to LoadTargetBuildFileParallel
self.aux_data = None
# The number of parallel calls outstanding; decremented when a response
# was received.
self.pending = 0
# The set of all build files that have been scheduled, so we don't
# schedule the same one twice.
self.scheduled = set()
# A list of dependency build file paths that haven't been scheduled yet.
self.dependencies = []
# Flag to indicate if there was an error in a child process.
self.error = False
return data def LoadTargetBuildFileCallback(self, result):
"""Handle the results of running LoadTargetBuildFile in another process.
"""
self.condition.acquire()
if not result:
self.error = True
self.condition.notify()
self.condition.release()
return
(build_file_path0, data0, aux_data0, dependencies0) = result
self.data['target_build_files'].add(build_file_path0)
for key in data0:
self.data[key] = data0[key]
for key in aux_data0:
self.aux_data[key] = aux_data0[key]
for new_dependency in dependencies0:
if new_dependency not in self.scheduled:
self.scheduled.add(new_dependency)
self.dependencies.append(new_dependency)
self.pending -= 1
self.condition.notify()
self.condition.release()
def LoadTargetBuildFileParallel(build_file_path, data, aux_data,
variables, includes, depth, check):
parallel_state = ParallelState()
parallel_state.condition = threading.Condition()
parallel_state.dependencies = [build_file_path]
parallel_state.scheduled = set([build_file_path])
parallel_state.pending = 0
parallel_state.data = data
parallel_state.aux_data = aux_data
try:
parallel_state.condition.acquire()
while parallel_state.dependencies or parallel_state.pending:
if parallel_state.error:
break
if not parallel_state.dependencies:
parallel_state.condition.wait()
continue
dependency = parallel_state.dependencies.pop()
parallel_state.pending += 1
data_in = {}
data_in['target_build_files'] = data['target_build_files']
aux_data_in = {}
global_flags = {
'path_sections': globals()['path_sections'],
'non_configuration_keys': globals()['non_configuration_keys'],
'absolute_build_file_paths': globals()['absolute_build_file_paths'],
'multiple_toolsets': globals()['multiple_toolsets']}
if not parallel_state.pool:
parallel_state.pool = multiprocessing.Pool(8)
parallel_state.pool.apply_async(
CallLoadTargetBuildFile,
args = (global_flags, dependency,
data_in, aux_data_in,
variables, includes, depth, check),
callback = parallel_state.LoadTargetBuildFileCallback)
except KeyboardInterrupt, e:
parallel_state.pool.terminate()
raise e
parallel_state.condition.release()
if parallel_state.error:
sys.exit()
# Look for the bracket that matches the first bracket seen in a # Look for the bracket that matches the first bracket seen in a
@ -693,8 +860,8 @@ def ExpandVariables(input, phase, variables, build_file):
os.chdir(oldwd) os.chdir(oldwd)
assert replacement != None assert replacement != None
elif command_string: elif command_string:
raise Exception("Unknown command string '%s' in '%s'." % raise GypError("Unknown command string '%s' in '%s'." %
(command_string, contents)) (command_string, contents))
else: else:
# Fix up command with platform specific workarounds. # Fix up command with platform specific workarounds.
contents = FixupPlatformCommand(contents) contents = FixupPlatformCommand(contents)
@ -710,8 +877,8 @@ def ExpandVariables(input, phase, variables, build_file):
sys.stderr.write(p_stderr) sys.stderr.write(p_stderr)
# Simulate check_call behavior, since check_call only exists # Simulate check_call behavior, since check_call only exists
# in python 2.5 and later. # in python 2.5 and later.
raise Exception("Call to '%s' returned exit status %d." % raise GypError("Call to '%s' returned exit status %d." %
(contents, p.returncode)) (contents, p.returncode))
replacement = p_stdout.rstrip() replacement = p_stdout.rstrip()
cached_command_results[cache_key] = replacement cached_command_results[cache_key] = replacement
@ -735,8 +902,8 @@ def ExpandVariables(input, phase, variables, build_file):
# ], # ],
replacement = [] replacement = []
else: else:
raise KeyError, 'Undefined variable ' + contents + \ raise GypError('Undefined variable ' + contents +
' in ' + build_file ' in ' + build_file)
else: else:
replacement = variables[contents] replacement = variables[contents]
@ -744,10 +911,10 @@ def ExpandVariables(input, phase, variables, build_file):
for item in replacement: for item in replacement:
if (not contents[-1] == '/' and if (not contents[-1] == '/' and
not isinstance(item, str) and not isinstance(item, int)): not isinstance(item, str) and not isinstance(item, int)):
raise TypeError, 'Variable ' + contents + \ raise GypError('Variable ' + contents +
' must expand to a string or list of strings; ' + \ ' must expand to a string or list of strings; ' +
'list contains a ' + \ 'list contains a ' +
item.__class__.__name__ item.__class__.__name__)
# Run through the list and handle variable expansions in it. Since # Run through the list and handle variable expansions in it. Since
# the list is guaranteed not to contain dicts, this won't do anything # the list is guaranteed not to contain dicts, this won't do anything
# with conditions sections. # with conditions sections.
@ -755,9 +922,9 @@ def ExpandVariables(input, phase, variables, build_file):
build_file) build_file)
elif not isinstance(replacement, str) and \ elif not isinstance(replacement, str) and \
not isinstance(replacement, int): not isinstance(replacement, int):
raise TypeError, 'Variable ' + contents + \ raise GypError('Variable ' + contents +
' must expand to a string or list of strings; ' + \ ' must expand to a string or list of strings; ' +
'found a ' + replacement.__class__.__name__ 'found a ' + replacement.__class__.__name__)
if expand_to_list: if expand_to_list:
# Expanding in list context. It's guaranteed that there's only one # Expanding in list context. It's guaranteed that there's only one
@ -855,12 +1022,12 @@ def ProcessConditionsInDict(the_dict, phase, variables, build_file):
for condition in conditions_list: for condition in conditions_list:
if not isinstance(condition, list): if not isinstance(condition, list):
raise TypeError, conditions_key + ' must be a list' raise GypError(conditions_key + ' must be a list')
if len(condition) != 2 and len(condition) != 3: if len(condition) != 2 and len(condition) != 3:
# It's possible that condition[0] won't work in which case this # It's possible that condition[0] won't work in which case this
# attempt will raise its own IndexError. That's probably fine. # attempt will raise its own IndexError. That's probably fine.
raise IndexError, conditions_key + ' ' + condition[0] + \ raise GypError(conditions_key + ' ' + condition[0] +
' must be length 2 or 3, not ' + str(len(condition)) ' must be length 2 or 3, not ' + str(len(condition)))
[cond_expr, true_dict] = condition[0:2] [cond_expr, true_dict] = condition[0:2]
false_dict = None false_dict = None
@ -1110,7 +1277,7 @@ def BuildTargetsDict(data):
target['target_name'], target['target_name'],
target['toolset']) target['toolset'])
if target_name in targets: if target_name in targets:
raise KeyError, 'Duplicate target definitions for ' + target_name raise GypError('Duplicate target definitions for ' + target_name)
targets[target_name] = target targets[target_name] = target
return targets return targets
@ -1151,8 +1318,8 @@ def QualifyDependencies(targets):
# appears in the "dependencies" list. # appears in the "dependencies" list.
if dependency_key != 'dependencies' and \ if dependency_key != 'dependencies' and \
dependency not in target_dict['dependencies']: dependency not in target_dict['dependencies']:
raise KeyError, 'Found ' + dependency + ' in ' + dependency_key + \ raise GypError('Found ' + dependency + ' in ' + dependency_key +
' of ' + target + ', but not in dependencies' ' of ' + target + ', but not in dependencies')
def ExpandWildcardDependencies(targets, data): def ExpandWildcardDependencies(targets, data):
@ -1191,8 +1358,8 @@ def ExpandWildcardDependencies(targets, data):
if dependency_build_file == target_build_file: if dependency_build_file == target_build_file:
# It's an error for a target to depend on all other targets in # It's an error for a target to depend on all other targets in
# the same file, because a target cannot depend on itself. # the same file, because a target cannot depend on itself.
raise KeyError, 'Found wildcard in ' + dependency_key + ' of ' + \ raise GypError('Found wildcard in ' + dependency_key + ' of ' +
target + ' referring to same build file' target + ' referring to same build file')
# Take the wildcard out and adjust the index so that the next # Take the wildcard out and adjust the index so that the next
# dependency in the list will be processed the next time through the # dependency in the list will be processed the next time through the
@ -1249,7 +1416,7 @@ class DependencyGraphNode(object):
dependents: List of DependencyGraphNodes that depend on this one. dependents: List of DependencyGraphNodes that depend on this one.
""" """
class CircularException(Exception): class CircularException(GypError):
pass pass
def __init__(self, ref): def __init__(self, ref):
@ -1396,14 +1563,14 @@ class DependencyGraphNode(object):
# but that's presently the easiest way to access the target dicts so that # but that's presently the easiest way to access the target dicts so that
# this function can find target types. # this function can find target types.
if not 'target_name' in targets[self.ref]: if 'target_name' not in targets[self.ref]:
raise Exception("Missing 'target_name' field in target.") raise GypError("Missing 'target_name' field in target.")
try: if 'type' not in targets[self.ref]:
target_type = targets[self.ref]['type'] raise GypError("Missing 'type' field in target %s" %
except KeyError, e: targets[self.ref]['target_name'])
raise Exception("Missing 'type' field in target %s" %
targets[self.ref]['target_name']) target_type = targets[self.ref]['type']
is_linkable = target_type in linkable_types is_linkable = target_type in linkable_types
@ -1447,7 +1614,7 @@ def BuildDependencyList(targets):
# access. # access.
dependency_nodes = {} dependency_nodes = {}
for target, spec in targets.iteritems(): for target, spec in targets.iteritems():
if not target in dependency_nodes: if target not in dependency_nodes:
dependency_nodes[target] = DependencyGraphNode(target) dependency_nodes[target] = DependencyGraphNode(target)
# Set up the dependency links. Targets that have no dependencies are treated # Set up the dependency links. Targets that have no dependencies are treated
@ -1456,21 +1623,18 @@ def BuildDependencyList(targets):
for target, spec in targets.iteritems(): for target, spec in targets.iteritems():
target_node = dependency_nodes[target] target_node = dependency_nodes[target]
target_build_file = gyp.common.BuildFile(target) target_build_file = gyp.common.BuildFile(target)
if not 'dependencies' in spec or len(spec['dependencies']) == 0: dependencies = spec.get('dependencies')
if not dependencies:
target_node.dependencies = [root_node] target_node.dependencies = [root_node]
root_node.dependents.append(target_node) root_node.dependents.append(target_node)
else: else:
dependencies = spec['dependencies'] for dependency in dependencies:
for index in xrange(0, len(dependencies)): dependency_node = dependency_nodes.get(dependency)
try: if not dependency_node:
dependency = dependencies[index] raise GypError("Dependency '%s' not found while "
dependency_node = dependency_nodes[dependency] "trying to load target %s" % (dependency, target))
target_node.dependencies.append(dependency_node) target_node.dependencies.append(dependency_node)
dependency_node.dependents.append(target_node) dependency_node.dependents.append(target_node)
except KeyError, e:
gyp.common.ExceptionAppend(e,
'while trying to load target %s' % target)
raise
flat_list = root_node.FlattenToList() flat_list = root_node.FlattenToList()
@ -1478,9 +1642,9 @@ def BuildDependencyList(targets):
# (cycle). If you need to figure out what's wrong, look for elements of # (cycle). If you need to figure out what's wrong, look for elements of
# targets that are not in flat_list. # targets that are not in flat_list.
if len(flat_list) != len(targets): if len(flat_list) != len(targets):
raise DependencyGraphNode.CircularException, \ raise DependencyGraphNode.CircularException(
'Some targets not reachable, cycle in dependency graph detected: ' + \ 'Some targets not reachable, cycle in dependency graph detected: ' +
' '.join(set(flat_list) ^ set(targets)) ' '.join(set(flat_list) ^ set(targets)))
return [dependency_nodes, flat_list] return [dependency_nodes, flat_list]
@ -1502,18 +1666,22 @@ def VerifyNoGYPFileCircularDependencies(targets):
for dependency in target_dependencies: for dependency in target_dependencies:
try: try:
dependency_build_file = gyp.common.BuildFile(dependency) dependency_build_file = gyp.common.BuildFile(dependency)
if dependency_build_file == build_file: except GypError, e:
# A .gyp file is allowed to refer back to itself.
continue
dependency_node = dependency_nodes[dependency_build_file]
if dependency_node not in build_file_node.dependencies:
build_file_node.dependencies.append(dependency_node)
dependency_node.dependents.append(build_file_node)
except KeyError, e:
gyp.common.ExceptionAppend( gyp.common.ExceptionAppend(
e, 'while computing dependencies of .gyp file %s' % build_file) e, 'while computing dependencies of .gyp file %s' % build_file)
raise raise
if dependency_build_file == build_file:
# A .gyp file is allowed to refer back to itself.
continue
dependency_node = dependency_nodes.get(dependency_build_file)
if not dependency_node:
raise GypError("Dependancy '%s' not found" % dependency_build_file)
if dependency_node not in build_file_node.dependencies:
build_file_node.dependencies.append(dependency_node)
dependency_node.dependents.append(build_file_node)
# Files that have no dependencies are treated as dependent on root_node. # Files that have no dependencies are treated as dependent on root_node.
root_node = DependencyGraphNode(None) root_node = DependencyGraphNode(None)
for build_file_node in dependency_nodes.itervalues(): for build_file_node in dependency_nodes.itervalues():
@ -1552,8 +1720,8 @@ def DoDependentSettings(key, flat_list, targets, dependency_nodes):
elif key == 'link_settings': elif key == 'link_settings':
dependencies = dependency_nodes[target].LinkDependencies(targets) dependencies = dependency_nodes[target].LinkDependencies(targets)
else: else:
raise KeyError, "DoDependentSettings doesn't know how to determine " + \ raise GypError("DoDependentSettings doesn't know how to determine "
'dependencies for ' + key 'dependencies for ' + key)
for dependency in dependencies: for dependency in dependencies:
dependency_dict = targets[dependency] dependency_dict = targets[dependency]
@ -1819,8 +1987,8 @@ def MergeDicts(to, fro, to_file, fro_file):
# and prepend are the only policies that can coexist. # and prepend are the only policies that can coexist.
for list_incompatible in lists_incompatible: for list_incompatible in lists_incompatible:
if list_incompatible in fro: if list_incompatible in fro:
raise KeyError, 'Incompatible list policies ' + k + ' and ' + \ raise GypError('Incompatible list policies ' + k + ' and ' +
list_incompatible list_incompatible)
if list_base in to: if list_base in to:
if ext == '?': if ext == '?':
@ -1952,8 +2120,8 @@ def SetUpConfigurations(target, target_dict):
configuration_dict = target_dict['configurations'][configuration] configuration_dict = target_dict['configurations'][configuration]
for key in configuration_dict.keys(): for key in configuration_dict.keys():
if key in invalid_configuration_keys: if key in invalid_configuration_keys:
raise KeyError, ('%s not allowed in the %s configuration, found in ' raise GypError('%s not allowed in the %s configuration, found in '
'target %s' % (key, configuration, target)) 'target %s' % (key, configuration, target))
@ -2084,9 +2252,9 @@ def ProcessListFiltersInDict(name, the_dict):
# to be created. # to be created.
excluded_key = list_key + '_excluded' excluded_key = list_key + '_excluded'
if excluded_key in the_dict: if excluded_key in the_dict:
raise KeyError, \ raise GypError(name + ' key ' + excluded_key +
name + ' key ' + excluded_key + ' must not be present prior ' + \ ' must not be present prior '
' to applying exclusion/regex filters for ' + list_key ' to applying exclusion/regex filters for ' + list_key)
excluded_list = [] excluded_list = []
@ -2136,9 +2304,14 @@ def ValidateTargetType(target, target_dict):
'none') 'none')
target_type = target_dict.get('type', None) target_type = target_dict.get('type', None)
if target_type not in VALID_TARGET_TYPES: if target_type not in VALID_TARGET_TYPES:
raise Exception("Target %s has an invalid target type '%s'. " raise GypError("Target %s has an invalid target type '%s'. "
"Must be one of %s." % "Must be one of %s." %
(target, target_type, '/'.join(VALID_TARGET_TYPES))) (target, target_type, '/'.join(VALID_TARGET_TYPES)))
if (target_dict.get('standalone_static_library', 0) and
not target_type == 'static_library'):
raise GypError('Target %s has type %s but standalone_static_library flag is'
' only valid for static_library type.' % (target,
target_type))
def ValidateSourcesInTarget(target, target_dict, build_file): def ValidateSourcesInTarget(target, target_dict, build_file):
@ -2162,10 +2335,10 @@ def ValidateSourcesInTarget(target, target_dict, build_file):
error += ' %s: %s\n' % (basename, ' '.join(files)) error += ' %s: %s\n' % (basename, ' '.join(files))
if error: if error:
print ('static library %s has several files with the same basename:\n' % print('static library %s has several files with the same basename:\n' %
target + error + 'Some build systems, e.g. MSVC08, ' target + error + 'Some build systems, e.g. MSVC08, '
'cannot handle that.') 'cannot handle that.')
raise KeyError, 'Duplicate basenames in sources section, see list above' raise GypError('Duplicate basenames in sources section, see list above')
def ValidateRulesInTarget(target, target_dict, extra_sources_for_rules): def ValidateRulesInTarget(target, target_dict, extra_sources_for_rules):
@ -2189,25 +2362,25 @@ def ValidateRulesInTarget(target, target_dict, extra_sources_for_rules):
# Make sure that there's no conflict among rule names and extensions. # Make sure that there's no conflict among rule names and extensions.
rule_name = rule['rule_name'] rule_name = rule['rule_name']
if rule_name in rule_names: if rule_name in rule_names:
raise KeyError, 'rule %s exists in duplicate, target %s' % \ raise GypError('rule %s exists in duplicate, target %s' %
(rule_name, target) (rule_name, target))
rule_names[rule_name] = rule rule_names[rule_name] = rule
rule_extension = rule['extension'] rule_extension = rule['extension']
if rule_extension in rule_extensions: if rule_extension in rule_extensions:
raise KeyError, ('extension %s associated with multiple rules, ' + raise GypError(('extension %s associated with multiple rules, ' +
'target %s rules %s and %s') % \ 'target %s rules %s and %s') %
(rule_extension, target, (rule_extension, target,
rule_extensions[rule_extension]['rule_name'], rule_extensions[rule_extension]['rule_name'],
rule_name) rule_name))
rule_extensions[rule_extension] = rule rule_extensions[rule_extension] = rule
# Make sure rule_sources isn't already there. It's going to be # Make sure rule_sources isn't already there. It's going to be
# created below if needed. # created below if needed.
if 'rule_sources' in rule: if 'rule_sources' in rule:
raise KeyError, \ raise GypError(
'rule_sources must not exist in input, target %s rule %s' % \ 'rule_sources must not exist in input, target %s rule %s' %
(target, rule_name) (target, rule_name))
extension = rule['extension'] extension = rule['extension']
rule_sources = [] rule_sources = []
@ -2231,28 +2404,28 @@ def ValidateRunAsInTarget(target, target_dict, build_file):
if not run_as: if not run_as:
return return
if not isinstance(run_as, dict): if not isinstance(run_as, dict):
raise Exception("The 'run_as' in target %s from file %s should be a " raise GypError("The 'run_as' in target %s from file %s should be a "
"dictionary." % "dictionary." %
(target_name, build_file)) (target_name, build_file))
action = run_as.get('action') action = run_as.get('action')
if not action: if not action:
raise Exception("The 'run_as' in target %s from file %s must have an " raise GypError("The 'run_as' in target %s from file %s must have an "
"'action' section." % "'action' section." %
(target_name, build_file)) (target_name, build_file))
if not isinstance(action, list): if not isinstance(action, list):
raise Exception("The 'action' for 'run_as' in target %s from file %s " raise GypError("The 'action' for 'run_as' in target %s from file %s "
"must be a list." % "must be a list." %
(target_name, build_file)) (target_name, build_file))
working_directory = run_as.get('working_directory') working_directory = run_as.get('working_directory')
if working_directory and not isinstance(working_directory, str): if working_directory and not isinstance(working_directory, str):
raise Exception("The 'working_directory' for 'run_as' in target %s " raise GypError("The 'working_directory' for 'run_as' in target %s "
"in file %s should be a string." % "in file %s should be a string." %
(target_name, build_file)) (target_name, build_file))
environment = run_as.get('environment') environment = run_as.get('environment')
if environment and not isinstance(environment, dict): if environment and not isinstance(environment, dict):
raise Exception("The 'environment' for 'run_as' in target %s " raise GypError("The 'environment' for 'run_as' in target %s "
"in file %s should be a dictionary." % "in file %s should be a dictionary." %
(target_name, build_file)) (target_name, build_file))
def ValidateActionsInTarget(target, target_dict, build_file): def ValidateActionsInTarget(target, target_dict, build_file):
@ -2262,15 +2435,15 @@ def ValidateActionsInTarget(target, target_dict, build_file):
for action in actions: for action in actions:
action_name = action.get('action_name') action_name = action.get('action_name')
if not action_name: if not action_name:
raise Exception("Anonymous action in target %s. " raise GypError("Anonymous action in target %s. "
"An action must have an 'action_name' field." % "An action must have an 'action_name' field." %
target_name) target_name)
inputs = action.get('inputs', None) inputs = action.get('inputs', None)
if inputs is None: if inputs is None:
raise Exception('Action in target %s has no inputs.' % target_name) raise GypError('Action in target %s has no inputs.' % target_name)
action_command = action.get('action') action_command = action.get('action')
if action_command and not action_command[0]: if action_command and not action_command[0]:
raise Exception("Empty action as command in target %s." % target_name) raise GypError("Empty action as command in target %s." % target_name)
def TurnIntIntoStrInDict(the_dict): def TurnIntIntoStrInDict(the_dict):
@ -2327,13 +2500,13 @@ def VerifyNoCollidingTargets(targets):
key = subdir + ':' + name key = subdir + ':' + name
if key in used: if key in used:
# Complain if this target is already used. # Complain if this target is already used.
raise Exception('Duplicate target name "%s" in directory "%s" used both ' raise GypError('Duplicate target name "%s" in directory "%s" used both '
'in "%s" and "%s".' % (name, subdir, gyp, used[key])) 'in "%s" and "%s".' % (name, subdir, gyp, used[key]))
used[key] = gyp used[key] = gyp
def Load(build_files, variables, includes, depth, generator_input_info, check, def Load(build_files, variables, includes, depth, generator_input_info, check,
circular_check): circular_check, parallel):
# Set up path_sections and non_configuration_keys with the default data plus # Set up path_sections and non_configuration_keys with the default data plus
# the generator-specifc data. # the generator-specifc data.
global path_sections global path_sections
@ -2374,8 +2547,13 @@ def Load(build_files, variables, includes, depth, generator_input_info, check,
# used as keys to the data dict and for references between input files. # used as keys to the data dict and for references between input files.
build_file = os.path.normpath(build_file) build_file = os.path.normpath(build_file)
try: try:
LoadTargetBuildFile(build_file, data, aux_data, variables, includes, if parallel:
depth, check) print >>sys.stderr, 'Using parallel processing (experimental).'
LoadTargetBuildFileParallel(build_file, data, aux_data,
variables, includes, depth, check)
else:
LoadTargetBuildFile(build_file, data, aux_data,
variables, includes, depth, check, True)
except Exception, e: except Exception, e:
gyp.common.ExceptionAppend(e, 'while trying to load %s' % build_file) gyp.common.ExceptionAppend(e, 'while trying to load %s' % build_file)
raise raise

5
tools/gyp/pylib/gyp/mac_tool.py

@ -163,9 +163,10 @@ class MacTool(object):
"""Calls libtool and filters out 'libtool: file: foo.o has no symbols'.""" """Calls libtool and filters out 'libtool: file: foo.o has no symbols'."""
libtool_re = re.compile(r'^libtool: file: .* has no symbols$') libtool_re = re.compile(r'^libtool: file: .* has no symbols$')
libtoolout = subprocess.Popen(cmd_list, stderr=subprocess.PIPE) libtoolout = subprocess.Popen(cmd_list, stderr=subprocess.PIPE)
for line in libtoolout.stderr: _, err = libtoolout.communicate()
for line in err.splitlines():
if not libtool_re.match(line): if not libtool_re.match(line):
sys.stderr.write(line) print >>sys.stderr, line
return libtoolout.returncode return libtoolout.returncode
def ExecPackageFramework(self, framework, version): def ExecPackageFramework(self, framework, version):

113
tools/gyp/pylib/gyp/msvs_emulation.py

@ -152,6 +152,7 @@ class MsvsSettings(object):
('msvs_disabled_warnings', list), ('msvs_disabled_warnings', list),
('msvs_precompiled_header', str), ('msvs_precompiled_header', str),
('msvs_precompiled_source', str), ('msvs_precompiled_source', str),
('msvs_configuration_platform', str),
('msvs_target_platform', str), ('msvs_target_platform', str),
] ]
configs = spec['configurations'] configs = spec['configurations']
@ -165,8 +166,7 @@ class MsvsSettings(object):
def GetVSMacroEnv(self, base_to_build=None, config=None): def GetVSMacroEnv(self, base_to_build=None, config=None):
"""Get a dict of variables mapping internal VS macro names to their gyp """Get a dict of variables mapping internal VS macro names to their gyp
equivalents.""" equivalents."""
target_platform = self.GetTargetPlatform(config) target_platform = 'Win32' if self.GetArch(config) == 'x86' else 'x64'
target_platform = {'x86': 'Win32'}.get(target_platform, target_platform)
replacements = { replacements = {
'$(VSInstallDir)': self.vs_version.Path(), '$(VSInstallDir)': self.vs_version.Path(),
'$(VCInstallDir)': os.path.join(self.vs_version.Path(), 'VC') + '\\', '$(VCInstallDir)': os.path.join(self.vs_version.Path(), 'VC') + '\\',
@ -215,29 +215,40 @@ class MsvsSettings(object):
return self.parent._GetAndMunge(self.field, self.base_path + [name], return self.parent._GetAndMunge(self.field, self.base_path + [name],
default=default, prefix=prefix, append=self.append, map=map) default=default, prefix=prefix, append=self.append, map=map)
def GetTargetPlatform(self, config): def GetArch(self, config):
target_platform = self.msvs_target_platform.get(config, '') """Get architecture based on msvs_configuration_platform and
if not target_platform: msvs_target_platform. Returns either 'x86' or 'x64'."""
target_platform = 'Win32' configuration_platform = self.msvs_configuration_platform.get(config, '')
return {'Win32': 'x86'}.get(target_platform, target_platform) platform = self.msvs_target_platform.get(config, '')
if not platform: # If no specific override, use the configuration's.
def _RealConfig(self, config): platform = configuration_platform
target_platform = self.GetTargetPlatform(config) # Map from platform to architecture.
if target_platform == 'x64' and not config.endswith('_x64'): return {'Win32': 'x86', 'x64': 'x64'}.get(platform, 'x86')
def _TargetConfig(self, config):
"""Returns the target-specific configuration."""
# There's two levels of architecture/platform specification in VS. The
# first level is globally for the configuration (this is what we consider
# "the" config at the gyp level, which will be something like 'Debug' or
# 'Release_x64'), and a second target-specific configuration, which is an
# override for the global one. |config| is remapped here to take into
# account the local target-specific overrides to the global configuration.
arch = self.GetArch(config)
if arch == 'x64' and not config.endswith('_x64'):
config += '_x64' config += '_x64'
if arch == 'x86' and config.endswith('_x64'):
config = config.rsplit('_', 1)[0]
return config return config
def _Setting(self, path, config, def _Setting(self, path, config,
default=None, prefix='', append=None, map=None): default=None, prefix='', append=None, map=None):
"""_GetAndMunge for msvs_settings.""" """_GetAndMunge for msvs_settings."""
config = self._RealConfig(config)
return self._GetAndMunge( return self._GetAndMunge(
self.msvs_settings[config], path, default, prefix, append, map) self.msvs_settings[config], path, default, prefix, append, map)
def _ConfigAttrib(self, path, config, def _ConfigAttrib(self, path, config,
default=None, prefix='', append=None, map=None): default=None, prefix='', append=None, map=None):
"""_GetAndMunge for msvs_configuration_attributes.""" """_GetAndMunge for msvs_configuration_attributes."""
config = self._RealConfig(config)
return self._GetAndMunge( return self._GetAndMunge(
self.msvs_configuration_attributes[config], self.msvs_configuration_attributes[config],
path, default, prefix, append, map) path, default, prefix, append, map)
@ -245,7 +256,7 @@ class MsvsSettings(object):
def AdjustIncludeDirs(self, include_dirs, config): def AdjustIncludeDirs(self, include_dirs, config):
"""Updates include_dirs to expand VS specific paths, and adds the system """Updates include_dirs to expand VS specific paths, and adds the system
include dirs used for platform SDK and similar.""" include dirs used for platform SDK and similar."""
config = self._RealConfig(config) config = self._TargetConfig(config)
includes = include_dirs + self.msvs_system_include_dirs[config] includes = include_dirs + self.msvs_system_include_dirs[config]
includes.extend(self._Setting( includes.extend(self._Setting(
('VCCLCompilerTool', 'AdditionalIncludeDirectories'), config, default=[])) ('VCCLCompilerTool', 'AdditionalIncludeDirectories'), config, default=[]))
@ -254,7 +265,7 @@ class MsvsSettings(object):
def GetComputedDefines(self, config): def GetComputedDefines(self, config):
"""Returns the set of defines that are injected to the defines list based """Returns the set of defines that are injected to the defines list based
on other VS settings.""" on other VS settings."""
config = self._RealConfig(config) config = self._TargetConfig(config)
defines = [] defines = []
if self._ConfigAttrib(['CharacterSet'], config) == '1': if self._ConfigAttrib(['CharacterSet'], config) == '1':
defines.extend(('_UNICODE', 'UNICODE')) defines.extend(('_UNICODE', 'UNICODE'))
@ -267,7 +278,7 @@ class MsvsSettings(object):
def GetOutputName(self, config, expand_special): def GetOutputName(self, config, expand_special):
"""Gets the explicitly overridden output name for a target or returns None """Gets the explicitly overridden output name for a target or returns None
if it's not overridden.""" if it's not overridden."""
config = self._RealConfig(config) config = self._TargetConfig(config)
type = self.spec['type'] type = self.spec['type']
root = 'VCLibrarianTool' if type == 'static_library' else 'VCLinkerTool' root = 'VCLibrarianTool' if type == 'static_library' else 'VCLinkerTool'
# TODO(scottmg): Handle OutputDirectory without OutputFile. # TODO(scottmg): Handle OutputDirectory without OutputFile.
@ -277,9 +288,19 @@ class MsvsSettings(object):
output_file, config=config)) output_file, config=config))
return output_file return output_file
def GetPDBName(self, config, expand_special):
"""Gets the explicitly overridden pdb name for a target or returns None
if it's not overridden."""
config = self._TargetConfig(config)
output_file = self._Setting(('VCLinkerTool', 'ProgramDatabaseFile'), config)
if output_file:
output_file = expand_special(self.ConvertVSMacros(
output_file, config=config))
return output_file
def GetCflags(self, config): def GetCflags(self, config):
"""Returns the flags that need to be added to .c and .cc compilations.""" """Returns the flags that need to be added to .c and .cc compilations."""
config = self._RealConfig(config) config = self._TargetConfig(config)
cflags = [] cflags = []
cflags.extend(['/wd' + w for w in self.msvs_disabled_warnings[config]]) cflags.extend(['/wd' + w for w in self.msvs_disabled_warnings[config]])
cl = self._GetWrapper(self, self.msvs_settings[config], cl = self._GetWrapper(self, self.msvs_settings[config],
@ -302,6 +323,7 @@ class MsvsSettings(object):
cl('RuntimeLibrary', cl('RuntimeLibrary',
map={'0': 'T', '1': 'Td', '2': 'D', '3': 'Dd'}, prefix='/M') map={'0': 'T', '1': 'Td', '2': 'D', '3': 'Dd'}, prefix='/M')
cl('ExceptionHandling', map={'1': 'sc','2': 'a'}, prefix='/EH') cl('ExceptionHandling', map={'1': 'sc','2': 'a'}, prefix='/EH')
cl('EnablePREfast', map={'true': '/analyze'})
cl('AdditionalOptions', prefix='') cl('AdditionalOptions', prefix='')
# ninja handles parallelism by itself, don't have the compiler do it too. # ninja handles parallelism by itself, don't have the compiler do it too.
cflags = filter(lambda x: not x.startswith('/MP'), cflags) cflags = filter(lambda x: not x.startswith('/MP'), cflags)
@ -310,13 +332,13 @@ class MsvsSettings(object):
def GetPrecompiledHeader(self, config, gyp_to_build_path): def GetPrecompiledHeader(self, config, gyp_to_build_path):
"""Returns an object that handles the generation of precompiled header """Returns an object that handles the generation of precompiled header
build steps.""" build steps."""
config = self._RealConfig(config) config = self._TargetConfig(config)
return _PchHelper(self, config, gyp_to_build_path) return _PchHelper(self, config, gyp_to_build_path)
def _GetPchFlags(self, config, extension): def _GetPchFlags(self, config, extension):
"""Get the flags to be added to the cflags for precompiled header support. """Get the flags to be added to the cflags for precompiled header support.
""" """
config = self._RealConfig(config) config = self._TargetConfig(config)
# The PCH is only built once by a particular source file. Usage of PCH must # The PCH is only built once by a particular source file. Usage of PCH must
# only be for the same language (i.e. C vs. C++), so only include the pch # only be for the same language (i.e. C vs. C++), so only include the pch
# flags when the language matches. # flags when the language matches.
@ -329,18 +351,18 @@ class MsvsSettings(object):
def GetCflagsC(self, config): def GetCflagsC(self, config):
"""Returns the flags that need to be added to .c compilations.""" """Returns the flags that need to be added to .c compilations."""
config = self._RealConfig(config) config = self._TargetConfig(config)
return self._GetPchFlags(config, '.c') return self._GetPchFlags(config, '.c')
def GetCflagsCC(self, config): def GetCflagsCC(self, config):
"""Returns the flags that need to be added to .cc compilations.""" """Returns the flags that need to be added to .cc compilations."""
config = self._RealConfig(config) config = self._TargetConfig(config)
return ['/TP'] + self._GetPchFlags(config, '.cc') return ['/TP'] + self._GetPchFlags(config, '.cc')
def _GetAdditionalLibraryDirectories(self, root, config, gyp_to_build_path): def _GetAdditionalLibraryDirectories(self, root, config, gyp_to_build_path):
"""Get and normalize the list of paths in AdditionalLibraryDirectories """Get and normalize the list of paths in AdditionalLibraryDirectories
setting.""" setting."""
config = self._RealConfig(config) config = self._TargetConfig(config)
libpaths = self._Setting((root, 'AdditionalLibraryDirectories'), libpaths = self._Setting((root, 'AdditionalLibraryDirectories'),
config, default=[]) config, default=[])
libpaths = [os.path.normpath( libpaths = [os.path.normpath(
@ -350,7 +372,7 @@ class MsvsSettings(object):
def GetLibFlags(self, config, gyp_to_build_path): def GetLibFlags(self, config, gyp_to_build_path):
"""Returns the flags that need to be added to lib commands.""" """Returns the flags that need to be added to lib commands."""
config = self._RealConfig(config) config = self._TargetConfig(config)
libflags = [] libflags = []
lib = self._GetWrapper(self, self.msvs_settings[config], lib = self._GetWrapper(self, self.msvs_settings[config],
'VCLibrarianTool', append=libflags) 'VCLibrarianTool', append=libflags)
@ -374,7 +396,7 @@ class MsvsSettings(object):
manifest_base_name, is_executable): manifest_base_name, is_executable):
"""Returns the flags that need to be added to link commands, and the """Returns the flags that need to be added to link commands, and the
manifest files.""" manifest files."""
config = self._RealConfig(config) config = self._TargetConfig(config)
ldflags = [] ldflags = []
ld = self._GetWrapper(self, self.msvs_settings[config], ld = self._GetWrapper(self, self.msvs_settings[config],
'VCLinkerTool', append=ldflags) 'VCLinkerTool', append=ldflags)
@ -387,6 +409,9 @@ class MsvsSettings(object):
out = self.GetOutputName(config, expand_special) out = self.GetOutputName(config, expand_special)
if out: if out:
ldflags.append('/OUT:' + out) ldflags.append('/OUT:' + out)
pdb = self.GetPDBName(config, expand_special)
if pdb:
ldflags.append('/PDB:' + pdb)
ld('AdditionalOptions', prefix='') ld('AdditionalOptions', prefix='')
ld('SubSystem', map={'1': 'CONSOLE', '2': 'WINDOWS'}, prefix='/SUBSYSTEM:') ld('SubSystem', map={'1': 'CONSOLE', '2': 'WINDOWS'}, prefix='/SUBSYSTEM:')
ld('LinkIncremental', map={'1': ':NO', '2': ''}, prefix='/INCREMENTAL') ld('LinkIncremental', map={'1': ':NO', '2': ''}, prefix='/INCREMENTAL')
@ -401,6 +426,7 @@ class MsvsSettings(object):
ld('IgnoreDefaultLibraryNames', prefix='/NODEFAULTLIB:') ld('IgnoreDefaultLibraryNames', prefix='/NODEFAULTLIB:')
ld('ResourceOnlyDLL', map={'true': '/NOENTRY'}) ld('ResourceOnlyDLL', map={'true': '/NOENTRY'})
ld('EntryPointSymbol', prefix='/ENTRY:') ld('EntryPointSymbol', prefix='/ENTRY:')
ld('Profile', map={ 'true': '/PROFILE'})
# TODO(scottmg): This should sort of be somewhere else (not really a flag). # TODO(scottmg): This should sort of be somewhere else (not really a flag).
ld('AdditionalDependencies', prefix='') ld('AdditionalDependencies', prefix='')
# TODO(scottmg): These too. # TODO(scottmg): These too.
@ -466,14 +492,14 @@ class MsvsSettings(object):
def IsUseLibraryDependencyInputs(self, config): def IsUseLibraryDependencyInputs(self, config):
"""Returns whether the target should be linked via Use Library Dependency """Returns whether the target should be linked via Use Library Dependency
Inputs (using component .objs of a given .lib).""" Inputs (using component .objs of a given .lib)."""
config = self._RealConfig(config) config = self._TargetConfig(config)
uldi = self._Setting(('VCLinkerTool', 'UseLibraryDependencyInputs'), config) uldi = self._Setting(('VCLinkerTool', 'UseLibraryDependencyInputs'), config)
return uldi == 'true' return uldi == 'true'
def GetRcflags(self, config, gyp_to_ninja_path): def GetRcflags(self, config, gyp_to_ninja_path):
"""Returns the flags that need to be added to invocations of the resource """Returns the flags that need to be added to invocations of the resource
compiler.""" compiler."""
config = self._RealConfig(config) config = self._TargetConfig(config)
rcflags = [] rcflags = []
rc = self._GetWrapper(self, self.msvs_settings[config], rc = self._GetWrapper(self, self.msvs_settings[config],
'VCResourceCompilerTool', append=rcflags) 'VCResourceCompilerTool', append=rcflags)
@ -510,18 +536,27 @@ class MsvsSettings(object):
return int(rule.get('msvs_cygwin_shell', return int(rule.get('msvs_cygwin_shell',
self.spec.get('msvs_cygwin_shell', 1))) != 0 self.spec.get('msvs_cygwin_shell', 1))) != 0
def HasExplicitIdlRules(self, spec): def _HasExplicitRuleForExtension(self, spec, extension):
"""Determine if there's an explicit rule for idl files. When there isn't we """Determine if there's an explicit rule for a particular extension."""
need to generate implicit rules to build MIDL .idl files."""
for rule in spec.get('rules', []): for rule in spec.get('rules', []):
if rule['extension'] == 'idl' and int(rule.get('msvs_external_rule', 0)): if rule['extension'] == extension:
return True return True
return False return False
def HasExplicitIdlRules(self, spec):
"""Determine if there's an explicit rule for idl files. When there isn't we
need to generate implicit rules to build MIDL .idl files."""
return self._HasExplicitRuleForExtension(spec, 'idl')
def HasExplicitAsmRules(self, spec):
"""Determine if there's an explicit rule for asm files. When there isn't we
need to generate implicit rules to assemble .asm files."""
return self._HasExplicitRuleForExtension(spec, 'asm')
def GetIdlBuildData(self, source, config): def GetIdlBuildData(self, source, config):
"""Determine the implicit outputs for an idl file. Returns output """Determine the implicit outputs for an idl file. Returns output
directory, outputs, and variables and flags that are required.""" directory, outputs, and variables and flags that are required."""
config = self._RealConfig(config) config = self._TargetConfig(config)
midl_get = self._GetWrapper(self, self.msvs_settings[config], 'VCMIDLTool') midl_get = self._GetWrapper(self, self.msvs_settings[config], 'VCMIDLTool')
def midl(name, default=None): def midl(name, default=None):
return self.ConvertVSMacros(midl_get(name, default=default), return self.ConvertVSMacros(midl_get(name, default=default),
@ -689,3 +724,19 @@ def GenerateEnvironmentFiles(toplevel_build_dir, generator_flags, open_out):
f = open_out(os.path.join(toplevel_build_dir, 'environment.' + arch), 'wb') f = open_out(os.path.join(toplevel_build_dir, 'environment.' + arch), 'wb')
f.write(env_block) f.write(env_block)
f.close() f.close()
def VerifyMissingSources(sources, build_dir, generator_flags, gyp_to_ninja):
"""Emulate behavior of msvs_error_on_missing_sources present in the msvs
generator: Check that all regular source files, i.e. not created at run time,
exist on disk. Missing files cause needless recompilation when building via
VS, and we want this check to match for people/bots that build using ninja,
so they're not surprised when the VS build fails."""
if int(generator_flags.get('msvs_error_on_missing_sources', 0)):
no_specials = filter(lambda x: '$' not in x, sources)
relative = [os.path.join(build_dir, gyp_to_ninja(s)) for s in no_specials]
missing = filter(lambda x: not os.path.exists(x), relative)
if missing:
# They'll look like out\Release\..\..\stuff\things.cc, so normalize the
# path for a slightly less crazy looking output.
cleaned_up = [os.path.normpath(x) for x in missing]
raise Exception('Missing input files:\n%s' % '\n'.join(cleaned_up))

15
tools/gyp/pylib/gyp/ninja_syntax.py

@ -12,8 +12,8 @@ use Python.
import textwrap import textwrap
import re import re
def escape_spaces(word): def escape_path(word):
return word.replace('$ ','$$ ').replace(' ','$ ') return word.replace('$ ','$$ ').replace(' ','$ ').replace(':', '$:')
class Writer(object): class Writer(object):
def __init__(self, output, width=78): def __init__(self, output, width=78):
@ -35,8 +35,7 @@ class Writer(object):
self._line('%s = %s' % (key, value), indent) self._line('%s = %s' % (key, value), indent)
def rule(self, name, command, description=None, depfile=None, def rule(self, name, command, description=None, depfile=None,
generator=False, restat=False, rspfile=None, generator=False, restat=False, rspfile=None, rspfile_content=None):
rspfile_content=None):
self._line('rule %s' % name) self._line('rule %s' % name)
self.variable('command', command, indent=1) self.variable('command', command, indent=1)
if description: if description:
@ -56,15 +55,15 @@ class Writer(object):
variables=None): variables=None):
outputs = self._as_list(outputs) outputs = self._as_list(outputs)
all_inputs = self._as_list(inputs)[:] all_inputs = self._as_list(inputs)[:]
out_outputs = list(map(escape_spaces, outputs)) out_outputs = list(map(escape_path, outputs))
all_inputs = list(map(escape_spaces, all_inputs)) all_inputs = list(map(escape_path, all_inputs))
if implicit: if implicit:
implicit = map(escape_spaces, self._as_list(implicit)) implicit = map(escape_path, self._as_list(implicit))
all_inputs.append('|') all_inputs.append('|')
all_inputs.extend(implicit) all_inputs.extend(implicit)
if order_only: if order_only:
order_only = map(escape_spaces, self._as_list(order_only)) order_only = map(escape_path, self._as_list(order_only))
all_inputs.append('||') all_inputs.append('||')
all_inputs.extend(order_only) all_inputs.extend(order_only)

68
tools/gyp/pylib/gyp/system_test.py

@ -1,68 +0,0 @@
#!/usr/bin/env python
# Copyright (c) 2011 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import tempfile
import shutil
import subprocess
def TestCommands(commands, files=None, env=None):
  """Run commands in a temporary directory, returning true if they all succeed.
  Return false on failures or if any commands produce output.

  Arguments:
    commands: an array of shell-interpretable commands, e.g. ['ls -l', 'pwd']
              each will be expanded with Python %-expansion using env first.
    files: a dictionary mapping filename to contents;
           files will be created in the temporary directory before running
           the command.
    env: a dictionary of strings to expand commands with.
  """
  # Use None sentinels instead of mutable default arguments ({}), which would
  # be shared across calls.
  files = files or {}
  env = env or {}
  tempdir = tempfile.mkdtemp()
  try:
    for name, contents in files.items():
      # Binary mode so contents are written verbatim.
      with open(os.path.join(tempdir, name), 'wb') as f:
        f.write(contents)
    for command in commands:
      proc = subprocess.Popen(command % env, shell=True,
                              stdout=subprocess.PIPE,
                              stderr=subprocess.STDOUT,
                              cwd=tempdir)
      output = proc.communicate()[0]
      # Any output at all (even on success) counts as failure: these probes
      # are expected to run silently.
      if proc.returncode != 0 or output:
        return False
    return True
  finally:
    # Always clean up the scratch directory, even on an exception.
    # (The original also had an unreachable 'return False' after this
    # try/finally; it has been removed.)
    shutil.rmtree(tempdir)
def TestArSupportsT(ar_command='ar', cc_command='cc'):
  """Test whether 'ar' supports the 'T' flag."""
  # Compile a trivial object, archive it as a thin archive ('T'), then link
  # against that archive; all three must succeed silently.
  probe_commands = [
      '%(cc)s -c test.c',
      '%(ar)s crsT test.a test.o',
      '%(cc)s test.a',
  ]
  probe_files = {'test.c': 'int main(){}'}
  return TestCommands(probe_commands,
                      files=probe_files,
                      env={'ar': ar_command, 'cc': cc_command})
def main():
  """Run the various probe functions and print the results.

  Returns:
    0, always; individual probe failures are reported on stdout only.
  """
  def RunTest(description, function, **kwargs):
    # Run a single probe and report 'ok'/'fail' on one line, matching the
    # original "Testing <desc>: ok" output.
    result = 'ok' if function(**kwargs) else 'fail'
    print('Testing %s: %s' % (description, result))

  RunTest("ar 'T' flag", TestArSupportsT)
  RunTest("ar 'T' flag with ccache", TestArSupportsT, cc_command='ccache cc')
  return 0


if __name__ == '__main__':
  # Raise SystemExit directly: the original called sys.exit(), but this
  # module never imports sys, which made exiting raise a NameError.
  raise SystemExit(main())

43
tools/gyp/pylib/gyp/win_tool.py

@ -9,13 +9,13 @@
These functions are executed via gyp-win-tool when using the ninja generator. These functions are executed via gyp-win-tool when using the ninja generator.
""" """
from ctypes import windll, wintypes
import os import os
import shutil import shutil
import subprocess import subprocess
import sys import sys
import win32con
import win32file BASE_DIR = os.path.dirname(os.path.abspath(__file__))
import pywintypes
def main(args): def main(args):
@ -26,19 +26,28 @@ def main(args):
class LinkLock(object): class LinkLock(object):
"""A flock-style lock to limit the number of concurrent links to one. Based on """A flock-style lock to limit the number of concurrent links to one.
http://code.activestate.com/recipes/65203-portalocker-cross-platform-posixnt-api-for-flock-s/
Uses a session-local mutex based on the file's directory.
""" """
def __enter__(self): def __enter__(self):
self.file = open('LinkLock', 'w+') name = 'Local\\%s' % BASE_DIR.replace('\\', '_').replace(':', '_')
self.file_handle = win32file._get_osfhandle(self.file.fileno()) self.mutex = windll.kernel32.CreateMutexW(
win32file.LockFileEx(self.file_handle, win32con.LOCKFILE_EXCLUSIVE_LOCK, wintypes.c_int(0),
0, -0x10000, pywintypes.OVERLAPPED()) wintypes.c_int(0),
wintypes.create_unicode_buffer(name))
assert self.mutex
result = windll.kernel32.WaitForSingleObject(
self.mutex, wintypes.c_int(0xFFFFFFFF))
# 0x80 means another process was killed without releasing the mutex, but
# that this process has been given ownership. This is fine for our
# purposes.
assert result in (0, 0x80), (
"%s, %s" % (result, windll.kernel32.GetLastError()))
def __exit__(self, type, value, traceback): def __exit__(self, type, value, traceback):
win32file.UnlockFileEx( windll.kernel32.ReleaseMutex(self.mutex)
self.file_handle, 0, -0x10000, pywintypes.OVERLAPPED()) windll.kernel32.CloseHandle(self.mutex)
self.file.close()
class WinTool(object): class WinTool(object):
@ -170,16 +179,6 @@ class WinTool(object):
print line print line
return popen.returncode return popen.returncode
def ExecClWrapper(self, arch, depname, *args):
  """Runs cl.exe and filters output through ninja-deplist-helper to get
  dependency information which is stored in |depname|."""
  # Build the environment for the requested MSVS architecture.
  env = self._GetEnv(arch)
  # NOTE(review): the trailing '"' has no matching opening quote anywhere in
  # the assembled command string -- looks like a stray character; confirm
  # against how the ninja generator quotes this command line.
  args = ' '.join(args) + \
      '| ninja-deplist-helper -r . -q -f cl -o ' + depname + '"'
  # shell=True so the pipe to ninja-deplist-helper is interpreted by cmd.exe.
  popen = subprocess.Popen(args, shell=True, env=env)
  popen.wait()
  return popen.returncode
def ExecActionWrapper(self, arch, rspfile, *dir): def ExecActionWrapper(self, arch, rspfile, *dir):
"""Runs an action command line from a response file using the environment """Runs an action command line from a response file using the environment
for |arch|. If |dir| is supplied, use that as the working directory.""" for |arch|. If |dir| is supplied, use that as the working directory."""

21
tools/gyp/pylib/gyp/xcode_emulation.py

@ -562,6 +562,22 @@ class XcodeSettings(object):
self.configname = None self.configname = None
return ldflags return ldflags
def GetLibtoolflags(self, configname):
  """Returns flags that need to be passed to the static linker.

  Args:
    configname: The name of the configuration to get ld flags for.
  """
  self.configname = configname
  # Copy OTHER_LDFLAGS from the per-configuration settings.
  # TODO(thakis): ARCHS?
  libtoolflags = list(self._Settings().get('OTHER_LDFLAGS', []))
  self.configname = None
  return libtoolflags
def GetPerTargetSettings(self): def GetPerTargetSettings(self):
"""Gets a list of all the per-target settings. This will only fetch keys """Gets a list of all the per-target settings. This will only fetch keys
whose values are the same across all configurations.""" whose values are the same across all configurations."""
@ -923,6 +939,11 @@ def _GetXcodeEnv(xcode_settings, built_products_dir, srcroot, configuration,
'TARGET_BUILD_DIR' : built_products_dir, 'TARGET_BUILD_DIR' : built_products_dir,
'TEMP_DIR' : '${TMPDIR}', 'TEMP_DIR' : '${TMPDIR}',
} }
if xcode_settings.GetPerTargetSetting('SDKROOT'):
env['SDKROOT'] = xcode_settings._SdkPath()
else:
env['SDKROOT'] = ''
if spec['type'] in ( if spec['type'] in (
'executable', 'static_library', 'shared_library', 'loadable_module'): 'executable', 'static_library', 'shared_library', 'loadable_module'):
env['EXECUTABLE_NAME'] = xcode_settings.GetExecutableName() env['EXECUTABLE_NAME'] = xcode_settings.GetExecutableName()

113
tools/gyp/pylib/gyp/xcodeproj_file.py

@ -254,7 +254,7 @@ class XCObject(object):
but in some cases an object's parent may wish to push a but in some cases an object's parent may wish to push a
hashable value into its child, and it can do so by appending hashable value into its child, and it can do so by appending
to _hashables. to _hashables.
Attribues: Attributes:
id: The object's identifier, a 24-character uppercase hexadecimal string. id: The object's identifier, a 24-character uppercase hexadecimal string.
Usually, objects being created should not set id until the entire Usually, objects being created should not set id until the entire
project file structure is built. At that point, UpdateIDs() should project file structure is built. At that point, UpdateIDs() should
@ -392,7 +392,10 @@ class XCObject(object):
return hashables return hashables
def ComputeIDs(self, recursive=True, overwrite=True, hash=None): def HashablesForChild(self):
return None
def ComputeIDs(self, recursive=True, overwrite=True, seed_hash=None):
"""Set "id" properties deterministically. """Set "id" properties deterministically.
An object's "id" property is set based on a hash of its class type and An object's "id" property is set based on a hash of its class type and
@ -419,8 +422,10 @@ class XCObject(object):
hash.update(struct.pack('>i', len(data))) hash.update(struct.pack('>i', len(data)))
hash.update(data) hash.update(data)
if hash is None: if seed_hash is None:
hash = _new_sha1() seed_hash = _new_sha1()
hash = seed_hash.copy()
hashables = self.Hashables() hashables = self.Hashables()
assert len(hashables) > 0 assert len(hashables) > 0
@ -428,8 +433,17 @@ class XCObject(object):
_HashUpdate(hash, hashable) _HashUpdate(hash, hashable)
if recursive: if recursive:
hashables_for_child = self.HashablesForChild()
if hashables_for_child is None:
child_hash = hash
else:
assert len(hashables_for_child) > 0
child_hash = seed_hash.copy()
for hashable in hashables_for_child:
_HashUpdate(child_hash, hashable)
for child in self.Children(): for child in self.Children():
child.ComputeIDs(recursive, overwrite, hash.copy()) child.ComputeIDs(recursive, overwrite, child_hash)
if overwrite or self.id is None: if overwrite or self.id is None:
# Xcode IDs are only 96 bits (24 hex characters), but a SHA-1 digest is # Xcode IDs are only 96 bits (24 hex characters), but a SHA-1 digest is
@ -1104,6 +1118,26 @@ class PBXGroup(XCHierarchicalElement):
for child in self._properties.get('children', []): for child in self._properties.get('children', []):
self._AddChildToDicts(child) self._AddChildToDicts(child)
def Hashables(self):
  # super
  hashables = XCHierarchicalElement.Hashables(self)

  # It is not sufficient to just rely on name and parent to build a unique
  # hashable: a node could have two child PBXGroups sharing a common name.
  # To add entropy the hashable is enhanced with the names of all its
  # children.
  for child in self._properties.get('children', []):
    child_name = child.Name()
    # Compare to None by identity (PEP 8), not equality.
    if child_name is not None:
      hashables.append(child_name)
  return hashables
def HashablesForChild(self):
  # To avoid a circular reference the hashables used to compute a child id do
  # not include the child names: only this group's own hashables (from the
  # XCHierarchicalElement base) seed the child's hash.
  return XCHierarchicalElement.Hashables(self)
def _AddChildToDicts(self, child): def _AddChildToDicts(self, child):
# Sets up this PBXGroup object's dicts to reference the child properly. # Sets up this PBXGroup object's dicts to reference the child properly.
child_path = child.PathFromSourceTreeAndPath() child_path = child.PathFromSourceTreeAndPath()
@ -1440,40 +1474,41 @@ class PBXFileReference(XCFileLikeElement, XCContainerPortal, XCRemoteObject):
# TODO(mark): This is the replacement for a replacement for a quick hack. # TODO(mark): This is the replacement for a replacement for a quick hack.
# It is no longer incredibly sucky, but this list needs to be extended. # It is no longer incredibly sucky, but this list needs to be extended.
extension_map = { extension_map = {
'a': 'archive.ar', 'a': 'archive.ar',
'app': 'wrapper.application', 'app': 'wrapper.application',
'bdic': 'file', 'bdic': 'file',
'bundle': 'wrapper.cfbundle', 'bundle': 'wrapper.cfbundle',
'c': 'sourcecode.c.c', 'c': 'sourcecode.c.c',
'cc': 'sourcecode.cpp.cpp', 'cc': 'sourcecode.cpp.cpp',
'cpp': 'sourcecode.cpp.cpp', 'cpp': 'sourcecode.cpp.cpp',
'css': 'text.css', 'css': 'text.css',
'cxx': 'sourcecode.cpp.cpp', 'cxx': 'sourcecode.cpp.cpp',
'dylib': 'compiled.mach-o.dylib', 'dylib': 'compiled.mach-o.dylib',
'framework': 'wrapper.framework', 'framework': 'wrapper.framework',
'h': 'sourcecode.c.h', 'h': 'sourcecode.c.h',
'hxx': 'sourcecode.cpp.h', 'hxx': 'sourcecode.cpp.h',
'icns': 'image.icns', 'icns': 'image.icns',
'java': 'sourcecode.java', 'java': 'sourcecode.java',
'js': 'sourcecode.javascript', 'js': 'sourcecode.javascript',
'm': 'sourcecode.c.objc', 'm': 'sourcecode.c.objc',
'mm': 'sourcecode.cpp.objcpp', 'mm': 'sourcecode.cpp.objcpp',
'nib': 'wrapper.nib', 'nib': 'wrapper.nib',
'o': 'compiled.mach-o.objfile', 'o': 'compiled.mach-o.objfile',
'pdf': 'image.pdf', 'pdf': 'image.pdf',
'pl': 'text.script.perl', 'pl': 'text.script.perl',
'plist': 'text.plist.xml', 'plist': 'text.plist.xml',
'pm': 'text.script.perl', 'pm': 'text.script.perl',
'png': 'image.png', 'png': 'image.png',
'py': 'text.script.python', 'py': 'text.script.python',
'r': 'sourcecode.rez', 'r': 'sourcecode.rez',
'rez': 'sourcecode.rez', 'rez': 'sourcecode.rez',
's': 'sourcecode.asm', 's': 'sourcecode.asm',
'strings': 'text.plist.strings', 'strings': 'text.plist.strings',
'ttf': 'file', 'ttf': 'file',
'xcconfig': 'text.xcconfig', 'xcconfig': 'text.xcconfig',
'xib': 'file.xib', 'xcdatamodel': 'wrapper.xcdatamodel',
'y': 'sourcecode.yacc', 'xib': 'file.xib',
'y': 'sourcecode.yacc',
} }
if is_dir: if is_dir:

Loading…
Cancel
Save