
gyp: update to 78b26f7

v0.10.23-release
Timothy J Fontaine
commit 4ec189b250
16 changed files, with per-file diff line counts in parentheses:

  1.  tools/gyp/PRESUBMIT.py (7)
  2.  tools/gyp/buildbot/buildbot_run.py (42)
  3.  tools/gyp/gyp (5)
  4.  tools/gyp/gyptest.py (2)
  5.  tools/gyp/pylib/gyp/MSVSSettings.py (6)
  6.  tools/gyp/pylib/gyp/MSVSVersion.py (5)
  7.  tools/gyp/pylib/gyp/generator/android.py (2)
  8.  tools/gyp/pylib/gyp/generator/cmake.py (1150)
  9.  tools/gyp/pylib/gyp/generator/make.py (13)
  10. tools/gyp/pylib/gyp/generator/msvs.py (103)
  11. tools/gyp/pylib/gyp/generator/ninja.py (67)
  12. tools/gyp/pylib/gyp/input.py (1)
  13. tools/gyp/pylib/gyp/mac_tool.py (246)
  14. tools/gyp/pylib/gyp/msvs_emulation.py (64)
  15. tools/gyp/pylib/gyp/win_tool.py (49)
  16. tools/gyp/pylib/gyp/xcode_emulation.py (142)

tools/gyp/PRESUBMIT.py (7)

@ -97,14 +97,19 @@ def CheckChangeOnCommit(input_api, output_api):
'http://gyp-status.appspot.com/status',
'http://gyp-status.appspot.com/current'))
import os
import sys
old_sys_path = sys.path
try:
sys.path = ['pylib', 'test/lib'] + sys.path
blacklist = PYLINT_BLACKLIST
if sys.platform == 'win32':
blacklist = [os.path.normpath(x).replace('\\', '\\\\')
for x in PYLINT_BLACKLIST]
report.extend(input_api.canned_checks.RunPylint(
input_api,
output_api,
black_list=PYLINT_BLACKLIST,
black_list=blacklist,
disabled_warnings=PYLINT_DISABLED_WARNINGS))
finally:
sys.path = old_sys_path

tools/gyp/buildbot/buildbot_run.py (42)

@ -23,6 +23,8 @@ BUILDBOT_DIR = os.path.dirname(os.path.abspath(__file__))
TRUNK_DIR = os.path.dirname(BUILDBOT_DIR)
ROOT_DIR = os.path.dirname(TRUNK_DIR)
ANDROID_DIR = os.path.join(ROOT_DIR, 'android')
CMAKE_DIR = os.path.join(ROOT_DIR, 'cmake')
CMAKE_BIN_DIR = os.path.join(CMAKE_DIR, 'bin')
OUT_DIR = os.path.join(TRUNK_DIR, 'out')
@ -34,6 +36,43 @@ def CallSubProcess(*args, **kwargs):
sys.exit(1)
def PrepareCmake():
"""Build CMake 2.8.8 since the version in Precise is 2.8.7."""
if os.environ['BUILDBOT_CLOBBER'] == '1':
print '@@@BUILD_STEP Clobber CMake checkout@@@'
shutil.rmtree(CMAKE_DIR)
# We always build CMake 2.8.8, so no need to do anything
# if the directory already exists.
if os.path.isdir(CMAKE_DIR):
return
print '@@@BUILD_STEP Initialize CMake checkout@@@'
os.mkdir(CMAKE_DIR)
CallSubProcess(['git', 'config', '--global', 'user.name', 'trybot'])
CallSubProcess(['git', 'config', '--global',
'user.email', 'chrome-bot@google.com'])
CallSubProcess(['git', 'config', '--global', 'color.ui', 'false'])
print '@@@BUILD_STEP Sync CMake@@@'
CallSubProcess(
['git', 'clone',
'--depth', '1',
'--single-branch',
'--branch', 'v2.8.8',
'--',
'git://cmake.org/cmake.git',
CMAKE_DIR],
cwd=CMAKE_DIR)
print '@@@BUILD_STEP Build CMake@@@'
CallSubProcess(
['/bin/bash', 'bootstrap', '--prefix=%s' % CMAKE_DIR],
cwd=CMAKE_DIR)
CallSubProcess( ['make', 'cmake'], cwd=CMAKE_DIR)
def PrepareAndroidTree():
"""Prepare an Android tree to run 'android' format tests."""
if os.environ['BUILDBOT_CLOBBER'] == '1':
@ -91,6 +130,7 @@ def GypTestFormat(title, format=None, msvs_version=None):
'--all',
'--passed',
'--format', format,
'--path', CMAKE_BIN_DIR,
'--chdir', 'trunk'])
if format == 'android':
# gyptest needs the environment setup from envsetup/lunch in order to build
@ -124,6 +164,8 @@ def GypBuild():
elif sys.platform.startswith('linux'):
retcode += GypTestFormat('ninja')
retcode += GypTestFormat('make')
PrepareCmake()
retcode += GypTestFormat('cmake')
elif sys.platform == 'darwin':
retcode += GypTestFormat('ninja')
retcode += GypTestFormat('xcode')

tools/gyp/gyp (5)

@ -3,5 +3,6 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
BASE=`dirname $0`
python $BASE/gyp_main.py "$@"
set -e
base=$(dirname "$0")
exec python "${base}/gyp_main.py" "$@"

tools/gyp/gyptest.py (2)

@ -176,7 +176,7 @@ def main(argv=None):
if opts.path:
extra_path = [os.path.abspath(p) for p in opts.path]
extra_path = os.pathsep.join(extra_path)
os.environ['PATH'] += os.pathsep + extra_path
os.environ['PATH'] = extra_path + os.pathsep + os.environ['PATH']
if not args:
if not opts.all:
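
As an aside, the PATH change above means entries passed via --path are now consulted before system binaries rather than after them, so a locally built tool (such as the CMake binary the buildbot prepares in CMAKE_BIN_DIR) wins over any system install. A minimal sketch of the difference, using a hypothetical extra_path value:

  import os

  extra_path = '/tmp/cmake/bin'   # hypothetical --path entry
  # old behaviour: appended, so a system cmake still shadowed the local one
  #   os.environ['PATH'] += os.pathsep + extra_path
  # new behaviour: prepended, so the local binary is found first
  os.environ['PATH'] = extra_path + os.pathsep + os.environ['PATH']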

tools/gyp/pylib/gyp/MSVSSettings.py (6)

@ -812,6 +812,8 @@ _Same(_link, 'UACExecutionLevel',
_Enumeration(['AsInvoker', # /level='asInvoker'
'HighestAvailable', # /level='highestAvailable'
'RequireAdministrator'])) # /level='requireAdministrator'
_Same(_link, 'MinimumRequiredVersion', _string)
_Same(_link, 'TreatLinkerWarningAsErrors', _boolean) # /WX
# Options found in MSVS that have been renamed in MSBuild.
@ -850,8 +852,6 @@ _MSBuildOnly(_link, 'LinkStatus', _boolean) # /LTCG:STATUS
_MSBuildOnly(_link, 'PreventDllBinding', _boolean) # /ALLOWBIND
_MSBuildOnly(_link, 'SupportNobindOfDelayLoadedDLL', _boolean) # /DELAY:NOBIND
_MSBuildOnly(_link, 'TrackerLogDirectory', _folder_name)
_MSBuildOnly(_link, 'TreatLinkerWarningAsErrors', _boolean) # /WX
_MSBuildOnly(_link, 'MinimumRequiredVersion', _string)
_MSBuildOnly(_link, 'MSDOSStubFileName', _file_name) # /STUB Visible='false'
_MSBuildOnly(_link, 'SectionAlignment', _integer) # /ALIGN
_MSBuildOnly(_link, 'SpecifySectionAttributes', _string) # /SECTION
@ -985,6 +985,7 @@ _Same(_lib, 'OutputFile', _file_name) # /OUT
_Same(_lib, 'SuppressStartupBanner', _boolean) # /NOLOGO
_Same(_lib, 'UseUnicodeResponseFiles', _boolean)
_Same(_lib, 'LinkTimeCodeGeneration', _boolean) # /LTCG
_Same(_lib, 'TargetMachine', _target_machine_enumeration)
# TODO(jeanluc) _link defines the same value that gets moved to
# ProjectReference. We may want to validate that they are consistent.
@ -1003,7 +1004,6 @@ _MSBuildOnly(_lib, 'MinimumRequiredVersion', _string)
_MSBuildOnly(_lib, 'Name', _file_name) # /NAME
_MSBuildOnly(_lib, 'RemoveObjects', _file_list) # /REMOVE
_MSBuildOnly(_lib, 'SubSystem', _subsystem_enumeration)
_MSBuildOnly(_lib, 'TargetMachine', _target_machine_enumeration)
_MSBuildOnly(_lib, 'TrackerLogDirectory', _folder_name)
_MSBuildOnly(_lib, 'TreatLibWarningAsErrors', _boolean) # /WX
_MSBuildOnly(_lib, 'Verbose', _boolean)

tools/gyp/pylib/gyp/MSVSVersion.py (5)

@ -10,6 +10,7 @@ import re
import subprocess
import sys
import gyp
import glob
class VisualStudioVersion(object):
@ -339,13 +340,13 @@ def _DetectVisualStudioVersions(versions_to_check, force_express):
path = _ConvertToCygpath(path)
# Check for full.
full_path = os.path.join(path, 'devenv.exe')
express_path = os.path.join(path, 'vcexpress.exe')
express_path = os.path.join(path, '*express.exe')
if not force_express and os.path.exists(full_path):
# Add this one.
versions.append(_CreateVersion(version_to_year[version],
os.path.join(path, '..', '..')))
# Check for express.
elif os.path.exists(express_path):
elif glob.glob(express_path):
# Add this one.
versions.append(_CreateVersion(version_to_year[version] + 'e',
os.path.join(path, '..', '..')))
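
The glob-based check above accepts any *express.exe binary in the IDE directory rather than only vcexpress.exe. A tiny sketch of the detection, using a hypothetical install path:

  import glob
  import os

  path = r'C:\Program Files (x86)\Microsoft Visual Studio 11.0\Common7\IDE'  # hypothetical
  if glob.glob(os.path.join(path, '*express.exe')):
    print 'Express edition detected'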

tools/gyp/pylib/gyp/generator/android.py (2)

@ -452,7 +452,7 @@ class AndroidMkWriter(object):
(output, path))
self.WriteLn('\t@echo Copying: $@')
self.WriteLn('\t$(hide) mkdir -p $(dir $@)')
self.WriteLn('\t$(hide) $(ACP) -r $< $@')
self.WriteLn('\t$(hide) $(ACP) -rpf $< $@')
self.WriteLn()
outputs.append(output)
self.WriteLn('%s = %s' % (variable,

tools/gyp/pylib/gyp/generator/cmake.py (1150)

File diff suppressed because it is too large

tools/gyp/pylib/gyp/generator/make.py (13)

@ -57,6 +57,7 @@ generator_wants_sorted_dependencies = False
generator_additional_non_configuration_keys = []
generator_additional_path_sections = []
generator_extra_sources_for_rules = []
generator_filelist_paths = None
def CalculateVariables(default_variables, params):
@ -103,6 +104,18 @@ def CalculateGeneratorInputInfo(params):
global generator_wants_sorted_dependencies
generator_wants_sorted_dependencies = True
output_dir = params['options'].generator_output or \
params['options'].toplevel_dir
builddir_name = generator_flags.get('output_dir', 'out')
qualified_out_dir = os.path.normpath(os.path.join(
output_dir, builddir_name, 'gypfiles'))
global generator_filelist_paths
generator_filelist_paths = {
'toplevel': params['options'].toplevel_dir,
'qualified_out_dir': qualified_out_dir,
}
def ensure_directory_exists(path):
dir = os.path.dirname(path)
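
The new generator_filelist_paths block above appears to record where generated file lists should be written. A short sketch of the path it computes, with hypothetical option values:

  import os

  toplevel_dir = '/src/project'   # hypothetical stand-ins for params['options']
  generator_output = None
  builddir_name = 'out'           # default for the 'output_dir' generator flag

  output_dir = generator_output or toplevel_dir
  qualified_out_dir = os.path.normpath(
      os.path.join(output_dir, builddir_name, 'gypfiles'))
  print qualified_out_dir         # /src/project/out/gypfiles on a POSIX path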

tools/gyp/pylib/gyp/generator/msvs.py (103)

@ -2,6 +2,7 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import collections
import copy
import ntpath
import os
@ -86,6 +87,46 @@ cached_username = None
cached_domain = None
# Based on http://code.activestate.com/recipes/576694/.
class OrderedSet(collections.MutableSet):
def __init__(self, iterable=None):
self.end = end = []
end += [None, end, end] # sentinel node for doubly linked list
self.map = {} # key --> [key, prev, next]
if iterable is not None:
self |= iterable
def __len__(self):
return len(self.map)
def discard(self, key):
if key in self.map:
key, prev, next = self.map.pop(key)
prev[2] = next
next[1] = prev
def __contains__(self, key):
return key in self.map
def add(self, key):
if key not in self.map:
end = self.end
curr = end[1]
curr[2] = end[1] = self.map[key] = [key, curr, end]
def update(self, iterable):
for i in iterable:
if i not in self:
self.add(i)
def __iter__(self):
end = self.end
curr = end[2]
while curr is not end:
yield curr[0]
curr = curr[2]
# TODO(gspencer): Switch the os.environ calls to be
# win32api.GetDomainName() and win32api.GetUserName() once the
# python version in depot_tools has been updated to work on Vista
@ -179,7 +220,7 @@ def _ConvertSourcesToFilterHierarchy(sources, prefix=None, excluded=None,
if not prefix: prefix = []
result = []
excluded_result = []
folders = dict()
folders = collections.OrderedDict()
# Gather files into the final result, excluded, or folders.
for s in sources:
if len(s) == 1:
@ -415,13 +456,13 @@ def _AddAccumulatedActionsToMSVS(p, spec, actions_dict):
dicts describing the actions attached to that input file.
"""
for primary_input in actions_dict:
inputs = set()
outputs = set()
inputs = OrderedSet()
outputs = OrderedSet()
descriptions = []
commands = []
for action in actions_dict[primary_input]:
inputs.update(set(action['inputs']))
outputs.update(set(action['outputs']))
inputs.update(OrderedSet(action['inputs']))
outputs.update(OrderedSet(action['outputs']))
descriptions.append(action['description'])
commands.append(action['command'])
# Add the custom build step for one input file.
@ -477,8 +518,8 @@ def _RuleInputsAndOutputs(rule, trigger_file):
"""
raw_inputs = _FixPaths(rule.get('inputs', []))
raw_outputs = _FixPaths(rule.get('outputs', []))
inputs = set()
outputs = set()
inputs = OrderedSet()
outputs = OrderedSet()
inputs.add(trigger_file)
for i in raw_inputs:
inputs.add(_RuleExpandPath(i, trigger_file))
@ -549,16 +590,16 @@ def _GenerateExternalRules(rules, output_dir, spec,
mk_file.write('OutDirCygwin:=$(shell cygpath -u "$(OutDir)")\n')
mk_file.write('IntDirCygwin:=$(shell cygpath -u "$(IntDir)")\n')
# Gather stuff needed to emit all: target.
all_inputs = set()
all_outputs = set()
all_output_dirs = set()
all_inputs = OrderedSet()
all_outputs = OrderedSet()
all_output_dirs = OrderedSet()
first_outputs = []
for rule in rules:
trigger_files = _FindRuleTriggerFiles(rule, sources)
for tf in trigger_files:
inputs, outputs = _RuleInputsAndOutputs(rule, tf)
all_inputs.update(set(inputs))
all_outputs.update(set(outputs))
all_inputs.update(OrderedSet(inputs))
all_outputs.update(OrderedSet(outputs))
# Only use one target from each rule as the dependency for
# 'all' so we don't try to build each rule multiple times.
first_outputs.append(list(outputs)[0])
@ -799,8 +840,8 @@ def _AdjustSourcesForRules(spec, rules, sources, excluded_sources):
trigger_files = _FindRuleTriggerFiles(rule, sources)
for trigger_file in trigger_files:
inputs, outputs = _RuleInputsAndOutputs(rule, trigger_file)
inputs = set(_FixPaths(inputs))
outputs = set(_FixPaths(outputs))
inputs = OrderedSet(_FixPaths(inputs))
outputs = OrderedSet(_FixPaths(outputs))
inputs.remove(_FixPath(trigger_file))
sources.update(inputs)
if not spec.get('msvs_external_builder'):
@ -817,7 +858,7 @@ def _FilterActionsFromExcluded(excluded_sources, actions_to_add):
Returns:
excluded_sources with files that have actions attached removed.
"""
must_keep = set(_FixPaths(actions_to_add.keys()))
must_keep = OrderedSet(_FixPaths(actions_to_add.keys()))
return [s for s in excluded_sources if s not in must_keep]
@ -965,7 +1006,7 @@ def _GetUniquePlatforms(spec):
The MSVSUserFile object created.
"""
# Gather list of unique platforms.
platforms = set()
platforms = OrderedSet()
for configuration in spec['configurations']:
platforms.add(_ConfigPlatform(spec['configurations'][configuration]))
platforms = list(platforms)
@ -1152,7 +1193,7 @@ def _GetLibraries(spec):
# in libraries that are assumed to be in the default library path).
# Also remove duplicate entries, leaving only the last duplicate, while
# preserving order.
found = set()
found = OrderedSet()
unique_libraries_list = []
for entry in reversed(libraries):
library = re.sub('^\-l', '', entry)
@ -1331,8 +1372,7 @@ def _GetMSVSAttributes(spec, config, config_type):
def _AddNormalizedSources(sources_set, sources_array):
sources = [_NormalizedSource(s) for s in sources_array]
sources_set.update(set(sources))
sources_set.update(_NormalizedSource(s) for s in sources_array)
def _PrepareListOfSources(spec, generator_flags, gyp_file):
@ -1350,9 +1390,9 @@ def _PrepareListOfSources(spec, generator_flags, gyp_file):
A pair of (list of sources, list of excluded sources).
The sources will be relative to the gyp file.
"""
sources = set()
sources = OrderedSet()
_AddNormalizedSources(sources, spec.get('sources', []))
excluded_sources = set()
excluded_sources = OrderedSet()
# Add in the gyp file.
if not generator_flags.get('standalone'):
sources.add(gyp_file)
@ -1362,7 +1402,7 @@ def _PrepareListOfSources(spec, generator_flags, gyp_file):
inputs = a['inputs']
inputs = [_NormalizedSource(i) for i in inputs]
# Add all inputs to sources and excluded sources.
inputs = set(inputs)
inputs = OrderedSet(inputs)
sources.update(inputs)
if not spec.get('msvs_external_builder'):
excluded_sources.update(inputs)
@ -1391,7 +1431,7 @@ def _AdjustSourcesAndConvertToFilterHierarchy(
path of excluded IDL file)
"""
# Exclude excluded sources coming into the generator.
excluded_sources.update(set(spec.get('sources_excluded', [])))
excluded_sources.update(OrderedSet(spec.get('sources_excluded', [])))
# Add excluded sources into sources for good measure.
sources.update(excluded_sources)
# Convert to proper windows form.
@ -1412,6 +1452,11 @@ def _AdjustSourcesAndConvertToFilterHierarchy(
sources = _ConvertSourcesToFilterHierarchy(sources, excluded=fully_excluded,
list_excluded=list_excluded)
# Prune filters with a single child to flatten ugly directory structures
# such as ../../src/modules/module1 etc.
while len(sources) == 1 and isinstance(sources[0], MSVSProject.Filter):
sources = sources[0].contents
return sources, excluded_sources, excluded_idl
@ -1479,7 +1524,7 @@ def _GetExcludedFilesFromBuild(spec, excluded_sources, excluded_idl):
def _AddToolFilesToMSVS(p, spec):
# Add in tool files (rules).
tool_files = set()
tool_files = OrderedSet()
for _, config in spec['configurations'].iteritems():
for f in config.get('msvs_tool_files', []):
tool_files.add(f)
@ -3202,16 +3247,16 @@ def _GenerateActionsForMSBuild(spec, actions_to_add):
Returns:
A pair of (action specification, the sources handled by this action).
"""
sources_handled_by_action = set()
sources_handled_by_action = OrderedSet()
actions_spec = []
for primary_input, actions in actions_to_add.iteritems():
inputs = set()
outputs = set()
inputs = OrderedSet()
outputs = OrderedSet()
descriptions = []
commands = []
for action in actions:
inputs.update(set(action['inputs']))
outputs.update(set(action['outputs']))
inputs.update(OrderedSet(action['inputs']))
outputs.update(OrderedSet(action['outputs']))
descriptions.append(action['description'])
cmd = action['command']
# For most actions, add 'call' so that actions that invoke batch files
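
A quick illustration of why the generator swaps plain set() for the OrderedSet defined above: a built-in set iterates in arbitrary hash order, while OrderedSet yields items in insertion order, presumably so the emitted project files stay stable from run to run. A sketch, assuming the OrderedSet class from the hunk above is in scope:

  inputs = OrderedSet()
  for path in ['b.cc', 'a.cc', 'b.cc', 'c.cc']:
    inputs.add(path)
  print list(inputs)   # ['b.cc', 'a.cc', 'c.cc'] -- insertion order, duplicates dropped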

tools/gyp/pylib/gyp/generator/ninja.py (67)

@ -814,15 +814,18 @@ class NinjaWriter:
cflags_c = self.msvs_settings.GetCflagsC(config_name)
cflags_cc = self.msvs_settings.GetCflagsCC(config_name)
extra_defines = self.msvs_settings.GetComputedDefines(config_name)
pdbpath = self.msvs_settings.GetCompilerPdbName(
# See comment at cc_command for why there's two .pdb files.
pdbpath_c = pdbpath_cc = self.msvs_settings.GetCompilerPdbName(
config_name, self.ExpandSpecial)
if not pdbpath:
if not pdbpath_c:
obj = 'obj'
if self.toolset != 'target':
obj += '.' + self.toolset
pdbpath = os.path.normpath(os.path.join(obj, self.base_dir,
self.name + '.pdb'))
self.WriteVariableList(ninja_file, 'pdbname', [pdbpath])
pdbpath = os.path.normpath(os.path.join(obj, self.base_dir, self.name))
pdbpath_c = pdbpath + '.c.pdb'
pdbpath_cc = pdbpath + '.cc.pdb'
self.WriteVariableList(ninja_file, 'pdbname_c', [pdbpath_c])
self.WriteVariableList(ninja_file, 'pdbname_cc', [pdbpath_cc])
self.WriteVariableList(ninja_file, 'pchprefix', [self.name])
else:
cflags = config.get('cflags', [])
@ -1578,18 +1581,24 @@ def _GetWinLinkRuleNameSuffix(embed_manifest, link_incremental):
def _AddWinLinkRules(master_ninja, embed_manifest, link_incremental):
"""Adds link rules for Windows platform to |master_ninja|."""
def FullLinkCommand(ldcmd, out, binary_type):
cmd = ('cmd /c %(ldcmd)s'
' && %(python)s gyp-win-tool manifest-wrapper $arch'
' cmd /c if exist %(out)s.manifest del %(out)s.manifest'
' && %(python)s gyp-win-tool manifest-wrapper $arch'
' $mt -nologo -manifest $manifests')
"""Returns a one-liner written for cmd.exe to handle multiphase linker
operations including manifest file generation. The command will be
structured as follows:
cmd /c (linkcmd1 a b) && (linkcmd2 x y) && ... &&
if not "$manifests"=="" ((manifestcmd1 a b) && (manifestcmd2 x y) && ... )
Note that $manifests becomes empty when no manifest file is generated."""
link_commands = ['%(ldcmd)s',
'if exist %(out)s.manifest del %(out)s.manifest']
mt_cmd = ('%(python)s gyp-win-tool manifest-wrapper'
' $arch $mt -nologo -manifest $manifests')
if embed_manifest and not link_incremental:
# Embed manifest into a binary. If incremental linking is enabled,
# embedding is postponed to the re-linking stage (see below).
cmd += ' -outputresource:%(out)s;%(resname)s'
mt_cmd += ' -outputresource:%(out)s;%(resname)s'
else:
# Save manifest as an external file.
cmd += ' -out:%(out)s.manifest'
mt_cmd += ' -out:%(out)s.manifest'
manifest_commands = [mt_cmd]
if link_incremental:
# There is no point in generating separate rule for the case when
# incremental linking is enabled, but manifest embedding is disabled.
@ -1597,11 +1606,14 @@ def _AddWinLinkRules(master_ninja, embed_manifest, link_incremental):
# See also implementation of _GetWinLinkRuleNameSuffix().
assert embed_manifest
# Make .rc file out of manifest, compile it to .res file and re-link.
cmd += (' && %(python)s gyp-win-tool manifest-to-rc $arch'
' %(out)s.manifest %(out)s.manifest.rc %(resname)s'
' && %(python)s gyp-win-tool rc-wrapper $arch $rc'
' %(out)s.manifest.rc'
' && %(ldcmd)s %(out)s.manifest.res')
manifest_commands += [
('%(python)s gyp-win-tool manifest-to-rc $arch %(out)s.manifest'
' %(out)s.manifest.rc %(resname)s'),
'%(python)s gyp-win-tool rc-wrapper $arch $rc %(out)s.manifest.rc',
'%(ldcmd)s %(out)s.manifest.res']
cmd = 'cmd /c %s && if not "$manifests"=="" (%s)' % (
' && '.join(['(%s)' % c for c in link_commands]),
' && '.join(['(%s)' % c for c in manifest_commands]))
resource_name = {
'exe': '1',
'dll': '2',
@ -1656,9 +1668,8 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params,
toplevel_build = os.path.join(options.toplevel_dir, build_dir)
master_ninja = ninja_syntax.Writer(
OpenOutput(os.path.join(toplevel_build, 'build.ninja')),
width=120)
master_ninja_file = OpenOutput(os.path.join(toplevel_build, 'build.ninja'))
master_ninja = ninja_syntax.Writer(master_ninja_file, width=120)
# Put build-time support tools in out/{config_name}.
gyp.common.CopyTool(flavor, toplevel_build)
@ -1679,8 +1690,8 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params,
ld = 'link.exe'
ld_host = '$ld'
else:
cc = 'gcc'
cxx = 'g++'
cc = 'cc'
cxx = 'c++'
ld = '$cc'
ldxx = '$cxx'
ld_host = '$cc_host'
@ -1798,14 +1809,20 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params,
depfile='$out.d',
deps=deps)
else:
# TODO(scottmg) Separate pdb names is a test to see if it works around
# http://crbug.com/142362. It seems there's a race between the creation of
# the .pdb by the precompiled header step for .cc and the compilation of
# .c files. This should be handled by mspdbsrv, but rarely errors out with
# c1xx : fatal error C1033: cannot open program database
# By making the rules target separate pdb files this might be avoided.
cc_command = ('ninja -t msvc -e $arch ' +
'-- '
'$cc /nologo /showIncludes /FC '
'@$out.rsp /c $in /Fo$out /Fd$pdbname ')
'@$out.rsp /c $in /Fo$out /Fd$pdbname_c ')
cxx_command = ('ninja -t msvc -e $arch ' +
'-- '
'$cxx /nologo /showIncludes /FC '
'@$out.rsp /c $in /Fo$out /Fd$pdbname ')
'@$out.rsp /c $in /Fo$out /Fd$pdbname_cc ')
master_ninja.rule(
'cc',
description='CC $out',
@ -2098,6 +2115,8 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params,
master_ninja.build('all', 'phony', list(all_outputs))
master_ninja.default(generator_flags.get('default_target', 'all'))
master_ninja_file.close()
def PerformBuild(data, configurations, params):
options = params['options']
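
The separate pdbname_c / pdbname_cc variables above give the C and C++ rules distinct /Fd outputs, so the .pdb written by the precompiled-header step for .cc files no longer races with the compilation of .c files (the C1033 failure noted in the TODO). A sketch of the default names, assuming a hypothetical target 'foo' with base_dir 'base':

  import os

  obj, base_dir, name = 'obj', 'base', 'foo'   # hypothetical target layout
  pdbpath = os.path.normpath(os.path.join(obj, base_dir, name))
  print pdbpath + '.c.pdb'    # written to the ninja file as pdbname_c
  print pdbpath + '.cc.pdb'   # written to the ninja file as pdbname_cc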

tools/gyp/pylib/gyp/input.py (1)

@ -2278,6 +2278,7 @@ def ProcessListFiltersInDict(name, the_dict):
continue
if not isinstance(the_dict[list_key], list):
value = the_dict[list_key]
raise ValueError, name + ' key ' + list_key + \
' must be list, not ' + \
value.__class__.__name__ + ' when applying ' + \

tools/gyp/pylib/gyp/mac_tool.py (246)

@ -9,6 +9,8 @@ These functions are executed via gyp-mac-tool when using the Makefile generator.
"""
import fcntl
import fnmatch
import glob
import json
import os
import plistlib
@ -17,6 +19,7 @@ import shutil
import string
import subprocess
import sys
import tempfile
def main(args):
@ -259,6 +262,249 @@ class MacTool(object):
os.remove(link)
os.symlink(dest, link)
def ExecCodeSignBundle(self, key, resource_rules, entitlements, provisioning):
"""Code sign a bundle.
This function tries to code sign an iOS bundle, following the same
algorithm as Xcode:
1. copy ResourceRules.plist from the user or the SDK into the bundle,
2. pick the provisioning profile that best match the bundle identifier,
and copy it into the bundle as embedded.mobileprovision,
3. copy Entitlements.plist from user or SDK next to the bundle,
4. code sign the bundle.
"""
resource_rules_path = self._InstallResourceRules(resource_rules)
substitutions, overrides = self._InstallProvisioningProfile(
provisioning, self._GetCFBundleIdentifier())
entitlements_path = self._InstallEntitlements(
entitlements, substitutions, overrides)
subprocess.check_call([
'codesign', '--force', '--sign', key, '--resource-rules',
resource_rules_path, '--entitlements', entitlements_path,
os.path.join(
os.environ['TARGET_BUILD_DIR'],
os.environ['FULL_PRODUCT_NAME'])])
def _InstallResourceRules(self, resource_rules):
"""Installs ResourceRules.plist from user or SDK into the bundle.
Args:
resource_rules: string, optional, path to the ResourceRules.plist file
to use, default to "${SDKROOT}/ResourceRules.plist"
Returns:
Path to the copy of ResourceRules.plist into the bundle.
"""
source_path = resource_rules
target_path = os.path.join(
os.environ['BUILT_PRODUCTS_DIR'],
os.environ['CONTENTS_FOLDER_PATH'],
'ResourceRules.plist')
if not source_path:
source_path = os.path.join(
os.environ['SDKROOT'], 'ResourceRules.plist')
shutil.copy2(source_path, target_path)
return target_path
def _InstallProvisioningProfile(self, profile, bundle_identifier):
"""Installs embedded.mobileprovision into the bundle.
Args:
profile: string, optional, short name of the .mobileprovision file
to use, if empty or the file is missing, the best file installed
will be used
bundle_identifier: string, value of CFBundleIdentifier from Info.plist
Returns:
A tuple containing two dictionary: variables substitutions and values
to overrides when generating the entitlements file.
"""
source_path, provisioning_data, team_id = self._FindProvisioningProfile(
profile, bundle_identifier)
target_path = os.path.join(
os.environ['BUILT_PRODUCTS_DIR'],
os.environ['CONTENTS_FOLDER_PATH'],
'embedded.mobileprovision')
shutil.copy2(source_path, target_path)
substitutions = self._GetSubstitutions(bundle_identifier, team_id + '.')
return substitutions, provisioning_data['Entitlements']
def _FindProvisioningProfile(self, profile, bundle_identifier):
"""Finds the .mobileprovision file to use for signing the bundle.
Checks all the installed provisioning profiles (or if the user specified
the PROVISIONING_PROFILE variable, only consult it) and select the most
specific that correspond to the bundle identifier.
Args:
profile: string, optional, short name of the .mobileprovision file
to use, if empty or the file is missing, the best file installed
will be used
bundle_identifier: string, value of CFBundleIdentifier from Info.plist
Returns:
A tuple of the path to the selected provisioning profile, the data of
the embedded plist in the provisioning profile and the team identifier
to use for code signing.
Raises:
SystemExit: if no .mobileprovision can be used to sign the bundle.
"""
profiles_dir = os.path.join(
os.environ['HOME'], 'Library', 'MobileDevice', 'Provisioning Profiles')
if not os.path.isdir(profiles_dir):
print >>sys.stderr, (
'cannot find mobile provisioning for %s' % bundle_identifier)
sys.exit(1)
provisioning_profiles = None
if profile:
profile_path = os.path.join(profiles_dir, profile + '.mobileprovision')
if os.path.exists(profile_path):
provisioning_profiles = [profile_path]
if not provisioning_profiles:
provisioning_profiles = glob.glob(
os.path.join(profiles_dir, '*.mobileprovision'))
valid_provisioning_profiles = {}
for profile_path in provisioning_profiles:
profile_data = self._LoadProvisioningProfile(profile_path)
app_id_pattern = profile_data.get(
'Entitlements', {}).get('application-identifier', '')
for team_identifier in profile_data.get('TeamIdentifier', []):
app_id = '%s.%s' % (team_identifier, bundle_identifier)
if fnmatch.fnmatch(app_id, app_id_pattern):
valid_provisioning_profiles[app_id_pattern] = (
profile_path, profile_data, team_identifier)
if not valid_provisioning_profiles:
print >>sys.stderr, (
'cannot find mobile provisioning for %s' % bundle_identifier)
sys.exit(1)
# If the user has multiple provisioning profiles installed that can be
# used for ${bundle_identifier}, pick the most specific one (ie. the
# provisioning profile whose pattern is the longest).
selected_key = max(valid_provisioning_profiles, key=lambda v: len(v))
return valid_provisioning_profiles[selected_key]
def _LoadProvisioningProfile(self, profile_path):
"""Extracts the plist embedded in a provisioning profile.
Args:
profile_path: string, path to the .mobileprovision file
Returns:
Content of the plist embedded in the provisioning profile as a dictionary.
"""
with tempfile.NamedTemporaryFile() as temp:
subprocess.check_call([
'security', 'cms', '-D', '-i', profile_path, '-o', temp.name])
return self._LoadPlistMaybeBinary(temp.name)
def _LoadPlistMaybeBinary(self, plist_path):
"""Loads into a memory a plist possibly encoded in binary format.
This is a wrapper around plistlib.readPlist that tries to convert the
plist to the XML format if it can't be parsed (assuming that it is in
the binary format).
Args:
plist_path: string, path to a plist file, in XML or binary format
Returns:
Content of the plist as a dictionary.
"""
try:
# First, try to read the file using plistlib that only supports XML,
# and if an exception is raised, convert a temporary copy to XML and
# load that copy.
return plistlib.readPlist(plist_path)
except:
pass
with tempfile.NamedTemporaryFile() as temp:
shutil.copy2(plist_path, temp.name)
subprocess.check_call(['plutil', '-convert', 'xml1', temp.name])
return plistlib.readPlist(temp.name)
def _GetSubstitutions(self, bundle_identifier, app_identifier_prefix):
"""Constructs a dictionary of variable substitutions for Entitlements.plist.
Args:
bundle_identifier: string, value of CFBundleIdentifier from Info.plist
app_identifier_prefix: string, value for AppIdentifierPrefix
Returns:
Dictionary of substitutions to apply when generating Entitlements.plist.
"""
return {
'CFBundleIdentifier': bundle_identifier,
'AppIdentifierPrefix': app_identifier_prefix,
}
def _GetCFBundleIdentifier(self):
"""Extracts CFBundleIdentifier value from Info.plist in the bundle.
Returns:
Value of CFBundleIdentifier in the Info.plist located in the bundle.
"""
info_plist_path = os.path.join(
os.environ['TARGET_BUILD_DIR'],
os.environ['INFOPLIST_PATH'])
info_plist_data = self._LoadPlistMaybeBinary(info_plist_path)
return info_plist_data['CFBundleIdentifier']
def _InstallEntitlements(self, entitlements, substitutions, overrides):
"""Generates and install the ${BundleName}.xcent entitlements file.
Expands variables "$(variable)" pattern in the source entitlements file,
add extra entitlements defined in the .mobileprovision file and the copy
the generated plist to "${BundlePath}.xcent".
Args:
entitlements: string, optional, path to the Entitlements.plist template
to use, defaults to "${SDKROOT}/Entitlements.plist"
substitutions: dictionary, variable substitutions
overrides: dictionary, values to add to the entitlements
Returns:
Path to the generated entitlements file.
"""
source_path = entitlements
target_path = os.path.join(
os.environ['BUILT_PRODUCTS_DIR'],
os.environ['PRODUCT_NAME'] + '.xcent')
if not source_path:
source_path = os.path.join(
os.environ['SDKROOT'],
'Entitlements.plist')
shutil.copy2(source_path, target_path)
data = self._LoadPlistMaybeBinary(target_path)
data = self._ExpandVariables(data, substitutions)
if overrides:
for key in overrides:
if key not in data:
data[key] = overrides[key]
plistlib.writePlist(data, target_path)
return target_path
def _ExpandVariables(self, data, substitutions):
"""Expands variables "$(variable)" in data.
Args:
data: object, can be either string, list or dictionary
substitutions: dictionary, variable substitutions to perform
Returns:
Copy of data where each references to "$(variable)" has been replaced
by the corresponding value found in substitutions, or left intact if
the key was not found.
"""
if isinstance(data, str):
for key, value in substitutions.iteritems():
data = data.replace('$(%s)' % key, value)
return data
if isinstance(data, list):
return [self._ExpandVariables(v, substitutions) for v in data]
if isinstance(data, dict):
return {k: self._ExpandVariables(data[k], substitutions) for k in data}
return data
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
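
_FindProvisioningProfile above keys each candidate profile by its application-identifier pattern and then keeps the longest matching pattern as the most specific one. A small sketch of that selection, with hypothetical team and bundle identifiers:

  import fnmatch

  team_identifier = 'ABCDE12345'            # hypothetical values
  bundle_identifier = 'com.example.app'
  app_id = '%s.%s' % (team_identifier, bundle_identifier)

  patterns = ['ABCDE12345.*', 'ABCDE12345.com.example.*']
  matching = [p for p in patterns if fnmatch.fnmatch(app_id, p)]
  print max(matching, key=len)              # the pattern closest to the bundle id wins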

tools/gyp/pylib/gyp/msvs_emulation.py (64)

@ -420,6 +420,7 @@ class MsvsSettings(object):
libflags.extend(self._GetAdditionalLibraryDirectories(
'VCLibrarianTool', config, gyp_to_build_path))
lib('LinkTimeCodeGeneration', map={'true': '/LTCG'})
lib('TargetMachine', map={'1': 'X86', '17': 'X64'}, prefix='/MACHINE:')
lib('AdditionalOptions')
return libflags
@ -441,6 +442,17 @@ class MsvsSettings(object):
if def_file:
ldflags.append('/DEF:"%s"' % def_file)
def GetPGDName(self, config, expand_special):
"""Gets the explicitly overridden pgd name for a target or returns None
if it's not overridden."""
config = self._TargetConfig(config)
output_file = self._Setting(
('VCLinkerTool', 'ProfileGuidedDatabase'), config)
if output_file:
output_file = expand_special(self.ConvertVSMacros(
output_file, config=config))
return output_file
def GetLdflags(self, config, gyp_to_build_path, expand_special,
manifest_base_name, is_executable):
"""Returns the flags that need to be added to link commands, and the
@ -455,20 +467,35 @@ class MsvsSettings(object):
ldflags.extend(self._GetAdditionalLibraryDirectories(
'VCLinkerTool', config, gyp_to_build_path))
ld('DelayLoadDLLs', prefix='/DELAYLOAD:')
ld('TreatLinkerWarningAsErrors', prefix='/WX',
map={'true': '', 'false': ':NO'})
out = self.GetOutputName(config, expand_special)
if out:
ldflags.append('/OUT:' + out)
pdb = self.GetPDBName(config, expand_special)
if pdb:
ldflags.append('/PDB:' + pdb)
pgd = self.GetPGDName(config, expand_special)
if pgd:
ldflags.append('/PGD:' + pgd)
map_file = self.GetMapFileName(config, expand_special)
ld('GenerateMapFile', map={'true': '/MAP:' + map_file if map_file
else '/MAP'})
ld('MapExports', map={'true': '/MAPINFO:EXPORTS'})
ld('AdditionalOptions', prefix='')
ld('SubSystem', map={'1': 'CONSOLE', '2': 'WINDOWS'}, prefix='/SUBSYSTEM:')
minimum_required_version = self._Setting(
('VCLinkerTool', 'MinimumRequiredVersion'), config, default='')
if minimum_required_version:
minimum_required_version = ',' + minimum_required_version
ld('SubSystem',
map={'1': 'CONSOLE%s' % minimum_required_version,
'2': 'WINDOWS%s' % minimum_required_version},
prefix='/SUBSYSTEM:')
ld('TerminalServerAware', map={'1': ':NO', '2': ''}, prefix='/TSAWARE')
ld('LinkIncremental', map={'1': ':NO', '2': ''}, prefix='/INCREMENTAL')
ld('BaseAddress', prefix='/BASE:')
ld('FixedBaseAddress', map={'1': ':NO', '2': ''}, prefix='/FIXED')
ld('RandomizedBaseAddress',
map={'1': ':NO', '2': ''}, prefix='/DYNAMICBASE')
@ -476,7 +503,10 @@ class MsvsSettings(object):
map={'1': ':NO', '2': ''}, prefix='/NXCOMPAT')
ld('OptimizeReferences', map={'1': 'NOREF', '2': 'REF'}, prefix='/OPT:')
ld('EnableCOMDATFolding', map={'1': 'NOICF', '2': 'ICF'}, prefix='/OPT:')
ld('LinkTimeCodeGeneration', map={'1': '/LTCG'})
ld('LinkTimeCodeGeneration',
map={'1': '', '2': ':PGINSTRUMENT', '3': ':PGOPTIMIZE',
'4': ':PGUPDATE'},
prefix='/LTCG')
ld('IgnoreDefaultLibraryNames', prefix='/NODEFAULTLIB:')
ld('ResourceOnlyDLL', map={'true': '/NOENTRY'})
ld('EntryPointSymbol', prefix='/ENTRY:')
@ -501,18 +531,26 @@ class MsvsSettings(object):
ldflags.append('/NXCOMPAT')
have_def_file = filter(lambda x: x.startswith('/DEF:'), ldflags)
manifest_flags, intermediate_manifest_file = self._GetLdManifestFlags(
config, manifest_base_name, is_executable and not have_def_file)
manifest_flags, manifest_files = self._GetLdManifestFlags(
config, manifest_base_name, gyp_to_build_path,
is_executable and not have_def_file)
ldflags.extend(manifest_flags)
manifest_files = self._GetAdditionalManifestFiles(config, gyp_to_build_path)
manifest_files.append(intermediate_manifest_file)
return ldflags, manifest_files
def _GetLdManifestFlags(self, config, name, allow_isolation):
def _GetLdManifestFlags(self, config, name, gyp_to_build_path,
allow_isolation):
"""Returns the set of flags that need to be added to the link to generate
a default manifest, as well as the name of the generated file."""
# The manifest is generated by default.
a default manifest, as well as the list of all the manifest files to be
merged by the manifest tool."""
generate_manifest = self._Setting(('VCLinkerTool', 'GenerateManifest'),
config,
default='true')
if generate_manifest != 'true':
# This means not only that the linker should not generate the intermediate
# manifest but also that the manifest tool should do nothing even when
# additional manifests are specified.
return ['/MANIFEST:NO'], []
output_name = name + '.intermediate.manifest'
flags = [
'/MANIFEST',
@ -540,7 +578,11 @@ class MsvsSettings(object):
if allow_isolation:
flags.append('/ALLOWISOLATION')
return flags, output_name
manifest_files = [output_name]
manifest_files += self._GetAdditionalManifestFiles(config,
gyp_to_build_path)
return flags, manifest_files
def _GetAdditionalManifestFiles(self, config, gyp_to_build_path):
"""Gets additional manifest files that are added to the default one

tools/gyp/pylib/gyp/win_tool.py (49)

@ -10,12 +10,16 @@ These functions are executed via gyp-win-tool when using the ninja generator.
"""
import os
import re
import shutil
import subprocess
import sys
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
# A regex matching an argument corresponding to a PDB filename passed as an
# argument to link.exe.
_LINK_EXE_PDB_ARG = re.compile('/PDB:(?P<pdb>.+\.exe\.pdb)$', re.IGNORECASE)
def main(args):
executor = WinTool()
@ -28,6 +32,35 @@ class WinTool(object):
"""This class performs all the Windows tooling steps. The methods can either
be executed directly, or dispatched from an argument list."""
def _MaybeUseSeparateMspdbsrv(self, env, args):
"""Allows to use a unique instance of mspdbsrv.exe for the linkers linking
an .exe target if GYP_USE_SEPARATE_MSPDBSRV has been set."""
if not os.environ.get('GYP_USE_SEPARATE_MSPDBSRV'):
return
if len(args) < 1:
raise Exception("Not enough arguments")
if args[0] != 'link.exe':
return
# Checks if this linker produces a PDB for an .exe target. If so use the
# name of this PDB to generate an endpoint name for mspdbsrv.exe.
endpoint_name = None
for arg in args:
m = _LINK_EXE_PDB_ARG.match(arg)
if m:
endpoint_name = '%s_%d' % (m.group('pdb'), os.getpid())
break
if endpoint_name is None:
return
# Adds the appropriate environment variable. This will be read by link.exe
# to know which instance of mspdbsrv.exe it should connect to (if it's
# not set then the default endpoint is used).
env['_MSPDBSRV_ENDPOINT_'] = endpoint_name
def Dispatch(self, args):
"""Dispatches a string command to a method."""
if len(args) < 1:
@ -71,13 +104,17 @@ class WinTool(object):
This happens when there are exports from the dll or exe.
"""
env = self._GetEnv(arch)
popen = subprocess.Popen(args, shell=True, env=env,
stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
out, _ = popen.communicate()
self._MaybeUseSeparateMspdbsrv(env, args)
link = subprocess.Popen(args,
shell=True,
env=env,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT)
out, _ = link.communicate()
for line in out.splitlines():
if not line.startswith(' Creating library '):
print line
return popen.returncode
return link.returncode
def ExecManifestWrapper(self, arch, *args):
"""Run manifest tool with environment set. Strip out undesirable warning
@ -168,9 +205,7 @@ class WinTool(object):
env = self._GetEnv(arch)
args = open(rspfile).read()
dir = dir[0] if dir else None
popen = subprocess.Popen(args, shell=True, env=env, cwd=dir)
popen.wait()
return popen.returncode
return subprocess.call(args, shell=True, env=env, cwd=dir)
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
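
The _MaybeUseSeparateMspdbsrv helper above derives a per-link endpoint name from the /PDB: argument, so that each link.exe invocation can talk to its own mspdbsrv.exe instance when GYP_USE_SEPARATE_MSPDBSRV is set. A sketch of the endpoint construction, with a hypothetical link command line:

  import os
  import re

  _LINK_EXE_PDB_ARG = re.compile('/PDB:(?P<pdb>.+\.exe\.pdb)$', re.IGNORECASE)

  args = ['link.exe', '/OUT:app.exe', '/PDB:app.exe.pdb']   # hypothetical command
  for arg in args:
    m = _LINK_EXE_PDB_ARG.match(arg)
    if m:
      print '_MSPDBSRV_ENDPOINT_=%s_%d' % (m.group('pdb'), os.getpid())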

tools/gyp/pylib/gyp/xcode_emulation.py (142)

@ -9,11 +9,13 @@ other build systems, such as make and ninja.
import copy
import gyp.common
import os
import os.path
import re
import shlex
import subprocess
import sys
import tempfile
from gyp.common import GypError
class XcodeSettings(object):
@ -22,6 +24,7 @@ class XcodeSettings(object):
# Populated lazily by _SdkPath(). Shared by all XcodeSettings, so cached
# at class-level for efficiency.
_sdk_path_cache = {}
_sdk_root_cache = {}
# Populated lazily by GetExtraPlistItems(). Shared by all XcodeSettings, so
# cached at class-level for efficiency.
@ -31,6 +34,10 @@ class XcodeSettings(object):
# cached at class-level for efficiency.
_codesigning_key_cache = {}
# Populated lazily by _XcodeVersion. Shared by all XcodeSettings, so cached
# at class-level for efficiency.
_xcode_version_cache = ()
def __init__(self, spec):
self.spec = spec
@ -262,7 +269,7 @@ class XcodeSettings(object):
"""Returns the architectures this target should be built for."""
# TODO: Look at VALID_ARCHS, ONLY_ACTIVE_ARCH; possibly set
# CURRENT_ARCH / NATIVE_ARCH env vars?
return self.xcode_settings[configname].get('ARCHS', ['i386'])
return self.xcode_settings[configname].get('ARCHS', [self._DefaultArch()])
def _GetStdout(self, cmdlist):
job = subprocess.Popen(cmdlist, stdout=subprocess.PIPE)
@ -284,9 +291,14 @@ class XcodeSettings(object):
sdk_root = self._SdkRoot(configname)
if sdk_root.startswith('/'):
return sdk_root
return self._XcodeSdkPath(sdk_root)
def _XcodeSdkPath(self, sdk_root):
if sdk_root not in XcodeSettings._sdk_path_cache:
XcodeSettings._sdk_path_cache[sdk_root] = self._GetSdkVersionInfoItem(
sdk_root, 'Path')
sdk_path = self._GetSdkVersionInfoItem(sdk_root, 'Path')
XcodeSettings._sdk_path_cache[sdk_root] = sdk_path
if sdk_root:
XcodeSettings._sdk_root_cache[sdk_path] = sdk_root
return XcodeSettings._sdk_path_cache[sdk_root]
def _AppendPlatformVersionMinFlags(self, lst):
@ -377,7 +389,7 @@ class XcodeSettings(object):
if arch is not None:
archs = [arch]
else:
archs = self._Settings().get('ARCHS', ['i386'])
archs = self._Settings().get('ARCHS', [self._DefaultArch()])
if len(archs) != 1:
# TODO: Supporting fat binaries will be annoying.
self._WarnUnimplemented('ARCHS')
@ -630,7 +642,7 @@ class XcodeSettings(object):
if arch is not None:
archs = [arch]
else:
archs = self._Settings().get('ARCHS', ['i386'])
archs = self._Settings().get('ARCHS', [self._DefaultArch()])
if len(archs) != 1:
# TODO: Supporting fat binaries will be annoying.
self._WarnUnimplemented('ARCHS')
@ -780,33 +792,38 @@ class XcodeSettings(object):
if not (self.isIOS and self.spec['type'] == "executable"):
return []
identity = self.xcode_settings[configname].get('CODE_SIGN_IDENTITY', '')
if identity == '':
settings = self.xcode_settings[configname]
key = self._GetIOSCodeSignIdentityKey(settings)
if not key:
return []
# Warn for any unimplemented signing xcode keys.
unimpl = ['OTHER_CODE_SIGN_FLAGS']
unimpl = set(unimpl) & set(self.xcode_settings[configname].keys())
if unimpl:
print 'Warning: Some codesign keys not implemented, ignoring: %s' % (
', '.join(sorted(unimpl)))
return ['%s code-sign-bundle "%s" "%s" "%s" "%s"' % (
os.path.join('${TARGET_BUILD_DIR}', 'gyp-mac-tool'), key,
settings.get('CODE_SIGN_RESOURCE_RULES_PATH', ''),
settings.get('CODE_SIGN_ENTITLEMENTS', ''),
settings.get('PROVISIONING_PROFILE', ''))
]
def _GetIOSCodeSignIdentityKey(self, settings):
identity = settings.get('CODE_SIGN_IDENTITY')
if not identity:
return None
if identity not in XcodeSettings._codesigning_key_cache:
proc = subprocess.Popen(['security', 'find-identity', '-p', 'codesigning',
'-v'], stdout=subprocess.PIPE)
output = proc.communicate()[0].strip()
key = None
for item in output.split("\n"):
if identity in item:
assert key == None, (
"Multiple codesigning identities for identity: %s" %
identity)
key = item.split(' ')[1]
XcodeSettings._codesigning_key_cache[identity] = key
key = XcodeSettings._codesigning_key_cache[identity]
if key:
# Warn for any unimplemented signing xcode keys.
unimpl = ['CODE_SIGN_RESOURCE_RULES_PATH', 'OTHER_CODE_SIGN_FLAGS',
'CODE_SIGN_ENTITLEMENTS']
keys = set(self.xcode_settings[configname].keys())
unimpl = set(unimpl) & keys
if unimpl:
print 'Warning: Some codesign keys not implemented, ignoring:', \
' '.join(unimpl)
return ['codesign --force --sign %s %s' % (key, output_binary)]
return []
output = subprocess.check_output(
['security', 'find-identity', '-p', 'codesigning', '-v'])
for line in output.splitlines():
if identity in line:
assert identity not in XcodeSettings._codesigning_key_cache, (
"Multiple codesigning identities for identity: %s" % identity)
XcodeSettings._codesigning_key_cache[identity] = line.split()[1]
return XcodeSettings._codesigning_key_cache.get(identity, '')
def AddImplicitPostbuilds(self, configname, output, output_binary,
postbuilds=[], quiet=False):
@ -848,14 +865,16 @@ class XcodeSettings(object):
# Component versions: DevToolsCore-1809.0; DevToolsSupport-1806.0
# BuildVersion: 10M2518
# Convert that to '0463', '4H1503'.
version_list = self._GetStdout(['xcodebuild', '-version']).splitlines()
version = version_list[0]
build = version_list[-1]
# Be careful to convert "4.2" to "0420":
version = version.split()[-1].replace('.', '')
version = (version + '0' * (3 - len(version))).zfill(4)
build = build.split()[-1]
return version, build
if len(XcodeSettings._xcode_version_cache) == 0:
version_list = self._GetStdout(['xcodebuild', '-version']).splitlines()
version = version_list[0]
build = version_list[-1]
# Be careful to convert "4.2" to "0420":
version = version.split()[-1].replace('.', '')
version = (version + '0' * (3 - len(version))).zfill(4)
build = build.split()[-1]
XcodeSettings._xcode_version_cache = (version, build)
return XcodeSettings._xcode_version_cache
def _XcodeIOSDeviceFamily(self, configname):
family = self.xcode_settings[configname].get('TARGETED_DEVICE_FAMILY', '1')
@ -872,6 +891,8 @@ class XcodeSettings(object):
cache['DTXcodeBuild'] = xcode_build
sdk_root = self._SdkRoot(configname)
if not sdk_root:
sdk_root = self._DefaultSdkRoot()
cache['DTSDKName'] = sdk_root
if xcode >= '0430':
cache['DTSDKBuild'] = self._GetSdkVersionInfoItem(
@ -896,6 +917,51 @@ class XcodeSettings(object):
items['UIDeviceFamily'] = self._XcodeIOSDeviceFamily(configname)
return items
def _DefaultSdkRoot(self):
"""Returns the default SDKROOT to use.
Prior to version 5.0.0, if SDKROOT was not explicitly set in the Xcode
project, then the environment variable was empty. Starting with this
version, Xcode uses the name of the newest SDK installed.
"""
if self._XcodeVersion() < '0500':
return ''
default_sdk_path = self._XcodeSdkPath('')
default_sdk_root = XcodeSettings._sdk_root_cache.get(default_sdk_path)
if default_sdk_root:
return default_sdk_root
all_sdks = self._GetStdout(['xcodebuild', '-showsdks'])
for line in all_sdks.splitlines():
items = line.split()
if len(items) >= 3 and items[-2] == '-sdk':
sdk_root = items[-1]
sdk_path = self._XcodeSdkPath(sdk_root)
if sdk_path == default_sdk_path:
return sdk_root
return ''
def _DefaultArch(self):
# For Mac projects, Xcode changed the default value used when ARCHS is not
# set from "i386" to "x86_64".
#
# For iOS projects, if ARCHS is unset, it defaults to "armv7 armv7s" when
# building for a device, and the simulator binaries are always build for
# "i386".
#
# For new projects, ARCHS is set to $(ARCHS_STANDARD_INCLUDING_64_BIT),
# which correspond to "armv7 armv7s arm64", and when building the simulator
# the architecture is either "i386" or "x86_64" depending on the simulated
# device (respectively 32-bit or 64-bit device).
#
# Since the value returned by this function is only used when ARCHS is not
# set, then on iOS we return "i386", as the default xcode project generator
# does not set ARCHS if it is not set in the .gyp file.
if self.isIOS:
return 'i386'
version, build = self._XcodeVersion()
if version >= '0500':
return 'x86_64'
return 'i386'
class MacPrefixHeader(object):
"""A class that helps with emulating Xcode's GCC_PREFIX_HEADER feature.
