Browse Source

deps: update gyp to 828ce09

v0.11.10-release
Fedor Indutny 11 years ago
parent
commit
96dffb1217
  1. 8
      tools/gyp/pylib/gyp/common.py
  2. 4
      tools/gyp/pylib/gyp/generator/android.py
  3. 9
      tools/gyp/pylib/gyp/generator/cmake.py
  4. 6
      tools/gyp/pylib/gyp/generator/eclipse.py
  5. 17
      tools/gyp/pylib/gyp/generator/make.py
  6. 35
      tools/gyp/pylib/gyp/generator/msvs.py
  7. 87
      tools/gyp/pylib/gyp/generator/ninja.py
  8. 3
      tools/gyp/pylib/gyp/input.py
  9. 74
      tools/gyp/pylib/gyp/msvs_emulation.py
  10. 287
      tools/gyp/pylib/gyp/ordered_dict.py
  11. 77
      tools/gyp/pylib/gyp/win_tool.py
  12. 86
      tools/gyp/pylib/gyp/xcode_emulation.py

8
tools/gyp/pylib/gyp/common.py

@@ -391,6 +391,14 @@ def WriteOnDiff(filename):
return Writer() return Writer()
def EnsureDirExists(path):
  """Make sure the directory for |path| exists."""
  target = os.path.dirname(path)
  try:
    os.makedirs(target)
  except OSError:
    # Best-effort (mkdir -p semantics): the directory may already exist,
    # or |path| may have no directory component at all -- ignore either way.
    pass
def GetFlavor(params): def GetFlavor(params):
"""Returns |params.flavor| if it's set, the system's default flavor else.""" """Returns |params.flavor| if it's set, the system's default flavor else."""
flavors = { flavors = {

4
tools/gyp/pylib/gyp/generator/android.py

@ -145,7 +145,7 @@ class AndroidMkWriter(object):
spec, configs: gyp info spec, configs: gyp info
part_of_all: flag indicating this target is part of 'all' part_of_all: flag indicating this target is part of 'all'
""" """
make.ensure_directory_exists(output_filename) gyp.common.EnsureDirExists(output_filename)
self.fp = open(output_filename, 'w') self.fp = open(output_filename, 'w')
@ -983,7 +983,7 @@ def GenerateOutput(target_list, target_dicts, data, params):
makefile_path = os.path.join(options.toplevel_dir, makefile_name) makefile_path = os.path.join(options.toplevel_dir, makefile_name)
assert not options.generator_output, ( assert not options.generator_output, (
'The Android backend does not support options.generator_output.') 'The Android backend does not support options.generator_output.')
make.ensure_directory_exists(makefile_path) gyp.common.EnsureDirExists(makefile_path)
root_makefile = open(makefile_path, 'w') root_makefile = open(makefile_path, 'w')
root_makefile.write(header) root_makefile.write(header)

9
tools/gyp/pylib/gyp/generator/cmake.py

@ -118,13 +118,6 @@ def NormjoinPath(base_path, rel_path):
return os.path.normpath(os.path.join(base_path, rel_path)) return os.path.normpath(os.path.join(base_path, rel_path))
def EnsureDirectoryExists(path):
  """Python version of 'mkdir -p': create the parent directory of |path|.

  Does nothing when |path| has no directory component or when the parent
  directory already exists.
  """
  # Renamed from camelCase 'dirPath' to match the snake_case convention
  # used elsewhere in this file.
  dir_path = os.path.dirname(path)
  if dir_path and not os.path.exists(dir_path):
    # NOTE(review): exists-then-makedirs is racy under concurrent
    # generators; acceptable here since gyp generation is single-process.
    os.makedirs(dir_path)
def CMakeStringEscape(a): def CMakeStringEscape(a):
"""Escapes the string 'a' for use inside a CMake string. """Escapes the string 'a' for use inside a CMake string.
@ -1041,7 +1034,7 @@ def GenerateOutputForConfig(target_list, target_dicts, data,
toplevel_build = os.path.join(options.toplevel_dir, build_dir) toplevel_build = os.path.join(options.toplevel_dir, build_dir)
output_file = os.path.join(toplevel_build, 'CMakeLists.txt') output_file = os.path.join(toplevel_build, 'CMakeLists.txt')
EnsureDirectoryExists(output_file) gyp.common.EnsureDirExists(output_file)
output = open(output_file, 'w') output = open(output_file, 'w')
output.write('cmake_minimum_required(VERSION 2.8.8 FATAL_ERROR)\n') output.write('cmake_minimum_required(VERSION 2.8.8 FATAL_ERROR)\n')

6
tools/gyp/pylib/gyp/generator/eclipse.py

@ -270,9 +270,9 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params,
shared_intermediate_dirs = [os.path.join(toplevel_build, 'obj', 'gen'), shared_intermediate_dirs = [os.path.join(toplevel_build, 'obj', 'gen'),
os.path.join(toplevel_build, 'gen')] os.path.join(toplevel_build, 'gen')]
if not os.path.exists(toplevel_build): out_name = os.path.join(toplevel_build, 'eclipse-cdt-settings.xml')
os.makedirs(toplevel_build) gyp.common.EnsureDirExists(out_name)
out = open(os.path.join(toplevel_build, 'eclipse-cdt-settings.xml'), 'w') out = open(out_name, 'w')
out.write('<?xml version="1.0" encoding="UTF-8"?>\n') out.write('<?xml version="1.0" encoding="UTF-8"?>\n')
out.write('<cdtprojectproperties>\n') out.write('<cdtprojectproperties>\n')

17
tools/gyp/pylib/gyp/generator/make.py

@ -117,12 +117,6 @@ def CalculateGeneratorInputInfo(params):
} }
def ensure_directory_exists(path):
  """Create the directory containing |path| if it does not exist yet.

  A no-op when |path| has no directory component or the directory is
  already present.
  """
  # Renamed the local from 'dir' (which shadows the builtin) to 'dirname'.
  dirname = os.path.dirname(path)
  if dirname and not os.path.exists(dirname):
    os.makedirs(dirname)
# The .d checking code below uses these functions: # The .d checking code below uses these functions:
# wildcard, sort, foreach, shell, wordlist # wildcard, sort, foreach, shell, wordlist
# wildcard can handle spaces, the rest can't. # wildcard can handle spaces, the rest can't.
@ -691,7 +685,7 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
spec, configs: gyp info spec, configs: gyp info
part_of_all: flag indicating this target is part of 'all' part_of_all: flag indicating this target is part of 'all'
""" """
ensure_directory_exists(output_filename) gyp.common.EnsureDirExists(output_filename)
self.fp = open(output_filename, 'w') self.fp = open(output_filename, 'w')
@ -820,7 +814,7 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
targets: list of "all" targets for this sub-project targets: list of "all" targets for this sub-project
build_dir: build output directory, relative to the sub-project build_dir: build output directory, relative to the sub-project
""" """
ensure_directory_exists(output_filename) gyp.common.EnsureDirExists(output_filename)
self.fp = open(output_filename, 'w') self.fp = open(output_filename, 'w')
self.fp.write(header) self.fp.write(header)
# For consistency with other builders, put sub-project build output in the # For consistency with other builders, put sub-project build output in the
@ -2056,8 +2050,9 @@ def GenerateOutput(target_list, target_dicts, data, params):
make_global_settings += ( make_global_settings += (
'ifneq (,$(filter $(origin %s), undefined default))\n' % key) 'ifneq (,$(filter $(origin %s), undefined default))\n' % key)
# Let gyp-time envvars win over global settings. # Let gyp-time envvars win over global settings.
if key in os.environ: env_key = key.replace('.', '_') # CC.host -> CC_host
value = os.environ[key] if env_key in os.environ:
value = os.environ[env_key]
make_global_settings += ' %s = %s\n' % (key, value) make_global_settings += ' %s = %s\n' % (key, value)
make_global_settings += 'endif\n' make_global_settings += 'endif\n'
else: else:
@ -2067,7 +2062,7 @@ def GenerateOutput(target_list, target_dicts, data, params):
header_params['make_global_settings'] = make_global_settings header_params['make_global_settings'] = make_global_settings
ensure_directory_exists(makefile_path) gyp.common.EnsureDirExists(makefile_path)
root_makefile = open(makefile_path, 'w') root_makefile = open(makefile_path, 'w')
root_makefile.write(SHARED_HEADER % header_params) root_makefile.write(SHARED_HEADER % header_params)
# Currently any versions have the same effect, but in future the behavior # Currently any versions have the same effect, but in future the behavior

35
tools/gyp/pylib/gyp/generator/msvs.py

@ -22,6 +22,16 @@ import gyp.MSVSUtil as MSVSUtil
import gyp.MSVSVersion as MSVSVersion import gyp.MSVSVersion as MSVSVersion
from gyp.common import GypError from gyp.common import GypError
# TODO: Remove once bots are on 2.7, http://crbug.com/241769
def _import_OrderedDict():
import collections
try:
return collections.OrderedDict
except AttributeError:
import gyp.ordered_dict
return gyp.ordered_dict.OrderedDict
OrderedDict = _import_OrderedDict()
# Regular expression for validating Visual Studio GUIDs. If the GUID # Regular expression for validating Visual Studio GUIDs. If the GUID
# contains lowercase hex letters, MSVS will be fine. However, # contains lowercase hex letters, MSVS will be fine. However,
@ -220,7 +230,6 @@ def _ConvertSourcesToFilterHierarchy(sources, prefix=None, excluded=None,
if not prefix: prefix = [] if not prefix: prefix = []
result = [] result = []
excluded_result = [] excluded_result = []
folders = collections.OrderedDict()
# Gather files into the final result, excluded, or folders. # Gather files into the final result, excluded, or folders.
for s in sources: for s in sources:
if len(s) == 1: if len(s) == 1:
@ -230,22 +239,16 @@ def _ConvertSourcesToFilterHierarchy(sources, prefix=None, excluded=None,
else: else:
result.append(filename) result.append(filename)
else: else:
if not folders.get(s[0]): contents = _ConvertSourcesToFilterHierarchy([s[1:]], prefix + [s[0]],
folders[s[0]] = [] excluded=excluded,
folders[s[0]].append(s[1:]) list_excluded=list_excluded)
contents = MSVSProject.Filter(s[0], contents=contents)
result.append(contents)
# Add a folder for excluded files. # Add a folder for excluded files.
if excluded_result and list_excluded: if excluded_result and list_excluded:
excluded_folder = MSVSProject.Filter('_excluded_files', excluded_folder = MSVSProject.Filter('_excluded_files',
contents=excluded_result) contents=excluded_result)
result.append(excluded_folder) result.append(excluded_folder)
# Populate all the folders.
for f in folders:
contents = _ConvertSourcesToFilterHierarchy(folders[f], prefix=prefix + [f],
excluded=excluded,
list_excluded=list_excluded)
contents = MSVSProject.Filter(f, contents=contents)
result.append(contents)
return result return result
@ -941,9 +944,7 @@ def _GenerateMSVSProject(project, options, version, generator_flags):
generator_flags: dict of generator-specific flags. generator_flags: dict of generator-specific flags.
""" """
spec = project.spec spec = project.spec
vcproj_dir = os.path.dirname(project.path) gyp.common.EnsureDirExists(project.path)
if vcproj_dir and not os.path.exists(vcproj_dir):
os.makedirs(vcproj_dir)
platforms = _GetUniquePlatforms(spec) platforms = _GetUniquePlatforms(spec)
p = MSVSProject.Writer(project.path, version, spec['target_name'], p = MSVSProject.Writer(project.path, version, spec['target_name'],
@ -3096,9 +3097,7 @@ def _GenerateMSBuildProject(project, options, version, generator_flags):
spec = project.spec spec = project.spec
configurations = spec['configurations'] configurations = spec['configurations']
project_dir, project_file_name = os.path.split(project.path) project_dir, project_file_name = os.path.split(project.path)
msbuildproj_dir = os.path.dirname(project.path) gyp.common.EnsureDirExists(project.path)
if msbuildproj_dir and not os.path.exists(msbuildproj_dir):
os.makedirs(msbuildproj_dir)
# Prepare list of sources and excluded sources. # Prepare list of sources and excluded sources.
gyp_path = _NormalizedSource(project.build_file) gyp_path = _NormalizedSource(project.build_file)
relative_path_of_gyp_file = gyp.common.RelativePath(gyp_path, project_dir) relative_path_of_gyp_file = gyp.common.RelativePath(gyp_path, project_dir)

87
tools/gyp/pylib/gyp/generator/ninja.py

@ -1039,13 +1039,18 @@ class NinjaWriter:
elif self.flavor == 'win': elif self.flavor == 'win':
manifest_name = self.GypPathToUniqueOutput( manifest_name = self.GypPathToUniqueOutput(
self.ComputeOutputFileName(spec)) self.ComputeOutputFileName(spec))
ldflags, manifest_files = self.msvs_settings.GetLdflags(config_name, ldflags, intermediate_manifest, manifest_files = \
self.GypPathToNinja, self.ExpandSpecial, manifest_name, is_executable) self.msvs_settings.GetLdflags(config_name, self.GypPathToNinja,
self.ExpandSpecial, manifest_name,
is_executable, self.toplevel_build)
ldflags = env_ldflags + ldflags ldflags = env_ldflags + ldflags
self.WriteVariableList(ninja_file, 'manifests', manifest_files) self.WriteVariableList(ninja_file, 'manifests', manifest_files)
implicit_deps = implicit_deps.union(manifest_files)
if intermediate_manifest:
self.WriteVariableList(
ninja_file, 'intermediatemanifest', [intermediate_manifest])
command_suffix = _GetWinLinkRuleNameSuffix( command_suffix = _GetWinLinkRuleNameSuffix(
self.msvs_settings.IsEmbedManifest(config_name), self.msvs_settings.IsEmbedManifest(config_name))
self.msvs_settings.IsLinkIncremental(config_name))
def_file = self.msvs_settings.GetDefFile(self.GypPathToNinja) def_file = self.msvs_settings.GetDefFile(self.GypPathToNinja)
if def_file: if def_file:
implicit_deps.add(def_file) implicit_deps.add(def_file)
@ -1505,10 +1510,7 @@ def CalculateGeneratorInputInfo(params):
def OpenOutput(path, mode='w'): def OpenOutput(path, mode='w'):
"""Open |path| for writing, creating directories if necessary.""" """Open |path| for writing, creating directories if necessary."""
try: gyp.common.EnsureDirExists(path)
os.makedirs(os.path.dirname(path))
except OSError:
pass
return open(path, mode) return open(path, mode)
@ -1567,63 +1569,28 @@ def GetDefaultConcurrentLinks():
return 1 return 1
def _GetWinLinkRuleNameSuffix(embed_manifest, link_incremental): def _GetWinLinkRuleNameSuffix(embed_manifest):
"""Returns the suffix used to select an appropriate linking rule depending on """Returns the suffix used to select an appropriate linking rule depending on
whether the manifest embedding and/or incremental linking is enabled.""" whether the manifest embedding is enabled."""
suffix = '' return '_embed' if embed_manifest else ''
if embed_manifest:
suffix += '_embed'
if link_incremental:
suffix += '_inc'
return suffix
def _AddWinLinkRules(master_ninja, embed_manifest, link_incremental): def _AddWinLinkRules(master_ninja, embed_manifest):
"""Adds link rules for Windows platform to |master_ninja|.""" """Adds link rules for Windows platform to |master_ninja|."""
def FullLinkCommand(ldcmd, out, binary_type): def FullLinkCommand(ldcmd, out, binary_type):
"""Returns a one-liner written for cmd.exe to handle multiphase linker
operations including manifest file generation. The command will be
structured as follows:
cmd /c (linkcmd1 a b) && (linkcmd2 x y) && ... &&
if not "$manifests"=="" ((manifestcmd1 a b) && (manifestcmd2 x y) && ... )
Note that $manifests becomes empty when no manifest file is generated."""
link_commands = ['%(ldcmd)s',
'if exist %(out)s.manifest del %(out)s.manifest']
mt_cmd = ('%(python)s gyp-win-tool manifest-wrapper'
' $arch $mt -nologo -manifest $manifests')
if embed_manifest and not link_incremental:
# Embed manifest into a binary. If incremental linking is enabled,
# embedding is postponed to the re-linking stage (see below).
mt_cmd += ' -outputresource:%(out)s;%(resname)s'
else:
# Save manifest as an external file.
mt_cmd += ' -out:%(out)s.manifest'
manifest_commands = [mt_cmd]
if link_incremental:
# There is no point in generating separate rule for the case when
# incremental linking is enabled, but manifest embedding is disabled.
# In that case the basic rule should be used (e.g. 'link').
# See also implementation of _GetWinLinkRuleNameSuffix().
assert embed_manifest
# Make .rc file out of manifest, compile it to .res file and re-link.
manifest_commands += [
('%(python)s gyp-win-tool manifest-to-rc $arch %(out)s.manifest'
' %(out)s.manifest.rc %(resname)s'),
'%(python)s gyp-win-tool rc-wrapper $arch $rc %(out)s.manifest.rc',
'%(ldcmd)s %(out)s.manifest.res']
cmd = 'cmd /c %s && if not "$manifests"=="" (%s)' % (
' && '.join(['(%s)' % c for c in link_commands]),
' && '.join(['(%s)' % c for c in manifest_commands]))
resource_name = { resource_name = {
'exe': '1', 'exe': '1',
'dll': '2', 'dll': '2',
}[binary_type] }[binary_type]
return cmd % {'python': sys.executable, return '%(python)s gyp-win-tool link-with-manifests $arch %(embed)s ' \
'out': out, '%(out)s "%(ldcmd)s" %(resname)s $mt $rc "$intermediatemanifest" ' \
'ldcmd': ldcmd, '$manifests' % {
'resname': resource_name} 'python': sys.executable,
'out': out,
rule_name_suffix = _GetWinLinkRuleNameSuffix(embed_manifest, link_incremental) 'ldcmd': ldcmd,
'resname': resource_name,
'embed': embed_manifest }
rule_name_suffix = _GetWinLinkRuleNameSuffix(embed_manifest)
dlldesc = 'LINK%s(DLL) $dll' % rule_name_suffix.upper() dlldesc = 'LINK%s(DLL) $dll' % rule_name_suffix.upper()
dllcmd = ('%s gyp-win-tool link-wrapper $arch ' dllcmd = ('%s gyp-win-tool link-wrapper $arch '
'$ld /nologo $implibflag /DLL /OUT:$dll ' '$ld /nologo $implibflag /DLL /OUT:$dll '
@ -1915,12 +1882,8 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params,
sys.executable), sys.executable),
rspfile='$out.rsp', rspfile='$out.rsp',
rspfile_content='$in_newline $libflags') rspfile_content='$in_newline $libflags')
_AddWinLinkRules(master_ninja, embed_manifest=True, link_incremental=True) _AddWinLinkRules(master_ninja, embed_manifest=True)
_AddWinLinkRules(master_ninja, embed_manifest=True, link_incremental=False) _AddWinLinkRules(master_ninja, embed_manifest=False)
_AddWinLinkRules(master_ninja, embed_manifest=False, link_incremental=False)
# Do not generate rules for embed_manifest=False and link_incremental=True
# because in that case rules for (False, False) should be used (see
# implementation of _GetWinLinkRuleNameSuffix()).
else: else:
master_ninja.rule( master_ninja.rule(
'objc', 'objc',

3
tools/gyp/pylib/gyp/input.py

@ -822,8 +822,7 @@ def ExpandVariables(input, phase, variables, build_file):
rel_build_file_dir = build_file_dir rel_build_file_dir = build_file_dir
qualified_out_dir = generator_filelist_paths['qualified_out_dir'] qualified_out_dir = generator_filelist_paths['qualified_out_dir']
path = os.path.join(qualified_out_dir, rel_build_file_dir, replacement) path = os.path.join(qualified_out_dir, rel_build_file_dir, replacement)
if not os.path.isdir(os.path.dirname(path)): gyp.common.EnsureDirExists(path)
os.makedirs(os.path.dirname(path))
replacement = gyp.common.RelativePath(path, build_file_dir) replacement = gyp.common.RelativePath(path, build_file_dir)
f = gyp.common.WriteOnDiff(path) f = gyp.common.WriteOnDiff(path)

74
tools/gyp/pylib/gyp/msvs_emulation.py

@ -454,7 +454,7 @@ class MsvsSettings(object):
return output_file return output_file
def GetLdflags(self, config, gyp_to_build_path, expand_special, def GetLdflags(self, config, gyp_to_build_path, expand_special,
manifest_base_name, is_executable): manifest_base_name, is_executable, build_dir):
"""Returns the flags that need to be added to link commands, and the """Returns the flags that need to be added to link commands, and the
manifest files.""" manifest files."""
config = self._TargetConfig(config) config = self._TargetConfig(config)
@ -502,6 +502,7 @@ class MsvsSettings(object):
ld('DataExecutionPrevention', ld('DataExecutionPrevention',
map={'1': ':NO', '2': ''}, prefix='/NXCOMPAT') map={'1': ':NO', '2': ''}, prefix='/NXCOMPAT')
ld('OptimizeReferences', map={'1': 'NOREF', '2': 'REF'}, prefix='/OPT:') ld('OptimizeReferences', map={'1': 'NOREF', '2': 'REF'}, prefix='/OPT:')
ld('ForceSymbolReferences', prefix='/INCLUDE:')
ld('EnableCOMDATFolding', map={'1': 'NOICF', '2': 'ICF'}, prefix='/OPT:') ld('EnableCOMDATFolding', map={'1': 'NOICF', '2': 'ICF'}, prefix='/OPT:')
ld('LinkTimeCodeGeneration', ld('LinkTimeCodeGeneration',
map={'1': '', '2': ':PGINSTRUMENT', '3': ':PGOPTIMIZE', map={'1': '', '2': ':PGINSTRUMENT', '3': ':PGOPTIMIZE',
@ -531,17 +532,21 @@ class MsvsSettings(object):
ldflags.append('/NXCOMPAT') ldflags.append('/NXCOMPAT')
have_def_file = filter(lambda x: x.startswith('/DEF:'), ldflags) have_def_file = filter(lambda x: x.startswith('/DEF:'), ldflags)
manifest_flags, manifest_files = self._GetLdManifestFlags( manifest_flags, intermediate_manifest, manifest_files = \
config, manifest_base_name, gyp_to_build_path, self._GetLdManifestFlags(config, manifest_base_name, gyp_to_build_path,
is_executable and not have_def_file) is_executable and not have_def_file, build_dir)
ldflags.extend(manifest_flags) ldflags.extend(manifest_flags)
return ldflags, manifest_files return ldflags, intermediate_manifest, manifest_files
def _GetLdManifestFlags(self, config, name, gyp_to_build_path, def _GetLdManifestFlags(self, config, name, gyp_to_build_path,
allow_isolation): allow_isolation, build_dir):
"""Returns the set of flags that need to be added to the link to generate """Returns a 3-tuple:
a default manifest, as well as the list of all the manifest files to be - the set of flags that need to be added to the link to generate
merged by the manifest tool.""" a default manifest
- the intermediate manifest that the linker will generate that should be
used to assert it doesn't add anything to the merged one.
- the list of all the manifest files to be merged by the manifest tool and
included into the link."""
generate_manifest = self._Setting(('VCLinkerTool', 'GenerateManifest'), generate_manifest = self._Setting(('VCLinkerTool', 'GenerateManifest'),
config, config,
default='true') default='true')
@ -549,7 +554,7 @@ class MsvsSettings(object):
# This means not only that the linker should not generate the intermediate # This means not only that the linker should not generate the intermediate
# manifest but also that the manifest tool should do nothing even when # manifest but also that the manifest tool should do nothing even when
# additional manifests are specified. # additional manifests are specified.
return ['/MANIFEST:NO'], [] return ['/MANIFEST:NO'], [], []
output_name = name + '.intermediate.manifest' output_name = name + '.intermediate.manifest'
flags = [ flags = [
@ -557,9 +562,25 @@ class MsvsSettings(object):
'/ManifestFile:' + output_name, '/ManifestFile:' + output_name,
] ]
# Instead of using the MANIFESTUAC flags, we generate a .manifest to
# include into the list of manifests. This allows us to avoid the need to
# do two passes during linking. The /MANIFEST flag and /ManifestFile are
# still used, and the intermediate manifest is used to assert that the
# final manifest we get from merging all the additional manifest files
# (plus the one we generate here) isn't modified by merging the
# intermediate into it.
# Always NO, because we generate a manifest file that has what we want.
flags.append('/MANIFESTUAC:NO')
config = self._TargetConfig(config) config = self._TargetConfig(config)
enable_uac = self._Setting(('VCLinkerTool', 'EnableUAC'), config, enable_uac = self._Setting(('VCLinkerTool', 'EnableUAC'), config,
default='true') default='true')
manifest_files = []
generated_manifest_outer = \
"<?xml version='1.0' encoding='UTF-8' standalone='yes'?>" \
"<assembly xmlns='urn:schemas-microsoft-com:asm.v1' manifestVersion='1.0'>%s" \
"</assembly>"
if enable_uac == 'true': if enable_uac == 'true':
execution_level = self._Setting(('VCLinkerTool', 'UACExecutionLevel'), execution_level = self._Setting(('VCLinkerTool', 'UACExecutionLevel'),
config, default='0') config, default='0')
@ -571,18 +592,38 @@ class MsvsSettings(object):
ui_access = self._Setting(('VCLinkerTool', 'UACUIAccess'), config, ui_access = self._Setting(('VCLinkerTool', 'UACUIAccess'), config,
default='false') default='false')
flags.append('''/MANIFESTUAC:"level='%s' uiAccess='%s'"''' %
(execution_level_map[execution_level], ui_access)) inner = '''
<trustInfo xmlns="urn:schemas-microsoft-com:asm.v3">
<security>
<requestedPrivileges>
<requestedExecutionLevel level='%s' uiAccess='%s' />
</requestedPrivileges>
</security>
</trustInfo>''' % (execution_level_map[execution_level], ui_access)
else: else:
flags.append('/MANIFESTUAC:NO') inner = ''
generated_manifest_contents = generated_manifest_outer % inner
generated_name = name + '.generated.manifest'
# Need to join with the build_dir here as we're writing it during
# generation time, but we return the un-joined version because the build
# will occur in that directory. We only write the file if the contents
# have changed so that simply regenerating the project files doesn't
# cause a relink.
build_dir_generated_name = os.path.join(build_dir, generated_name)
gyp.common.EnsureDirExists(build_dir_generated_name)
f = gyp.common.WriteOnDiff(build_dir_generated_name)
f.write(generated_manifest_contents)
f.close()
manifest_files = [generated_name]
if allow_isolation: if allow_isolation:
flags.append('/ALLOWISOLATION') flags.append('/ALLOWISOLATION')
manifest_files = [output_name]
manifest_files += self._GetAdditionalManifestFiles(config, manifest_files += self._GetAdditionalManifestFiles(config,
gyp_to_build_path) gyp_to_build_path)
return flags, manifest_files return flags, output_name, manifest_files
def _GetAdditionalManifestFiles(self, config, gyp_to_build_path): def _GetAdditionalManifestFiles(self, config, gyp_to_build_path):
"""Gets additional manifest files that are added to the default one """Gets additional manifest files that are added to the default one
@ -605,7 +646,8 @@ class MsvsSettings(object):
def IsEmbedManifest(self, config): def IsEmbedManifest(self, config):
"""Returns whether manifest should be linked into binary.""" """Returns whether manifest should be linked into binary."""
config = self._TargetConfig(config) config = self._TargetConfig(config)
embed = self._Setting(('VCManifestTool', 'EmbedManifest'), config) embed = self._Setting(('VCManifestTool', 'EmbedManifest'), config,
default='true')
return embed == 'true' return embed == 'true'
def IsLinkIncremental(self, config): def IsLinkIncremental(self, config):

287
tools/gyp/pylib/gyp/ordered_dict.py

@@ -0,0 +1,287 @@
# Unmodified from http://code.activestate.com/recipes/576693/
# other than to add MIT license header (as specified on page, but not in code).
# Linked from Python documentation here:
# http://docs.python.org/2/library/collections.html#collections.OrderedDict
#
# This should be deleted once Py2.7 is available on all bots, see
# http://crbug.com/241769.
#
# Copyright (c) 2009 Raymond Hettinger.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
# Backport of OrderedDict() class that runs on Python 2.4, 2.5, 2.6, 2.7 and pypy.
# Passes Python2.7's test suite and incorporates all the latest updates.
try:
from thread import get_ident as _get_ident
except ImportError:
from dummy_thread import get_ident as _get_ident
try:
from _abcoll import KeysView, ValuesView, ItemsView
except ImportError:
pass
class OrderedDict(dict):
'Dictionary that remembers insertion order'
# An inherited dict maps keys to values.
# The inherited dict provides __getitem__, __len__, __contains__, and get.
# The remaining methods are order-aware.
# Big-O running times for all methods are the same as for regular dictionaries.
# The internal self.__map dictionary maps keys to links in a doubly linked list.
# The circular doubly linked list starts and ends with a sentinel element.
# The sentinel element never gets deleted (this simplifies the algorithm).
# Each link is stored as a list of length three: [PREV, NEXT, KEY].
def __init__(self, *args, **kwds):
'''Initialize an ordered dictionary. Signature is the same as for
regular dictionaries, but keyword arguments are not recommended
because their insertion order is arbitrary.
'''
if len(args) > 1:
raise TypeError('expected at most 1 arguments, got %d' % len(args))
try:
self.__root
except AttributeError:
self.__root = root = [] # sentinel node
root[:] = [root, root, None]
self.__map = {}
self.__update(*args, **kwds)
def __setitem__(self, key, value, dict_setitem=dict.__setitem__):
'od.__setitem__(i, y) <==> od[i]=y'
# Setting a new item creates a new link which goes at the end of the linked
# list, and the inherited dictionary is updated with the new key/value pair.
if key not in self:
root = self.__root
last = root[0]
last[1] = root[0] = self.__map[key] = [last, root, key]
dict_setitem(self, key, value)
def __delitem__(self, key, dict_delitem=dict.__delitem__):
'od.__delitem__(y) <==> del od[y]'
# Deleting an existing item uses self.__map to find the link which is
# then removed by updating the links in the predecessor and successor nodes.
dict_delitem(self, key)
link_prev, link_next, key = self.__map.pop(key)
link_prev[1] = link_next
link_next[0] = link_prev
def __iter__(self):
'od.__iter__() <==> iter(od)'
root = self.__root
curr = root[1]
while curr is not root:
yield curr[2]
curr = curr[1]
def __reversed__(self):
'od.__reversed__() <==> reversed(od)'
root = self.__root
curr = root[0]
while curr is not root:
yield curr[2]
curr = curr[0]
def clear(self):
'od.clear() -> None. Remove all items from od.'
try:
for node in self.__map.itervalues():
del node[:]
root = self.__root
root[:] = [root, root, None]
self.__map.clear()
except AttributeError:
pass
dict.clear(self)
def popitem(self, last=True):
'''od.popitem() -> (k, v), return and remove a (key, value) pair.
Pairs are returned in LIFO order if last is true or FIFO order if false.
'''
if not self:
raise KeyError('dictionary is empty')
root = self.__root
if last:
link = root[0]
link_prev = link[0]
link_prev[1] = root
root[0] = link_prev
else:
link = root[1]
link_next = link[1]
root[1] = link_next
link_next[0] = root
key = link[2]
del self.__map[key]
value = dict.pop(self, key)
return key, value
# -- the following methods do not depend on the internal structure --
def keys(self):
'od.keys() -> list of keys in od'
return list(self)
def values(self):
'od.values() -> list of values in od'
return [self[key] for key in self]
def items(self):
'od.items() -> list of (key, value) pairs in od'
return [(key, self[key]) for key in self]
def iterkeys(self):
'od.iterkeys() -> an iterator over the keys in od'
return iter(self)
def itervalues(self):
'od.itervalues -> an iterator over the values in od'
for k in self:
yield self[k]
def iteritems(self):
'od.iteritems -> an iterator over the (key, value) items in od'
for k in self:
yield (k, self[k])
def update(*args, **kwds):
'''od.update(E, **F) -> None. Update od from dict/iterable E and F.
If E is a dict instance, does: for k in E: od[k] = E[k]
If E has a .keys() method, does: for k in E.keys(): od[k] = E[k]
Or if E is an iterable of items, does: for k, v in E: od[k] = v
In either case, this is followed by: for k, v in F.items(): od[k] = v
'''
if len(args) > 2:
raise TypeError('update() takes at most 2 positional '
'arguments (%d given)' % (len(args),))
elif not args:
raise TypeError('update() takes at least 1 argument (0 given)')
self = args[0]
# Make progressively weaker assumptions about "other"
other = ()
if len(args) == 2:
other = args[1]
if isinstance(other, dict):
for key in other:
self[key] = other[key]
elif hasattr(other, 'keys'):
for key in other.keys():
self[key] = other[key]
else:
for key, value in other:
self[key] = value
for key, value in kwds.items():
self[key] = value
__update = update # let subclasses override update without breaking __init__
__marker = object()
def pop(self, key, default=__marker):
'''od.pop(k[,d]) -> v, remove specified key and return the corresponding value.
If key is not found, d is returned if given, otherwise KeyError is raised.
'''
if key in self:
result = self[key]
del self[key]
return result
if default is self.__marker:
raise KeyError(key)
return default
def setdefault(self, key, default=None):
'od.setdefault(k[,d]) -> od.get(k,d), also set od[k]=d if k not in od'
if key in self:
return self[key]
self[key] = default
return default
    def __repr__(self, _repr_running={}):
        'od.__repr__() <==> repr(od)'
        # _repr_running is an intentionally shared mutable default: a table of
        # (object id, thread id) pairs whose repr is currently in progress.
        # It breaks infinite recursion when an od (indirectly) contains itself.
        call_key = id(self), _get_ident()
        if call_key in _repr_running:
            return '...'
        _repr_running[call_key] = 1
        try:
            if not self:
                return '%s()' % (self.__class__.__name__,)
            return '%s(%r)' % (self.__class__.__name__, self.items())
        finally:
            # Always remove the guard entry, even if items()/repr raised.
            del _repr_running[call_key]
    def __reduce__(self):
        'Return state information for pickling'
        # Capture the ordered contents as [key, value] pairs.
        items = [[k, self[k]] for k in self]
        inst_dict = vars(self).copy()
        # Drop whatever attributes a fresh, empty OrderedDict carries (its
        # internal bookkeeping), so only user-added instance attributes are
        # included in the pickled state.
        for k in vars(OrderedDict()):
            inst_dict.pop(k, None)
        if inst_dict:
            return (self.__class__, (items,), inst_dict)
        return self.__class__, (items,)
def copy(self):
'od.copy() -> a shallow copy of od'
return self.__class__(self)
@classmethod
def fromkeys(cls, iterable, value=None):
'''OD.fromkeys(S[, v]) -> New ordered dictionary with keys from S
and values equal to v (which defaults to None).
'''
d = cls()
for key in iterable:
d[key] = value
return d
def __eq__(self, other):
'''od.__eq__(y) <==> od==y. Comparison to another OD is order-sensitive
while comparison to a regular mapping is order-insensitive.
'''
if isinstance(other, OrderedDict):
return len(self)==len(other) and self.items() == other.items()
return dict.__eq__(self, other)
def __ne__(self, other):
return not self == other
    # -- the following methods are only used in Python 2.7 --
    def viewkeys(self):
        "od.viewkeys() -> a set-like object providing a view on od's keys"
        # NOTE(review): KeysView is presumably imported at module top from the
        # collections ABCs -- confirm against the full file.
        return KeysView(self)
    def viewvalues(self):
        "od.viewvalues() -> an object providing a view on od's values"
        # NOTE(review): ValuesView is presumably imported at module top from
        # the collections ABCs -- confirm against the full file.
        return ValuesView(self)
    def viewitems(self):
        "od.viewitems() -> a set-like object providing a view on od's items"
        # NOTE(review): ItemsView is presumably imported at module top from
        # the collections ABCs -- confirm against the full file.
        return ItemsView(self)

77
tools/gyp/pylib/gyp/win_tool.py

@ -13,6 +13,7 @@ import os
import re import re
import shutil import shutil
import subprocess import subprocess
import string
import sys import sys
BASE_DIR = os.path.dirname(os.path.abspath(__file__)) BASE_DIR = os.path.dirname(os.path.abspath(__file__))
@ -116,6 +117,82 @@ class WinTool(object):
print line print line
return link.returncode return link.returncode
def ExecLinkWithManifests(self, arch, embed_manifest, out, ldcmd, resname,
mt, rc, intermediate_manifest, *manifests):
"""A wrapper for handling creating a manifest resource and then executing
a link command."""
# The 'normal' way to do manifests is to have link generate a manifest
# based on gathering dependencies from the object files, then merge that
# manifest with other manifests supplied as sources, convert the merged
# manifest to a resource, and then *relink*, including the compiled
# version of the manifest resource. This breaks incremental linking, and
# is generally overly complicated. Instead, we merge all the manifests
# provided (along with one that includes what would normally be in the
# linker-generated one, see msvs_emulation.py), and include that into the
# first and only link. We still tell link to generate a manifest, but we
# only use that to assert that our simpler process did not miss anything.
variables = {
'python': sys.executable,
'arch': arch,
'out': out,
'ldcmd': ldcmd,
'resname': resname,
'mt': mt,
'rc': rc,
'intermediate_manifest': intermediate_manifest,
'manifests': ' '.join(manifests),
}
add_to_ld = ''
if manifests:
subprocess.check_call(
'%(python)s gyp-win-tool manifest-wrapper %(arch)s %(mt)s -nologo '
'-manifest %(manifests)s -out:%(out)s.manifest' % variables)
if embed_manifest == 'True':
subprocess.check_call(
'%(python)s gyp-win-tool manifest-to-rc %(arch)s %(out)s.manifest'
' %(out)s.manifest.rc %(resname)s' % variables)
subprocess.check_call(
'%(python)s gyp-win-tool rc-wrapper %(arch)s %(rc)s '
'%(out)s.manifest.rc' % variables)
add_to_ld = ' %(out)s.manifest.res' % variables
subprocess.check_call(ldcmd + add_to_ld)
# Run mt.exe on the theoretically complete manifest we generated, merging
# it with the one the linker generated to confirm that the linker
# generated one does not add anything. This is strictly unnecessary for
# correctness, it's only to verify that e.g. /MANIFESTDEPENDENCY was not
# used in a #pragma comment.
if manifests:
# Merge the intermediate one with ours to .assert.manifest, then check
# that .assert.manifest is identical to ours.
subprocess.check_call(
'%(python)s gyp-win-tool manifest-wrapper %(arch)s %(mt)s -nologo '
'-manifest %(out)s.manifest %(intermediate_manifest)s '
'-out:%(out)s.assert.manifest' % variables)
assert_manifest = '%(out)s.assert.manifest' % variables
our_manifest = '%(out)s.manifest' % variables
# Load and normalize the manifests. mt.exe sometimes removes whitespace,
# and sometimes doesn't unfortunately.
with open(our_manifest, 'rb') as our_f:
with open(assert_manifest, 'rb') as assert_f:
our_data = our_f.read().translate(None, string.whitespace)
assert_data = assert_f.read().translate(None, string.whitespace)
if our_data != assert_data:
os.unlink(out)
def dump(filename):
sys.stderr.write('%s\n-----\n' % filename)
with open(filename, 'rb') as f:
sys.stderr.write(f.read() + '\n-----\n')
dump(intermediate_manifest)
dump(our_manifest)
dump(assert_manifest)
sys.stderr.write(
'Linker generated manifest "%s" added to final manifest "%s" '
'(result in "%s"). '
'Were /MANIFEST switches used in #pragma statements? ' % (
intermediate_manifest, our_manifest, assert_manifest))
return 1
def ExecManifestWrapper(self, arch, *args): def ExecManifestWrapper(self, arch, *args):
"""Run manifest tool with environment set. Strip out undesirable warning """Run manifest tool with environment set. Strip out undesirable warning
(some XML blocks are recognized by the OS loader, but not the manifest (some XML blocks are recognized by the OS loader, but not the manifest

86
tools/gyp/pylib/gyp/xcode_emulation.py

@ -280,7 +280,14 @@ class XcodeSettings(object):
return out.rstrip('\n') return out.rstrip('\n')
def _GetSdkVersionInfoItem(self, sdk, infoitem): def _GetSdkVersionInfoItem(self, sdk, infoitem):
return self._GetStdout(['xcodebuild', '-version', '-sdk', sdk, infoitem]) # xcodebuild requires Xcode and can't run on Command Line Tools-only
# systems from 10.7 onward.
# Since the CLT has no SDK paths anyway, returning None is the
# most sensible route and should still do the right thing.
try:
return self._GetStdout(['xcodebuild', '-version', '-sdk', sdk, infoitem])
except:
pass
def _SdkRoot(self, configname): def _SdkRoot(self, configname):
if configname is None: if configname is None:
@ -323,7 +330,7 @@ class XcodeSettings(object):
cflags = [] cflags = []
sdk_root = self._SdkPath() sdk_root = self._SdkPath()
if 'SDKROOT' in self._Settings(): if 'SDKROOT' in self._Settings() and sdk_root:
cflags.append('-isysroot %s' % sdk_root) cflags.append('-isysroot %s' % sdk_root)
if self._Test('CLANG_WARN_CONSTANT_CONVERSION', 'YES', default='NO'): if self._Test('CLANG_WARN_CONSTANT_CONVERSION', 'YES', default='NO'):
@ -409,10 +416,14 @@ class XcodeSettings(object):
cflags += self._Settings().get('WARNING_CFLAGS', []) cflags += self._Settings().get('WARNING_CFLAGS', [])
if sdk_root:
framework_root = sdk_root
else:
framework_root = ''
config = self.spec['configurations'][self.configname] config = self.spec['configurations'][self.configname]
framework_dirs = config.get('mac_framework_dirs', []) framework_dirs = config.get('mac_framework_dirs', [])
for directory in framework_dirs: for directory in framework_dirs:
cflags.append('-F' + directory.replace('$(SDKROOT)', sdk_root)) cflags.append('-F' + directory.replace('$(SDKROOT)', framework_root))
self.configname = None self.configname = None
return cflags return cflags
@ -628,7 +639,7 @@ class XcodeSettings(object):
self._AppendPlatformVersionMinFlags(ldflags) self._AppendPlatformVersionMinFlags(ldflags)
if 'SDKROOT' in self._Settings(): if 'SDKROOT' in self._Settings() and self._SdkPath():
ldflags.append('-isysroot ' + self._SdkPath()) ldflags.append('-isysroot ' + self._SdkPath())
for library_path in self._Settings().get('LIBRARY_SEARCH_PATHS', []): for library_path in self._Settings().get('LIBRARY_SEARCH_PATHS', []):
@ -659,10 +670,13 @@ class XcodeSettings(object):
for rpath in self._Settings().get('LD_RUNPATH_SEARCH_PATHS', []): for rpath in self._Settings().get('LD_RUNPATH_SEARCH_PATHS', []):
ldflags.append('-Wl,-rpath,' + rpath) ldflags.append('-Wl,-rpath,' + rpath)
sdk_root = self._SdkPath()
if not sdk_root:
sdk_root = ''
config = self.spec['configurations'][self.configname] config = self.spec['configurations'][self.configname]
framework_dirs = config.get('mac_framework_dirs', []) framework_dirs = config.get('mac_framework_dirs', [])
for directory in framework_dirs: for directory in framework_dirs:
ldflags.append('-F' + directory.replace('$(SDKROOT)', self._SdkPath())) ldflags.append('-F' + directory.replace('$(SDKROOT)', sdk_root))
self.configname = None self.configname = None
return ldflags return ldflags
@ -820,9 +834,11 @@ class XcodeSettings(object):
['security', 'find-identity', '-p', 'codesigning', '-v']) ['security', 'find-identity', '-p', 'codesigning', '-v'])
for line in output.splitlines(): for line in output.splitlines():
if identity in line: if identity in line:
assert identity not in XcodeSettings._codesigning_key_cache, ( fingerprint = line.split()[1]
"Multiple codesigning identities for identity: %s" % identity) cache = XcodeSettings._codesigning_key_cache
XcodeSettings._codesigning_key_cache[identity] = line.split()[1] assert identity not in cache or fingerprint == cache[identity], (
"Multiple codesigning fingerprints for identity: %s" % identity)
XcodeSettings._codesigning_key_cache[identity] = fingerprint
return XcodeSettings._codesigning_key_cache.get(identity, '') return XcodeSettings._codesigning_key_cache.get(identity, '')
def AddImplicitPostbuilds(self, configname, output, output_binary, def AddImplicitPostbuilds(self, configname, output, output_binary,
@ -843,7 +859,11 @@ class XcodeSettings(object):
l = '-l' + m.group(1) l = '-l' + m.group(1)
else: else:
l = library l = library
return l.replace('$(SDKROOT)', self._SdkPath(config_name))
sdk_root = self._SdkPath(config_name)
if not sdk_root:
sdk_root = ''
return l.replace('$(SDKROOT)', sdk_root)
def AdjustLibraries(self, libraries, config_name=None): def AdjustLibraries(self, libraries, config_name=None):
"""Transforms entries like 'Cocoa.framework' in libraries into entries like """Transforms entries like 'Cocoa.framework' in libraries into entries like
@ -856,6 +876,27 @@ class XcodeSettings(object):
def _BuildMachineOSBuild(self): def _BuildMachineOSBuild(self):
return self._GetStdout(['sw_vers', '-buildVersion']) return self._GetStdout(['sw_vers', '-buildVersion'])
# This method ported from the logic in Homebrew's CLT version check
def _CLTVersion(self):
# pkgutil output looks like
# package-id: com.apple.pkg.CLTools_Executables
# version: 5.0.1.0.1.1382131676
# volume: /
# location: /
# install-time: 1382544035
# groups: com.apple.FindSystemFiles.pkg-group com.apple.DevToolsBoth.pkg-group com.apple.DevToolsNonRelocatableShared.pkg-group
STANDALONE_PKG_ID = "com.apple.pkg.DeveloperToolsCLILeo"
FROM_XCODE_PKG_ID = "com.apple.pkg.DeveloperToolsCLI"
MAVERICKS_PKG_ID = "com.apple.pkg.CLTools_Executables"
regex = re.compile('version: (?P<version>.+)')
for key in [MAVERICKS_PKG_ID, STANDALONE_PKG_ID, FROM_XCODE_PKG_ID]:
try:
output = self._GetStdout(['/usr/sbin/pkgutil', '--pkg-info', key])
return re.search(regex, output).groupdict()['version']
except:
continue
def _XcodeVersion(self): def _XcodeVersion(self):
# `xcodebuild -version` output looks like # `xcodebuild -version` output looks like
# Xcode 4.6.3 # Xcode 4.6.3
@ -866,13 +907,30 @@ class XcodeSettings(object):
# BuildVersion: 10M2518 # BuildVersion: 10M2518
# Convert that to '0463', '4H1503'. # Convert that to '0463', '4H1503'.
if len(XcodeSettings._xcode_version_cache) == 0: if len(XcodeSettings._xcode_version_cache) == 0:
version_list = self._GetStdout(['xcodebuild', '-version']).splitlines() try:
version_list = self._GetStdout(['xcodebuild', '-version']).splitlines()
# In some circumstances xcodebuild exits 0 but doesn't return
# the right results; for example, a user on 10.7 or 10.8 with
# a bogus path set via xcode-select
# In that case this may be a CLT-only install so fall back to
# checking that version.
if len(version_list) < 2:
raise GypError, "xcodebuild returned unexpected results"
except:
version = self._CLTVersion()
if version:
version = re.match('(\d\.\d\.?\d*)', version).groups()[0]
else:
raise GypError, "No Xcode or CLT version detected!"
# The CLT has no build information, so we return an empty string.
version_list = [version, '']
version = version_list[0] version = version_list[0]
build = version_list[-1] build = version_list[-1]
# Be careful to convert "4.2" to "0420": # Be careful to convert "4.2" to "0420":
version = version.split()[-1].replace('.', '') version = version.split()[-1].replace('.', '')
version = (version + '0' * (3 - len(version))).zfill(4) version = (version + '0' * (3 - len(version))).zfill(4)
build = build.split()[-1] if build:
build = build.split()[-1]
XcodeSettings._xcode_version_cache = (version, build) XcodeSettings._xcode_version_cache = (version, build)
return XcodeSettings._xcode_version_cache return XcodeSettings._xcode_version_cache
@ -930,7 +988,11 @@ class XcodeSettings(object):
default_sdk_root = XcodeSettings._sdk_root_cache.get(default_sdk_path) default_sdk_root = XcodeSettings._sdk_root_cache.get(default_sdk_path)
if default_sdk_root: if default_sdk_root:
return default_sdk_root return default_sdk_root
all_sdks = self._GetStdout(['xcodebuild', '-showsdks']) try:
all_sdks = self._GetStdout(['xcodebuild', '-showsdks'])
except:
# If xcodebuild fails, there will be no valid SDKs
return ''
for line in all_sdks.splitlines(): for line in all_sdks.splitlines():
items = line.split() items = line.split()
if len(items) >= 3 and items[-2] == '-sdk': if len(items) >= 3 and items[-2] == '-sdk':

Loading…
Cancel
Save