Browse Source

tools: update gyp to r1426

v0.8.7-release
Ben Noordhuis 13 years ago
parent
commit
fc4e12b8f1
  1. 4
      tools/gyp/DEPS
  2. 62
      tools/gyp/PRESUBMIT.py
  3. 1
      tools/gyp/buildbot/buildbot_run.py
  4. 11
      tools/gyp/pylib/gyp/MSVSNew.py
  5. 4
      tools/gyp/pylib/gyp/MSVSProject.py
  6. 4
      tools/gyp/pylib/gyp/MSVSSettings.py
  7. 17
      tools/gyp/pylib/gyp/MSVSSettings_test.py
  8. 4
      tools/gyp/pylib/gyp/MSVSToolFile.py
  9. 5
      tools/gyp/pylib/gyp/MSVSUserFile.py
  10. 145
      tools/gyp/pylib/gyp/MSVSVersion.py
  11. 4
      tools/gyp/pylib/gyp/SCons.py
  12. 56
      tools/gyp/pylib/gyp/common.py
  13. 44
      tools/gyp/pylib/gyp/common_test.py
  14. 2
      tools/gyp/pylib/gyp/generator/dump_dependency_json.py
  15. 271
      tools/gyp/pylib/gyp/generator/eclipse.py
  16. 178
      tools/gyp/pylib/gyp/generator/make.py
  17. 343
      tools/gyp/pylib/gyp/generator/msvs.py
  18. 715
      tools/gyp/pylib/gyp/generator/ninja.py
  19. 48
      tools/gyp/pylib/gyp/generator/ninja_test.py
  20. 9
      tools/gyp/pylib/gyp/generator/scons.py
  21. 32
      tools/gyp/pylib/gyp/generator/xcode.py
  22. 673
      tools/gyp/pylib/gyp/input.py
  23. 33
      tools/gyp/pylib/gyp/mac_tool.py
  24. 642
      tools/gyp/pylib/gyp/msvs_emulation.py
  25. 20
      tools/gyp/pylib/gyp/ninja_syntax.py
  26. 161
      tools/gyp/pylib/gyp/win_tool.py
  27. 324
      tools/gyp/pylib/gyp/xcode_emulation.py
  28. 44
      tools/gyp/pylib/gyp/xcodeproj_file.py
  29. 23
      tools/gyp/test/actions-bare/gyptest-bare.py
  30. 25
      tools/gyp/test/actions-bare/src/bare.gyp
  31. 11
      tools/gyp/test/actions-bare/src/bare.py
  32. 42
      tools/gyp/test/actions-multiple/gyptest-all.py
  33. 165
      tools/gyp/test/actions-multiple/src/actions.gyp
  34. 9
      tools/gyp/test/actions-multiple/src/copy.py
  35. 12
      tools/gyp/test/actions-multiple/src/filter.py
  36. 11
      tools/gyp/test/actions-multiple/src/foo.c
  37. 1
      tools/gyp/test/actions-multiple/src/input.txt
  38. 22
      tools/gyp/test/actions-multiple/src/main.c
  39. 26
      tools/gyp/test/actions-subdir/gyptest-action.py
  40. 11
      tools/gyp/test/actions-subdir/src/make-file.py
  41. 31
      tools/gyp/test/actions-subdir/src/none.gyp
  42. 11
      tools/gyp/test/actions-subdir/src/subdir/make-subdir-file.py
  43. 28
      tools/gyp/test/actions-subdir/src/subdir/subdir.gyp
  44. 101
      tools/gyp/test/actions/gyptest-all.py
  45. 68
      tools/gyp/test/actions/gyptest-default.py
  46. 24
      tools/gyp/test/actions/gyptest-errors.py
  47. 24
      tools/gyp/test/actions/src/action_missing_name.gyp
  48. 114
      tools/gyp/test/actions/src/actions.gyp
  49. 21
      tools/gyp/test/actions/src/confirm-dep-files.py
  50. 46
      tools/gyp/test/actions/src/subdir1/counter.py
  51. 74
      tools/gyp/test/actions/src/subdir1/executable.gyp
  52. 20
      tools/gyp/test/actions/src/subdir1/make-prog1.py
  53. 20
      tools/gyp/test/actions/src/subdir1/make-prog2.py
  54. 12
      tools/gyp/test/actions/src/subdir1/program.c
  55. 11
      tools/gyp/test/actions/src/subdir2/make-file.py
  56. 33
      tools/gyp/test/actions/src/subdir2/none.gyp
  57. 21
      tools/gyp/test/actions/src/subdir3/generate_main.py
  58. 29
      tools/gyp/test/actions/src/subdir3/null_input.gyp
  59. 55
      tools/gyp/test/additional-targets/gyptest-additional.py
  60. 13
      tools/gyp/test/additional-targets/src/all.gyp
  61. 56
      tools/gyp/test/additional-targets/src/dir1/actions.gyp
  62. 11
      tools/gyp/test/additional-targets/src/dir1/emit.py
  63. 6
      tools/gyp/test/additional-targets/src/dir1/lib1.c
  64. 31
      tools/gyp/test/assembly/gyptest-assembly.py
  65. 4
      tools/gyp/test/assembly/src/as.bat
  66. 59
      tools/gyp/test/assembly/src/assembly.gyp
  67. 10
      tools/gyp/test/assembly/src/lib1.S
  68. 3
      tools/gyp/test/assembly/src/lib1.c
  69. 12
      tools/gyp/test/assembly/src/program.c
  70. 77
      tools/gyp/test/builddir/gyptest-all.py
  71. 77
      tools/gyp/test/builddir/gyptest-default.py
  72. 21
      tools/gyp/test/builddir/src/builddir.gypi
  73. 6
      tools/gyp/test/builddir/src/func1.c
  74. 6
      tools/gyp/test/builddir/src/func2.c
  75. 6
      tools/gyp/test/builddir/src/func3.c
  76. 6
      tools/gyp/test/builddir/src/func4.c
  77. 6
      tools/gyp/test/builddir/src/func5.c
  78. 10
      tools/gyp/test/builddir/src/prog1.c
  79. 30
      tools/gyp/test/builddir/src/prog1.gyp
  80. 10
      tools/gyp/test/builddir/src/subdir2/prog2.c
  81. 19
      tools/gyp/test/builddir/src/subdir2/prog2.gyp
  82. 10
      tools/gyp/test/builddir/src/subdir2/subdir3/prog3.c
  83. 19
      tools/gyp/test/builddir/src/subdir2/subdir3/prog3.gyp
  84. 10
      tools/gyp/test/builddir/src/subdir2/subdir3/subdir4/prog4.c
  85. 19
      tools/gyp/test/builddir/src/subdir2/subdir3/subdir4/prog4.gyp
  86. 10
      tools/gyp/test/builddir/src/subdir2/subdir3/subdir4/subdir5/prog5.c
  87. 19
      tools/gyp/test/builddir/src/subdir2/subdir3/subdir4/subdir5/prog5.gyp
  88. 15
      tools/gyp/test/cflags/cflags.c
  89. 16
      tools/gyp/test/cflags/cflags.gyp
  90. 65
      tools/gyp/test/cflags/gyptest-cflags.py
  91. 29
      tools/gyp/test/compilable/gyptest-headers.py
  92. 26
      tools/gyp/test/compilable/src/headers.gyp
  93. 7
      tools/gyp/test/compilable/src/lib1.cpp
  94. 6
      tools/gyp/test/compilable/src/lib1.hpp
  95. 9
      tools/gyp/test/compilable/src/program.cpp
  96. 15
      tools/gyp/test/configurations/basics/configurations.c
  97. 32
      tools/gyp/test/configurations/basics/configurations.gyp
  98. 29
      tools/gyp/test/configurations/basics/gyptest-configurations.py
  99. 21
      tools/gyp/test/configurations/inheritance/configurations.c
  100. 40
      tools/gyp/test/configurations/inheritance/configurations.gyp

4
tools/gyp/DEPS

@ -4,6 +4,7 @@
vars = { vars = {
"chrome_trunk": "http://src.chromium.org/svn/trunk", "chrome_trunk": "http://src.chromium.org/svn/trunk",
"googlecode_url": "http://%s.googlecode.com/svn",
} }
deps = { deps = {
@ -18,5 +19,8 @@ deps_os = {
"third_party/python_26": "third_party/python_26":
Var("chrome_trunk") + "/tools/third_party/python_26@89111", Var("chrome_trunk") + "/tools/third_party/python_26@89111",
"src/third_party/pefile":
(Var("googlecode_url") % "pefile") + "/trunk@63",
}, },
} }

62
tools/gyp/PRESUBMIT.py

@ -1,4 +1,4 @@
# Copyright (c) 2011 Google Inc. All rights reserved. # Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be # Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file. # found in the LICENSE file.
@ -10,6 +10,62 @@ for more details about the presubmit API built into gcl.
""" """
PYLINT_BLACKLIST = [
# TODO: fix me.
# From SCons, not done in google style.
'test/lib/TestCmd.py',
'test/lib/TestCommon.py',
'test/lib/TestGyp.py',
# Needs style fix.
'pylib/gyp/generator/scons.py',
'pylib/gyp/generator/xcode.py',
]
PYLINT_DISABLED_WARNINGS = [
# TODO: fix me.
# Many tests include modules they don't use.
'W0611',
# Include order doesn't properly include local files?
'F0401',
# Some use of built-in names.
'W0622',
# Some unused variables.
'W0612',
# Operator not preceded/followed by space.
'C0323',
'C0322',
# Unnecessary semicolon.
'W0301',
# Unused argument.
'W0613',
# String has no effect (docstring in wrong place).
'W0105',
# Comma not followed by space.
'C0324',
# Access to a protected member.
'W0212',
# Bad indent.
'W0311',
# Line too long.
'C0301',
# Undefined variable.
'E0602',
# Not exception type specified.
'W0702',
# No member of that name.
'E1101',
# Dangerous default {}.
'W0102',
# Others, too many to sort.
'W0201', 'W0232', 'E1103', 'W0621', 'W0108', 'W0223', 'W0231',
'R0201', 'E0101', 'C0321',
# ************* Module copy
# W0104:427,12:_test.odict.__setitem__: Statement seems to have no effect
'W0104',
]
def CheckChangeOnUpload(input_api, output_api): def CheckChangeOnUpload(input_api, output_api):
report = [] report = []
report.extend(input_api.canned_checks.PanProjectChecks( report.extend(input_api.canned_checks.PanProjectChecks(
@ -41,7 +97,9 @@ def CheckChangeOnCommit(input_api, output_api):
sys.path = ['pylib', 'test/lib'] + sys.path sys.path = ['pylib', 'test/lib'] + sys.path
report.extend(input_api.canned_checks.RunPylint( report.extend(input_api.canned_checks.RunPylint(
input_api, input_api,
output_api)) output_api,
black_list=PYLINT_BLACKLIST,
disabled_warnings=PYLINT_DISABLED_WARNINGS))
finally: finally:
sys.path = old_sys_path sys.path = old_sys_path
return report return report

1
tools/gyp/buildbot/buildbot_run.py

@ -79,6 +79,7 @@ def GypBuild():
retcode += GypTestFormat('xcode') retcode += GypTestFormat('xcode')
retcode += GypTestFormat('make') retcode += GypTestFormat('make')
elif sys.platform == 'win32': elif sys.platform == 'win32':
retcode += GypTestFormat('ninja')
retcode += GypTestFormat('msvs-2008', format='msvs', msvs_version='2008') retcode += GypTestFormat('msvs-2008', format='msvs', msvs_version='2008')
if os.environ['BUILDBOT_BUILDERNAME'] == 'gyp-win64': if os.environ['BUILDBOT_BUILDERNAME'] == 'gyp-win64':
retcode += GypTestFormat('msvs-2010', format='msvs', msvs_version='2010') retcode += GypTestFormat('msvs-2010', format='msvs', msvs_version='2010')

11
tools/gyp/pylib/gyp/MSVSNew.py

@ -1,10 +1,9 @@
# Copyright (c) 2011 Google Inc. All rights reserved. # Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be # Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file. # found in the LICENSE file.
"""New implementation of Visual Studio project generation for SCons.""" """New implementation of Visual Studio project generation for SCons."""
import common
import os import os
import random import random
@ -139,10 +138,11 @@ class MSVSProject(object):
else: else:
self.config_platform_overrides = {} self.config_platform_overrides = {}
self.fixpath_prefix = fixpath_prefix self.fixpath_prefix = fixpath_prefix
self.msbuild_toolset = None
def set_dependencies(self, dependencies): def set_dependencies(self, dependencies):
self.dependencies = list(dependencies or []) self.dependencies = list(dependencies or [])
def get_guid(self): def get_guid(self):
if self.guid is None: if self.guid is None:
# Set GUID from path # Set GUID from path
@ -160,6 +160,9 @@ class MSVSProject(object):
self.guid = MakeGuid(self.name) self.guid = MakeGuid(self.name)
return self.guid return self.guid
def set_msbuild_toolset(self, msbuild_toolset):
self.msbuild_toolset = msbuild_toolset
#------------------------------------------------------------------------------ #------------------------------------------------------------------------------
@ -204,7 +207,7 @@ class MSVSSolution:
self.Write() self.Write()
def Write(self, writer=common.WriteOnDiff): def Write(self, writer=gyp.common.WriteOnDiff):
"""Writes the solution file to disk. """Writes the solution file to disk.
Raises: Raises:

4
tools/gyp/pylib/gyp/MSVSProject.py

@ -1,10 +1,10 @@
# Copyright (c) 2011 Google Inc. All rights reserved. # Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be # Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file. # found in the LICENSE file.
"""Visual Studio project reader/writer.""" """Visual Studio project reader/writer."""
import common import gyp.common
import gyp.easy_xml as easy_xml import gyp.easy_xml as easy_xml
#------------------------------------------------------------------------------ #------------------------------------------------------------------------------

4
tools/gyp/pylib/gyp/MSVSSettings.py

@ -1,4 +1,4 @@
# Copyright (c) 2011 The Chromium Authors. All rights reserved. # Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be # Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file. # found in the LICENSE file.
@ -481,7 +481,7 @@ _link = _Tool('VCLinkerTool', 'Link')
_midl = _Tool('VCMIDLTool', 'Midl') _midl = _Tool('VCMIDLTool', 'Midl')
_rc = _Tool('VCResourceCompilerTool', 'ResourceCompile') _rc = _Tool('VCResourceCompilerTool', 'ResourceCompile')
_lib = _Tool('VCLibrarianTool', 'Lib') _lib = _Tool('VCLibrarianTool', 'Lib')
_manifest = _Tool('VCManifestTool', 'Mt') _manifest = _Tool('VCManifestTool', 'Manifest')
_AddTool(_compile) _AddTool(_compile)

17
tools/gyp/pylib/gyp/MSVSSettings_test.py

@ -1,6 +1,6 @@
#!/usr/bin/env python #!/usr/bin/env python
# Copyright (c) 2011 Google Inc. All rights reserved. # Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be # Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file. # found in the LICENSE file.
@ -514,7 +514,7 @@ class TestSequenceFunctions(unittest.TestCase):
'TreatLibWarningAsErrors': 'true', 'TreatLibWarningAsErrors': 'true',
'UseUnicodeResponseFiles': 'true', 'UseUnicodeResponseFiles': 'true',
'Verbose': 'true'}, 'Verbose': 'true'},
'Mt': { 'Manifest': {
'AdditionalManifestFiles': 'file1;file2', 'AdditionalManifestFiles': 'file1;file2',
'AdditionalOptions': 'a string1', 'AdditionalOptions': 'a string1',
'AssemblyIdentity': 'a string1', 'AssemblyIdentity': 'a string1',
@ -550,11 +550,12 @@ class TestSequenceFunctions(unittest.TestCase):
self._ExpectedWarnings([ self._ExpectedWarnings([
'Warning: unrecognized setting ClCompile/Enableprefast', 'Warning: unrecognized setting ClCompile/Enableprefast',
'Warning: unrecognized setting ClCompile/ZZXYZ', 'Warning: unrecognized setting ClCompile/ZZXYZ',
'Warning: unrecognized setting Mt/notgood3', 'Warning: unrecognized setting Manifest/notgood3',
"Warning: for Mt/GenerateCatalogFiles, expected bool; got 'truel'", 'Warning: for Manifest/GenerateCatalogFiles, '
"expected bool; got 'truel'",
'Warning: for Lib/TargetMachine, unrecognized enumerated value ' 'Warning: for Lib/TargetMachine, unrecognized enumerated value '
'MachineX86i', 'MachineX86i',
"Warning: for Mt/EnableDPIAwareness, expected bool; got 'fal'"]) "Warning: for Manifest/EnableDPIAwareness, expected bool; got 'fal'"])
def testConvertToMSBuildSettings_empty(self): def testConvertToMSBuildSettings_empty(self):
"""Tests an empty conversion.""" """Tests an empty conversion."""
@ -1054,7 +1055,7 @@ class TestSequenceFunctions(unittest.TestCase):
'OutputFile': 'a_file_name', 'OutputFile': 'a_file_name',
'SuppressStartupBanner': 'true', 'SuppressStartupBanner': 'true',
'UseUnicodeResponseFiles': 'true'}, 'UseUnicodeResponseFiles': 'true'},
'Mt': { 'Manifest': {
'AdditionalManifestFiles': 'file1;file2;file3', 'AdditionalManifestFiles': 'file1;file2;file3',
'AdditionalOptions': 'a_string', 'AdditionalOptions': 'a_string',
'AssemblyIdentity': 'a_string', 'AssemblyIdentity': 'a_string',
@ -1124,7 +1125,7 @@ class TestSequenceFunctions(unittest.TestCase):
AdditionalIncludeDirectories: ';%(AdditionalIncludeDirectories)', AdditionalIncludeDirectories: ';%(AdditionalIncludeDirectories)',
AdditionalOptions: ' %(AdditionalOptions)', AdditionalOptions: ' %(AdditionalOptions)',
PreprocessorDefinitions: ';%(PreprocessorDefinitions)', PreprocessorDefinitions: ';%(PreprocessorDefinitions)',
Mt: Manifest:
AdditionalManifestFiles: ';%(AdditionalManifestFiles)', AdditionalManifestFiles: ';%(AdditionalManifestFiles)',
AdditionalOptions: ' %(AdditionalOptions)', AdditionalOptions: ' %(AdditionalOptions)',
InputResourceManifests: ';%(InputResourceManifests)', InputResourceManifests: ';%(InputResourceManifests)',
@ -1442,7 +1443,7 @@ class TestSequenceFunctions(unittest.TestCase):
'PreprocessorDefinitions': '_UNICODE;UNICODE2', 'PreprocessorDefinitions': '_UNICODE;UNICODE2',
'ResourceOutputFileName': '$(IntDir)%(Filename)3.res', 'ResourceOutputFileName': '$(IntDir)%(Filename)3.res',
'ShowProgress': 'true'}, 'ShowProgress': 'true'},
'Mt': { 'Manifest': {
'AdditionalManifestFiles': 'sfsdfsd', 'AdditionalManifestFiles': 'sfsdfsd',
'AdditionalOptions': 'afdsdafsd', 'AdditionalOptions': 'afdsdafsd',
'AssemblyIdentity': 'sddfdsadfsa', 'AssemblyIdentity': 'sddfdsadfsa',

4
tools/gyp/pylib/gyp/MSVSToolFile.py

@ -1,10 +1,10 @@
# Copyright (c) 2009 Google Inc. All rights reserved. # Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be # Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file. # found in the LICENSE file.
"""Visual Studio project reader/writer.""" """Visual Studio project reader/writer."""
import common import gyp.common
import gyp.easy_xml as easy_xml import gyp.easy_xml as easy_xml

5
tools/gyp/pylib/gyp/MSVSUserFile.py

@ -1,13 +1,14 @@
# Copyright (c) 2009 Google Inc. All rights reserved. # Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be # Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file. # found in the LICENSE file.
"""Visual Studio user preferences file writer.""" """Visual Studio user preferences file writer."""
import common
import os import os
import re import re
import socket # for gethostname import socket # for gethostname
import gyp.common
import gyp.easy_xml as easy_xml import gyp.easy_xml as easy_xml

145
tools/gyp/pylib/gyp/MSVSVersion.py

@ -1,4 +1,4 @@
# Copyright (c) 2011 The Chromium Authors. All rights reserved. # Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be # Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file. # found in the LICENSE file.
@ -15,13 +15,16 @@ class VisualStudioVersion(object):
"""Information regarding a version of Visual Studio.""" """Information regarding a version of Visual Studio."""
def __init__(self, short_name, description, def __init__(self, short_name, description,
solution_version, project_version, flat_sln, uses_vcxproj): solution_version, project_version, flat_sln, uses_vcxproj,
path, sdk_based):
self.short_name = short_name self.short_name = short_name
self.description = description self.description = description
self.solution_version = solution_version self.solution_version = solution_version
self.project_version = project_version self.project_version = project_version
self.flat_sln = flat_sln self.flat_sln = flat_sln
self.uses_vcxproj = uses_vcxproj self.uses_vcxproj = uses_vcxproj
self.path = path
self.sdk_based = sdk_based
def ShortName(self): def ShortName(self):
return self.short_name return self.short_name
@ -49,6 +52,43 @@ class VisualStudioVersion(object):
"""Returns the file extension for the project.""" """Returns the file extension for the project."""
return self.uses_vcxproj and '.vcxproj' or '.vcproj' return self.uses_vcxproj and '.vcxproj' or '.vcproj'
def Path(self):
"""Returns the path to Visual Studio installation."""
return self.path
def ToolPath(self, tool):
"""Returns the path to a given compiler tool. """
return os.path.normpath(os.path.join(self.path, "VC/bin", tool))
def SetupScript(self, target_arch):
"""Returns a command (with arguments) to be used to set up the
environment."""
# Check if we are running in the SDK command line environment and use
# the setup script from the SDK if so. |target_arch| should be either
# 'x86' or 'x64'.
assert target_arch in ('x86', 'x64')
sdk_dir = os.environ.get('WindowsSDKDir')
if self.sdk_based and sdk_dir:
return [os.path.normpath(os.path.join(sdk_dir, 'Bin/SetEnv.Cmd')),
'/' + target_arch]
else:
# We don't use VC/vcvarsall.bat for x86 because vcvarsall calls
# vcvars32, which it can only find if VS??COMNTOOLS is set, which it
# isn't always.
if target_arch == 'x86':
return [os.path.normpath(
os.path.join(self.path, 'Common7/Tools/vsvars32.bat'))]
else:
assert target_arch == 'x64'
arg = 'x86_amd64'
if (os.environ.get('PROCESSOR_ARCHITECTURE') == 'AMD64' or
os.environ.get('PROCESSOR_ARCHITEW6432') == 'AMD64'):
# Use the 64-on-64 compiler if we can.
arg = 'amd64'
return [os.path.normpath(
os.path.join(self.path, 'VC/vcvarsall.bat')), arg]
def _RegistryQueryBase(sysdir, key, value): def _RegistryQueryBase(sysdir, key, value):
"""Use reg.exe to read a particular key. """Use reg.exe to read a particular key.
@ -140,7 +180,7 @@ def _RegistryKeyExists(key):
return True return True
def _CreateVersion(name): def _CreateVersion(name, path, sdk_based=False):
"""Sets up MSVS project generation. """Sets up MSVS project generation.
Setup is based off the GYP_MSVS_VERSION environment variable or whatever is Setup is based off the GYP_MSVS_VERSION environment variable or whatever is
@ -153,42 +193,54 @@ def _CreateVersion(name):
solution_version='11.00', solution_version='11.00',
project_version='4.0', project_version='4.0',
flat_sln=False, flat_sln=False,
uses_vcxproj=True), uses_vcxproj=True,
path=path,
sdk_based=sdk_based),
'2010e': VisualStudioVersion('2010e', '2010e': VisualStudioVersion('2010e',
'Visual Studio 2010', 'Visual Studio 2010',
solution_version='11.00', solution_version='11.00',
project_version='4.0', project_version='4.0',
flat_sln=True, flat_sln=True,
uses_vcxproj=True), uses_vcxproj=True,
path=path,
sdk_based=sdk_based),
'2008': VisualStudioVersion('2008', '2008': VisualStudioVersion('2008',
'Visual Studio 2008', 'Visual Studio 2008',
solution_version='10.00', solution_version='10.00',
project_version='9.00', project_version='9.00',
flat_sln=False, flat_sln=False,
uses_vcxproj=False), uses_vcxproj=False,
path=path,
sdk_based=sdk_based),
'2008e': VisualStudioVersion('2008e', '2008e': VisualStudioVersion('2008e',
'Visual Studio 2008', 'Visual Studio 2008',
solution_version='10.00', solution_version='10.00',
project_version='9.00', project_version='9.00',
flat_sln=True, flat_sln=True,
uses_vcxproj=False), uses_vcxproj=False,
path=path,
sdk_based=sdk_based),
'2005': VisualStudioVersion('2005', '2005': VisualStudioVersion('2005',
'Visual Studio 2005', 'Visual Studio 2005',
solution_version='9.00', solution_version='9.00',
project_version='8.00', project_version='8.00',
flat_sln=False, flat_sln=False,
uses_vcxproj=False), uses_vcxproj=False,
path=path,
sdk_based=sdk_based),
'2005e': VisualStudioVersion('2005e', '2005e': VisualStudioVersion('2005e',
'Visual Studio 2005', 'Visual Studio 2005',
solution_version='9.00', solution_version='9.00',
project_version='8.00', project_version='8.00',
flat_sln=True, flat_sln=True,
uses_vcxproj=False), uses_vcxproj=False,
path=path,
sdk_based=sdk_based),
} }
return versions[str(name)] return versions[str(name)]
def _DetectVisualStudioVersions(): def _DetectVisualStudioVersions(versions_to_check, force_express):
"""Collect the list of installed visual studio versions. """Collect the list of installed visual studio versions.
Returns: Returns:
@ -204,25 +256,10 @@ def _DetectVisualStudioVersions():
""" """
version_to_year = {'8.0': '2005', '9.0': '2008', '10.0': '2010'} version_to_year = {'8.0': '2005', '9.0': '2008', '10.0': '2010'}
versions = [] versions = []
# For now, prefer versions before VS2010 for version in versions_to_check:
for version in ('9.0', '8.0', '10.0'): # Old method of searching for which VS version is installed
# Check if VS2010 and later is installed as specified by # We don't use the 2010-encouraged-way because we also want to get the
# http://msdn.microsoft.com/en-us/library/bb164659.aspx # path to the binaries, which it doesn't offer.
keys = [r'HKLM\SOFTWARE\Microsoft\DevDiv\VS\Servicing\%s' % version,
r'HKLM\SOFTWARE\Wow6432Node\Microsoft\DevDiv\VS\Servicing\%s' % (
version)]
for index in range(len(keys)):
if not _RegistryKeyExists(keys[index]):
continue
# Check for express
if _RegistryKeyExists(keys[index] + '\\expbsln'):
# Add this one
versions.append(_CreateVersion(version_to_year[version] + 'e'))
else:
# Add this one
versions.append(_CreateVersion(version_to_year[version]))
# Old (pre-VS2010) method of searching for which VS version is installed
keys = [r'HKLM\Software\Microsoft\VisualStudio\%s' % version, keys = [r'HKLM\Software\Microsoft\VisualStudio\%s' % version,
r'HKLM\Software\Wow6432Node\Microsoft\VisualStudio\%s' % version, r'HKLM\Software\Wow6432Node\Microsoft\VisualStudio\%s' % version,
r'HKLM\Software\Microsoft\VCExpress\%s' % version, r'HKLM\Software\Microsoft\VCExpress\%s' % version,
@ -232,13 +269,28 @@ def _DetectVisualStudioVersions():
if not path: if not path:
continue continue
# Check for full. # Check for full.
if os.path.exists(os.path.join(path, 'devenv.exe')): full_path = os.path.join(path, 'devenv.exe')
express_path = os.path.join(path, 'vcexpress.exe')
if not force_express and os.path.exists(full_path):
# Add this one. # Add this one.
versions.append(_CreateVersion(version_to_year[version])) versions.append(_CreateVersion(version_to_year[version],
os.path.join(path, '..', '..')))
# Check for express. # Check for express.
elif os.path.exists(os.path.join(path, 'vcexpress.exe')): elif os.path.exists(express_path):
# Add this one. # Add this one.
versions.append(_CreateVersion(version_to_year[version] + 'e')) versions.append(_CreateVersion(version_to_year[version] + 'e',
os.path.join(path, '..', '..')))
# The old method above does not work when only SDK is installed.
keys = [r'HKLM\Software\Microsoft\VisualStudio\SxS\VC7',
r'HKLM\Software\Wow6432Node\Microsoft\VisualStudio\SxS\VC7']
for index in range(len(keys)):
path = _RegistryGetValue(keys[index], version)
if not path:
continue
versions.append(_CreateVersion(version_to_year[version] + 'e',
os.path.join(path, '..'), sdk_based=True))
return versions return versions
@ -253,12 +305,21 @@ def SelectVisualStudioVersion(version='auto'):
# In auto mode, check environment variable for override. # In auto mode, check environment variable for override.
if version == 'auto': if version == 'auto':
version = os.environ.get('GYP_MSVS_VERSION', 'auto') version = os.environ.get('GYP_MSVS_VERSION', 'auto')
# In auto mode, pick the most preferred version present. version_map = {
if version == 'auto': 'auto': ('10.0', '9.0', '8.0'),
versions = _DetectVisualStudioVersions() '2005': ('8.0',),
if not versions: '2005e': ('8.0',),
# Default to 2005. '2008': ('9.0',),
return _CreateVersion('2005') '2008e': ('9.0',),
return versions[0] '2010': ('10.0',),
# Convert version string into a version object. '2010e': ('10.0',),
return _CreateVersion(version) }
version = str(version)
versions = _DetectVisualStudioVersions(version_map[version], 'e' in version)
if not versions:
if version == 'auto':
# Default to 2005 if we couldn't find anything
return _CreateVersion('2005', None)
else:
return _CreateVersion(version, None)
return versions[0]

4
tools/gyp/pylib/gyp/SCons.py

@ -1,4 +1,4 @@
# Copyright (c) 2011 Google Inc. All rights reserved. # Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be # Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file. # found in the LICENSE file.
@ -84,7 +84,7 @@ class NoneTarget(TargetBase):
""" """
A GYP target type of 'none', implicitly or explicitly. A GYP target type of 'none', implicitly or explicitly.
""" """
def write_target(self, fp, pre=''): def write_target(self, fp, src_dir='', pre=''):
fp.write('\ntarget_files.extend(input_files)\n') fp.write('\ntarget_files.extend(input_files)\n')

56
tools/gyp/pylib/gyp/common.py

@ -355,15 +355,16 @@ def GetFlavor(params):
'sunos5': 'solaris', 'sunos5': 'solaris',
'freebsd7': 'freebsd', 'freebsd7': 'freebsd',
'freebsd8': 'freebsd', 'freebsd8': 'freebsd',
'freebsd9': 'freebsd',
} }
flavor = flavors.get(sys.platform, 'linux') flavor = flavors.get(sys.platform, 'linux')
return params.get('flavor', flavor) return params.get('flavor', flavor)
def CopyTool(flavor, out_path): def CopyTool(flavor, out_path):
"""Finds (mac|sun)_tool.gyp in the gyp directory and copies it """Finds (mac|sun|win)_tool.gyp in the gyp directory and copies it
to |out_path|.""" to |out_path|."""
prefix = { 'solaris': 'sun', 'mac': 'mac' }.get(flavor, None) prefix = { 'solaris': 'sun', 'mac': 'mac', 'win': 'win' }.get(flavor, None)
if not prefix: if not prefix:
return return
@ -391,7 +392,7 @@ def CopyTool(flavor, out_path):
def uniquer(seq, idfun=None): def uniquer(seq, idfun=None):
if idfun is None: if idfun is None:
def idfun(x): return x idfun = lambda x: x
seen = {} seen = {}
result = [] result = []
for item in seq: for item in seq:
@ -400,3 +401,52 @@ def uniquer(seq, idfun=None):
seen[marker] = 1 seen[marker] = 1
result.append(item) result.append(item)
return result return result
class CycleError(Exception):
"""An exception raised when an unexpected cycle is detected."""
def __init__(self, nodes):
self.nodes = nodes
def __str__(self):
return 'CycleError: cycle involving: ' + str(self.nodes)
def TopologicallySorted(graph, get_edges):
"""Topologically sort based on a user provided edge definition.
Args:
graph: A list of node names.
get_edges: A function mapping from node name to a hashable collection
of node names which this node has outgoing edges to.
Returns:
A list containing all of the node in graph in topological order.
It is assumed that calling get_edges once for each node and caching is
cheaper than repeatedly calling get_edges.
Raises:
CycleError in the event of a cycle.
Example:
graph = {'a': '$(b) $(c)', 'b': 'hi', 'c': '$(b)'}
def GetEdges(node):
return re.findall(r'\$\(([^))]\)', graph[node])
print TopologicallySorted(graph.keys(), GetEdges)
==>
['a', 'c', b']
"""
get_edges = memoize(get_edges)
visited = set()
visiting = set()
ordered_nodes = []
def Visit(node):
if node in visiting:
raise CycleError(visiting)
if node in visited:
return
visited.add(node)
visiting.add(node)
for neighbor in get_edges(node):
Visit(neighbor)
visiting.remove(node)
ordered_nodes.insert(0, node)
for node in sorted(graph):
Visit(node)
return ordered_nodes

44
tools/gyp/pylib/gyp/common_test.py

@ -0,0 +1,44 @@
#!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Unit tests for the common.py file."""
import gyp.common
import unittest
class TestTopologicallySorted(unittest.TestCase):
def test_Valid(self):
"""Test that sorting works on a valid graph with one possible order."""
graph = {
'a': ['b', 'c'],
'b': [],
'c': ['d'],
'd': ['b'],
}
def GetEdge(node):
return tuple(graph[node])
self.assertEqual(
gyp.common.TopologicallySorted(graph.keys(), GetEdge),
['a', 'c', 'd', 'b'])
def test_Cycle(self):
"""Test that an exception is thrown on a cyclic graph."""
graph = {
'a': ['b'],
'b': ['c'],
'c': ['d'],
'd': ['a'],
}
def GetEdge(node):
return tuple(graph[node])
self.assertRaises(
gyp.common.CycleError, gyp.common.TopologicallySorted,
graph.keys(), GetEdge)
if __name__ == '__main__':
unittest.main()

2
tools/gyp/pylib/gyp/generator/dump_dependency_json.py

@ -8,6 +8,8 @@ import gyp.common
import json import json
import sys import sys
generator_supports_multiple_toolsets = True
generator_wants_static_library_dependencies_adjusted = False generator_wants_static_library_dependencies_adjusted = False
generator_default_variables = { generator_default_variables = {

271
tools/gyp/pylib/gyp/generator/eclipse.py

@ -0,0 +1,271 @@
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""GYP backend that generates Eclipse CDT settings files.
This backend DOES NOT generate Eclipse CDT projects. Instead, it generates XML
files that can be imported into an Eclipse CDT project. The XML file contains a
list of include paths and symbols (i.e. defines).
Because a full .cproject definition is not created by this generator, it's not
possible to properly define the include dirs and symbols for each file
individually. Instead, one set of includes/symbols is generated for the entire
project. This works fairly well (and is a vast improvement in general), but may
still result in a few indexer issues here and there.
This generator has no automated tests, so expect it to be broken.
"""
import os.path
import subprocess
import gyp
import gyp.common
import shlex
generator_wants_static_library_dependencies_adjusted = False

# Some gyp steps fail if these directory variables are empty(!), so give each
# of them a harmless placeholder value.
generator_default_variables = dict.fromkeys(
    ['INTERMEDIATE_DIR', 'PRODUCT_DIR', 'LIB_DIR', 'SHARED_LIB_DIR'],
    'dir')

# These rule/prefix/suffix variables are unused by this generator; define them
# all as empty strings so variable expansion succeeds.
generator_default_variables.update(dict.fromkeys(
    ['RULE_INPUT_PATH', 'RULE_INPUT_ROOT', 'RULE_INPUT_NAME',
     'RULE_INPUT_DIRNAME', 'RULE_INPUT_EXT',
     'EXECUTABLE_PREFIX', 'EXECUTABLE_SUFFIX',
     'STATIC_LIB_PREFIX', 'STATIC_LIB_SUFFIX',
     'SHARED_LIB_PREFIX', 'SHARED_LIB_SUFFIX'],
    ''))

# Include dirs will occasionally use the SHARED_INTERMEDIATE_DIR variable as
# part of the path when dealing with generated headers.  This placeholder is
# replaced dynamically for each configuration.
generator_default_variables['SHARED_INTERMEDIATE_DIR'] = \
    '$SHARED_INTERMEDIATES_DIR'
def CalculateVariables(default_variables, params):
  """Seed |default_variables| with generator flags and the detected OS.

  Existing entries win: setdefault never overwrites a value supplied by the
  caller.
  """
  flags = params.get('generator_flags', {})
  for name, value in flags.items():
    default_variables.setdefault(name, value)
  default_variables.setdefault('OS', gyp.common.GetFlavor(params))
def CalculateGeneratorInputInfo(params):
  """Calculate the generator specific info that gets fed to input (called by
  gyp)."""
  flags = params.get('generator_flags', {})
  if not flags.get('adjust_static_libraries', False):
    return
  # Flip the module-level flag so input processing adjusts static libraries.
  global generator_wants_static_library_dependencies_adjusted
  generator_wants_static_library_dependencies_adjusted = True
def GetAllIncludeDirectories(target_list, target_dicts,
                             shared_intermediates_dir, config_name):
  """Calculate the set of include directories to be used.

  Args:
    target_list: qualified target names ('path/to/file.gyp:target').
    target_dicts: mapping from qualified target name to its target dict.
    shared_intermediates_dir: replacement for $SHARED_INTERMEDIATES_DIR.
    config_name: configuration to inspect; targets lacking it are skipped.

  Returns:
    A list including all the include_dir's specified for every target followed
    by any include directories that were added as cflag compiler options.
  """

  gyp_includes_set = set()
  compiler_includes_list = []

  for target_name in target_list:
    target = target_dicts[target_name]
    if config_name in target['configurations']:
      config = target['configurations'][config_name]

      # Look for any include dirs that were explicitly added via cflags. This
      # may be done in gyp files to force certain includes to come at the end.
      # TODO(jgreenwald): Change the gyp files to not abuse cflags for this,
      # and remove this.
      # NOTE: use .get() so a config without 'cflags' does not raise KeyError.
      for cflag in config.get('cflags', []):
        if cflag.startswith('-I'):
          include_dir = cflag[2:]
          if include_dir and include_dir not in compiler_includes_list:
            compiler_includes_list.append(include_dir)

      # Find standard gyp include dirs.  ('k' in d replaces the Python-2-only
      # dict.has_key(), which does not exist on Python 3.)
      for include_dir in config.get('include_dirs', []):
        include_dir = include_dir.replace('$SHARED_INTERMEDIATES_DIR',
                                          shared_intermediates_dir)
        if not os.path.isabs(include_dir):
          # Relative include dirs are relative to the gyp file's directory.
          base_dir = os.path.dirname(target_name)
          include_dir = base_dir + '/' + include_dir
          include_dir = os.path.abspath(include_dir)
        if include_dir not in gyp_includes_set:
          gyp_includes_set.add(include_dir)

  # Generate a list that has all the include dirs: gyp dirs sorted first,
  # then cflag-derived dirs in their original order (deliberately last).
  all_includes_list = sorted(gyp_includes_set)
  for compiler_include in compiler_includes_list:
    if compiler_include not in gyp_includes_set:
      all_includes_list.append(compiler_include)

  # All done.
  return all_includes_list
def GetCompilerPath(target_list, target_dicts, data):
  """Determine a command that can be used to invoke the compiler.

  Returns:
    If this is a gyp project that has explicit make settings, try to determine
    the compiler from that.  Otherwise, see if a compiler was specified via the
    CC_target environment variable.
  """

  # First, see if the compiler is configured in make's settings.
  # NOTE(review): make_global_settings is iterated as (key, value) pairs —
  # presumably a sequence of two-element entries; verify against input.py.
  build_file = gyp.common.ParseQualifiedTarget(target_list[0])[0]
  settings = data[build_file].get('make_global_settings', {})
  for key, value in settings:
    if key in ('CC', 'CXX'):
      return value

  # Check to see if the compiler was specified as an environment variable.
  for env_name in ('CC_target', 'CC', 'CXX'):
    candidate = os.environ.get(env_name)
    if candidate:
      return candidate

  # Nothing configured anywhere: fall back to plain gcc.
  return 'gcc'
def GetAllDefines(target_list, target_dicts, data, config_name):
  """Calculate the defines for a project.

  Returns:
    A dict that includes explicit defines declared in gyp files along with all
    of the default defines that the compiler uses.
  """

  # Get defines declared in the gyp files.
  all_defines = {}
  for target_name in target_list:
    target = target_dicts[target_name]

    if config_name in target['configurations']:
      config = target['configurations'][config_name]
      # .get() guards against a config with no 'defines' entry.
      for define in config.get('defines', []):
        # 'NAME=VALUE' or bare 'NAME' (which conventionally means '1').
        split_define = define.split('=', 1)
        if len(split_define) == 1:
          split_define.append('1')
        if split_define[0].strip() in all_defines:
          # Already defined
          continue

        all_defines[split_define[0].strip()] = split_define[1].strip()

  # Get default compiler defines (if possible).
  cc_target = GetCompilerPath(target_list, target_dicts, data)
  if cc_target:
    # Run the preprocessor over empty input; '-dM' dumps each predefined macro
    # as a '#define NAME VALUE' line.  universal_newlines=True keeps the
    # output as text (str) on both Python 2 and 3, so split('\n') works.
    command = shlex.split(cc_target)
    command.extend(['-E', '-dM', '-'])
    cpp_proc = subprocess.Popen(args=command, cwd='.',
                                stdin=subprocess.PIPE,
                                stdout=subprocess.PIPE,
                                universal_newlines=True)
    cpp_output = cpp_proc.communicate()[0]
    cpp_lines = cpp_output.split('\n')
    for cpp_line in cpp_lines:
      if not cpp_line.strip():
        continue
      # Split '#define NAME VALUE' into at most three parts; a macro with no
      # value gets the conventional '1'.
      cpp_line_parts = cpp_line.split(' ', 2)
      key = cpp_line_parts[1]
      if len(cpp_line_parts) >= 3:
        val = cpp_line_parts[2]
      else:
        val = '1'
      all_defines[key] = val

  return all_defines
def WriteIncludePaths(out, eclipse_langs, include_dirs):
  """Write the includes section of a CDT settings export file."""
  out.write(' <section name="org.eclipse.cdt.internal.ui.wizards.'
            'settingswizards.IncludePaths">\n')
  out.write(' <language name="holder for library settings"></language>\n')
  for lang in eclipse_langs:
    out.write(' <language name="%s">\n' % lang)
    # Every language repeats the full include-path list.
    entries = [' <includepath workspace_path="false">%s</includepath>\n' % d
               for d in include_dirs]
    out.writelines(entries)
    out.write(' </language>\n')
  out.write(' </section>\n')
def WriteMacros(out, eclipse_langs, defines):
  """Write the macros section of a CDT settings export file.

  Args:
    out: writable file-like object.
    eclipse_langs: language names; the macro list is repeated for each.
    defines: mapping of macro name -> value, emitted in sorted-key order.
  """
  out.write(' <section name="org.eclipse.cdt.internal.ui.wizards.'
            'settingswizards.Macros">\n')
  out.write(' <language name="holder for library settings"></language>\n')
  for lang in eclipse_langs:
    out.write(' <language name="%s">\n' % lang)
    # sorted(defines) iterates keys deterministically and, unlike the
    # Python-2-only dict.iterkeys(), works on both Python 2 and 3.
    for key in sorted(defines):
      out.write(' <macro><name>%s</name><value>%s</value></macro>\n' %
                (key, defines[key]))
    out.write(' </language>\n')
  out.write(' </section>\n')
def GenerateOutputForConfig(target_list, target_dicts, data, params,
                            config_name):
  """Write eclipse-cdt-settings.xml for one build configuration."""
  options = params['options']
  generator_flags = params.get('generator_flags', {})

  # build_dir: relative path from source root to our output files.
  # e.g. "out/Debug"
  build_dir = os.path.join(generator_flags.get('output_dir', 'out'),
                           config_name)

  toplevel_build = os.path.join(options.toplevel_dir, build_dir)
  shared_intermediate_dir = os.path.join(toplevel_build, 'obj', 'gen')

  if not os.path.exists(toplevel_build):
    os.makedirs(toplevel_build)

  # Use a context manager so the file is closed (and buffered output flushed)
  # even if one of the writer helpers raises part-way through.
  with open(os.path.join(toplevel_build, 'eclipse-cdt-settings.xml'),
            'w') as out:
    out.write('<?xml version="1.0" encoding="UTF-8"?>\n')
    out.write('<cdtprojectproperties>\n')

    eclipse_langs = ['C++ Source File', 'C Source File',
                     'Assembly Source File', 'GNU C++', 'GNU C', 'Assembly']
    include_dirs = GetAllIncludeDirectories(target_list, target_dicts,
                                            shared_intermediate_dir,
                                            config_name)
    WriteIncludePaths(out, eclipse_langs, include_dirs)
    defines = GetAllDefines(target_list, target_dicts, data, config_name)
    WriteMacros(out, eclipse_langs, defines)

    out.write('</cdtprojectproperties>\n')
def GenerateOutput(target_list, target_dicts, data, params):
  """Generate an XML settings file that can be imported into a CDT project.

  Writes one file per configuration, or only the configuration named by the
  'config' generator flag when that flag is set.

  Raises:
    NotImplementedError: if --generator_output was requested (unsupported).
  """

  if params['options'].generator_output:
    # Call-form raise: the old 'raise X, msg' statement is a SyntaxError on
    # Python 3 and the call form is equally valid on Python 2.
    raise NotImplementedError("--generator_output not implemented for eclipse")

  user_config = params.get('generator_flags', {}).get('config', None)
  if user_config:
    GenerateOutputForConfig(target_list, target_dicts, data, params,
                            user_config)
  else:
    config_names = target_dicts[target_list[0]]['configurations'].keys()
    for config_name in config_names:
      GenerateOutputForConfig(target_list, target_dicts, data, params,
                              config_name)

178
tools/gyp/pylib/gyp/generator/make.py

@ -43,8 +43,6 @@ generator_default_variables = {
'RULE_INPUT_PATH': '$(abspath $<)', 'RULE_INPUT_PATH': '$(abspath $<)',
'RULE_INPUT_EXT': '$(suffix $<)', 'RULE_INPUT_EXT': '$(suffix $<)',
'RULE_INPUT_NAME': '$(notdir $<)', 'RULE_INPUT_NAME': '$(notdir $<)',
# This appears unused --- ?
'CONFIGURATION_NAME': '$(BUILDTYPE)', 'CONFIGURATION_NAME': '$(BUILDTYPE)',
} }
@ -54,6 +52,11 @@ generator_supports_multiple_toolsets = True
# Request sorted dependencies in the order from dependents to dependencies. # Request sorted dependencies in the order from dependents to dependencies.
generator_wants_sorted_dependencies = False generator_wants_sorted_dependencies = False
# Placates pylint.
generator_additional_non_configuration_keys = []
generator_additional_path_sections = []
generator_extra_sources_for_rules = []
def CalculateVariables(default_variables, params): def CalculateVariables(default_variables, params):
"""Calculate additional variables for use in the build (called by gyp).""" """Calculate additional variables for use in the build (called by gyp)."""
@ -79,7 +82,6 @@ def CalculateVariables(default_variables, params):
global generator_extra_sources_for_rules global generator_extra_sources_for_rules
generator_extra_sources_for_rules = getattr(xcode_generator, generator_extra_sources_for_rules = getattr(xcode_generator,
'generator_extra_sources_for_rules', []) 'generator_extra_sources_for_rules', [])
global COMPILABLE_EXTENSIONS
COMPILABLE_EXTENSIONS.update({'.m': 'objc', '.mm' : 'objcxx'}) COMPILABLE_EXTENSIONS.update({'.m': 'objc', '.mm' : 'objcxx'})
else: else:
operating_system = flavor operating_system = flavor
@ -438,6 +440,11 @@ endef
.PHONY: %(default_target)s .PHONY: %(default_target)s
%(default_target)s: %(default_target)s:
# make looks for ways to re-generate included makefiles, but in our case, we
# don't have a direct way. Explicitly telling make that it has nothing to do
# for them makes it go faster.
%%.d: ;
# Use FORCE_DO_CMD to force a target to run. Should be coupled with # Use FORCE_DO_CMD to force a target to run. Should be coupled with
# do_cmd. # do_cmd.
.PHONY: FORCE_DO_CMD .PHONY: FORCE_DO_CMD
@ -523,18 +530,7 @@ all:
# target in our tree. Only consider the ones with .d (dependency) info: # target in our tree. Only consider the ones with .d (dependency) info:
d_files := $(wildcard $(foreach f,$(all_deps),$(depsdir)/$(f).d)) d_files := $(wildcard $(foreach f,$(all_deps),$(depsdir)/$(f).d))
ifneq ($(d_files),) ifneq ($(d_files),)
# Rather than include each individual .d file, concatenate them into a include $(d_files)
# single file which make is able to load faster. We split this into
# commands that take 512 files at a time to avoid overflowing the
# command line.
$(shell cat $(wordlist 1,512,$(d_files)) > $(depsdir)/all.deps)
%(generate_all_deps)s
# make looks for ways to re-generate included makefiles, but in our case, we
# don't have a direct way. Explicitly telling make that it has nothing to do
# for them makes it go faster.
$(depsdir)/all.deps: ;
include $(depsdir)/all.deps
endif endif
""" """
@ -589,7 +585,9 @@ def EscapeCppDefine(s):
"""Escapes a CPP define so that it will reach the compiler unaltered.""" """Escapes a CPP define so that it will reach the compiler unaltered."""
s = EscapeShellArgument(s) s = EscapeShellArgument(s)
s = EscapeMakeVariableExpansion(s) s = EscapeMakeVariableExpansion(s)
return s # '#' characters must be escaped even embedded in a string, else Make will
# treat it as the start of a comment.
return s.replace('#', r'\#')
def QuoteIfNecessary(string): def QuoteIfNecessary(string):
@ -602,8 +600,7 @@ def QuoteIfNecessary(string):
def StringToMakefileVariable(string): def StringToMakefileVariable(string):
"""Convert a string to a value that is acceptable as a make variable name.""" """Convert a string to a value that is acceptable as a make variable name."""
# TODO: replace other metacharacters that we encounter. return re.sub('[^a-zA-Z0-9_]', '_', string)
return re.sub('[ {}$]', '_', string)
srcdir_prefix = '' srcdir_prefix = ''
@ -620,6 +617,21 @@ def QuoteSpaces(s, quote=r'\ '):
return s.replace(' ', quote) return s.replace(' ', quote)
def InvertRelativePath(path):
"""Given a relative path like foo/bar, return the inverse relative path:
the path from the relative path back to the origin dir.
E.g. os.path.normpath(os.path.join(path, InvertRelativePath(path)))
should always produce the empty string."""
if not path:
return path
# Only need to handle relative paths into subdirectories for now.
assert '..' not in path, path
depth = len(path.split(os.path.sep))
return os.path.sep.join(['..'] * depth)
# Map from qualified target to path to output. # Map from qualified target to path to output.
target_outputs = {} target_outputs = {}
# Map from qualified target to any linkable output. A subset # Map from qualified target to any linkable output. A subset
@ -638,8 +650,6 @@ class MakefileWriter:
def __init__(self, generator_flags, flavor): def __init__(self, generator_flags, flavor):
self.generator_flags = generator_flags self.generator_flags = generator_flags
self.flavor = flavor self.flavor = flavor
# Keep track of the total number of outputs for this makefile.
self._num_outputs = 0
self.suffix_rules_srcdir = {} self.suffix_rules_srcdir = {}
self.suffix_rules_objdir1 = {} self.suffix_rules_objdir1 = {}
@ -664,10 +674,6 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
""" % (ext, COMPILABLE_EXTENSIONS[ext]))}) """ % (ext, COMPILABLE_EXTENSIONS[ext]))})
def NumOutputs(self):
return self._num_outputs
def Write(self, qualified_target, base_path, output_filename, spec, configs, def Write(self, qualified_target, base_path, output_filename, spec, configs,
part_of_all): part_of_all):
"""The main entry point: writes a .mk file for a single target. """The main entry point: writes a .mk file for a single target.
@ -686,6 +692,7 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
self.fp.write(header) self.fp.write(header)
self.qualified_target = qualified_target
self.path = base_path self.path = base_path
self.target = spec['target_name'] self.target = spec['target_name']
self.type = spec['type'] self.type = spec['type']
@ -753,7 +760,8 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
configs, deps, all_sources, extra_outputs, configs, deps, all_sources, extra_outputs,
extra_link_deps, part_of_all, extra_link_deps, part_of_all,
gyp.xcode_emulation.MacPrefixHeader( gyp.xcode_emulation.MacPrefixHeader(
self.xcode_settings, self.Absolutify, self.Pchify)) self.xcode_settings, lambda p: Sourceify(self.Absolutify(p)),
self.Pchify))
sources = filter(Compilable, all_sources) sources = filter(Compilable, all_sources)
if sources: if sources:
self.WriteLn(SHARED_HEADER_SUFFIX_RULES_COMMENT1) self.WriteLn(SHARED_HEADER_SUFFIX_RULES_COMMENT1)
@ -831,7 +839,8 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
part_of_all: flag indicating this target is part of 'all' part_of_all: flag indicating this target is part of 'all'
""" """
for action in actions: for action in actions:
name = self.target + '_' + StringToMakefileVariable(action['action_name']) name = StringToMakefileVariable('%s_%s' % (self.qualified_target,
action['action_name']))
self.WriteLn('### Rules for action "%s":' % action['action_name']) self.WriteLn('### Rules for action "%s":' % action['action_name'])
inputs = action['inputs'] inputs = action['inputs']
outputs = action['outputs'] outputs = action['outputs']
@ -888,7 +897,7 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
# Same for environment. # Same for environment.
self.WriteLn("%s: obj := $(abs_obj)" % QuoteSpaces(outputs[0])) self.WriteLn("%s: obj := $(abs_obj)" % QuoteSpaces(outputs[0]))
self.WriteLn("%s: builddir := $(abs_builddir)" % QuoteSpaces(outputs[0])) self.WriteLn("%s: builddir := $(abs_builddir)" % QuoteSpaces(outputs[0]))
self.WriteXcodeEnv(outputs[0], self.GetXcodeEnv()) self.WriteSortedXcodeEnv(outputs[0], self.GetSortedXcodeEnv())
for input in inputs: for input in inputs:
assert ' ' not in input, ( assert ' ' not in input, (
@ -898,7 +907,7 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
"Spaces in action output filenames not supported (%s)" % output) "Spaces in action output filenames not supported (%s)" % output)
# See the comment in WriteCopies about expanding env vars. # See the comment in WriteCopies about expanding env vars.
env = self.GetXcodeEnv() env = self.GetSortedXcodeEnv()
outputs = [gyp.xcode_emulation.ExpandEnvVars(o, env) for o in outputs] outputs = [gyp.xcode_emulation.ExpandEnvVars(o, env) for o in outputs]
inputs = [gyp.xcode_emulation.ExpandEnvVars(i, env) for i in inputs] inputs = [gyp.xcode_emulation.ExpandEnvVars(i, env) for i in inputs]
@ -925,7 +934,8 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
part_of_all: flag indicating this target is part of 'all' part_of_all: flag indicating this target is part of 'all'
""" """
for rule in rules: for rule in rules:
name = self.target + '_' + StringToMakefileVariable(rule['rule_name']) name = StringToMakefileVariable('%s_%s' % (self.qualified_target,
rule['rule_name']))
count = 0 count = 0
self.WriteLn('### Generated for rule %s:' % name) self.WriteLn('### Generated for rule %s:' % name)
@ -941,12 +951,6 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
rule_source_dirname) rule_source_dirname)
for out in rule['outputs']] for out in rule['outputs']]
# If an output is just the file name, turn it into a path so
# FixupArgPath() will know to Absolutify() it.
outputs = map(
lambda x : os.path.dirname(x) and x or os.path.join('.', x),
outputs)
for out in outputs: for out in outputs:
dir = os.path.dirname(out) dir = os.path.dirname(out)
if dir: if dir:
@ -955,7 +959,6 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
extra_sources += outputs extra_sources += outputs
if int(rule.get('process_outputs_as_mac_bundle_resources', False)): if int(rule.get('process_outputs_as_mac_bundle_resources', False)):
extra_mac_bundle_resources += outputs extra_mac_bundle_resources += outputs
all_outputs += outputs
inputs = map(Sourceify, map(self.Absolutify, [rule_source] + inputs = map(Sourceify, map(self.Absolutify, [rule_source] +
rule.get('inputs', []))) rule.get('inputs', [])))
actions = ['$(call do_cmd,%s_%d)' % (name, count)] actions = ['$(call do_cmd,%s_%d)' % (name, count)]
@ -969,6 +972,8 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
# amount of pain. # amount of pain.
actions += ['@touch --no-create $@'] actions += ['@touch --no-create $@']
outputs = map(self.Absolutify, outputs)
all_outputs += outputs
# Only write the 'obj' and 'builddir' rules for the "primary" output # Only write the 'obj' and 'builddir' rules for the "primary" output
# (:1); it's superfluous for the "extra outputs", and this avoids # (:1); it's superfluous for the "extra outputs", and this avoids
# accidentally writing duplicate dummy rules for those outputs. # accidentally writing duplicate dummy rules for those outputs.
@ -979,7 +984,6 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
assert ' ' not in output, ( assert ' ' not in output, (
"Spaces in rule filenames not yet supported (%s)" % output) "Spaces in rule filenames not yet supported (%s)" % output)
self.WriteLn('all_deps += %s' % ' '.join(outputs)) self.WriteLn('all_deps += %s' % ' '.join(outputs))
self._num_outputs += len(outputs)
action = [self.ExpandInputRoot(ac, rule_source_root, action = [self.ExpandInputRoot(ac, rule_source_root,
rule_source_dirname) rule_source_dirname)
@ -1041,7 +1045,7 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
""" """
self.WriteLn('### Generated for copy rule.') self.WriteLn('### Generated for copy rule.')
variable = self.target + '_copies' variable = StringToMakefileVariable(self.qualified_target + '_copies')
outputs = [] outputs = []
for copy in copies: for copy in copies:
for path in copy['files']: for path in copy['files']:
@ -1060,7 +1064,7 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
# As a workaround, manually expand variables at gyp time. Since 'copies' # As a workaround, manually expand variables at gyp time. Since 'copies'
# can't run scripts, there's no need to write the env then. # can't run scripts, there's no need to write the env then.
# WriteDoCmd() will escape spaces for .d files. # WriteDoCmd() will escape spaces for .d files.
env = self.GetXcodeEnv() env = self.GetSortedXcodeEnv()
output = gyp.xcode_emulation.ExpandEnvVars(output, env) output = gyp.xcode_emulation.ExpandEnvVars(output, env)
path = gyp.xcode_emulation.ExpandEnvVars(path, env) path = gyp.xcode_emulation.ExpandEnvVars(path, env)
self.WriteDoCmd([output], [path], 'copy', part_of_all) self.WriteDoCmd([output], [path], 'copy', part_of_all)
@ -1086,7 +1090,7 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
"""Write Makefile code for bundle Info.plist files.""" """Write Makefile code for bundle Info.plist files."""
info_plist, out, defines, extra_env = gyp.xcode_emulation.GetMacInfoPlist( info_plist, out, defines, extra_env = gyp.xcode_emulation.GetMacInfoPlist(
generator_default_variables['PRODUCT_DIR'], self.xcode_settings, generator_default_variables['PRODUCT_DIR'], self.xcode_settings,
self.Absolutify) lambda p: Sourceify(self.Absolutify(p)))
if not info_plist: if not info_plist:
return return
if defines: if defines:
@ -1102,7 +1106,8 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
'@plutil -convert xml1 $@ $@']) '@plutil -convert xml1 $@ $@'])
info_plist = intermediate_plist info_plist = intermediate_plist
# plists can contain envvars and substitute them into the file. # plists can contain envvars and substitute them into the file.
self.WriteXcodeEnv(out, self.GetXcodeEnv(additional_settings=extra_env)) self.WriteSortedXcodeEnv(
out, self.GetSortedXcodeEnv(additional_settings=extra_env))
self.WriteDoCmd([out], [info_plist], 'mac_tool,,,copy-info-plist', self.WriteDoCmd([out], [info_plist], 'mac_tool,,,copy-info-plist',
part_of_all=True) part_of_all=True)
bundle_deps.append(out) bundle_deps.append(out)
@ -1165,7 +1170,6 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
self.WriteLn('# Add to the list of files we specially track ' self.WriteLn('# Add to the list of files we specially track '
'dependencies for.') 'dependencies for.')
self.WriteLn('all_deps += $(OBJS)') self.WriteLn('all_deps += $(OBJS)')
self._num_outputs += len(objs)
self.WriteLn() self.WriteLn()
# Make sure our dependencies are built first. # Make sure our dependencies are built first.
@ -1225,7 +1229,7 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
"$(CFLAGS_CC_$(BUILDTYPE)) " "$(CFLAGS_CC_$(BUILDTYPE)) "
"$(CFLAGS_OBJCC_$(BUILDTYPE))") "$(CFLAGS_OBJCC_$(BUILDTYPE))")
self.WritePchTargets(precompiled_header.GetGchBuildCommands()) self.WritePchTargets(precompiled_header.GetPchBuildCommands())
# If there are any object files in our input file list, link them into our # If there are any object files in our input file list, link them into our
# output. # output.
@ -1384,8 +1388,7 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
self.WriteMakeRule(extra_outputs, deps, self.WriteMakeRule(extra_outputs, deps,
comment=('Preserve order dependency of ' comment=('Preserve order dependency of '
'special output on deps.'), 'special output on deps.'),
order_only = True, order_only = True)
multiple_output_trick = False)
target_postbuilds = {} target_postbuilds = {}
if self.type != 'none': if self.type != 'none':
@ -1393,13 +1396,17 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
config = configs[configname] config = configs[configname]
if self.flavor == 'mac': if self.flavor == 'mac':
ldflags = self.xcode_settings.GetLdflags(configname, ldflags = self.xcode_settings.GetLdflags(configname,
generator_default_variables['PRODUCT_DIR'], self.Absolutify) generator_default_variables['PRODUCT_DIR'],
lambda p: Sourceify(self.Absolutify(p)))
# TARGET_POSTBUILDS_$(BUILDTYPE) is added to postbuilds later on. # TARGET_POSTBUILDS_$(BUILDTYPE) is added to postbuilds later on.
gyp_to_build = InvertRelativePath(self.path)
target_postbuild = self.xcode_settings.GetTargetPostbuilds( target_postbuild = self.xcode_settings.GetTargetPostbuilds(
configname, configname,
QuoteSpaces(self.output), QuoteSpaces(os.path.normpath(os.path.join(gyp_to_build,
QuoteSpaces(self.output_binary)) self.output))),
QuoteSpaces(os.path.normpath(os.path.join(gyp_to_build,
self.output_binary))))
if target_postbuild: if target_postbuild:
target_postbuilds[configname] = target_postbuild target_postbuilds[configname] = target_postbuild
else: else:
@ -1430,13 +1437,13 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
if target_postbuilds: if target_postbuilds:
postbuilds.append('$(TARGET_POSTBUILDS_$(BUILDTYPE))') postbuilds.append('$(TARGET_POSTBUILDS_$(BUILDTYPE))')
postbuilds.extend( postbuilds.extend(
gyp.xcode_emulation.GetSpecPostbuildCommands(spec, self.Absolutify)) gyp.xcode_emulation.GetSpecPostbuildCommands(spec))
if postbuilds: if postbuilds:
# Envvars may be referenced by TARGET_POSTBUILDS_$(BUILDTYPE), # Envvars may be referenced by TARGET_POSTBUILDS_$(BUILDTYPE),
# so we must output its definition first, since we declare variables # so we must output its definition first, since we declare variables
# using ":=". # using ":=".
self.WriteXcodeEnv(self.output, self.GetXcodePostbuildEnv()) self.WriteSortedXcodeEnv(self.output, self.GetSortedXcodePostbuildEnv())
for configname in target_postbuilds: for configname in target_postbuilds:
self.WriteLn('%s: TARGET_POSTBUILDS_%s := %s' % self.WriteLn('%s: TARGET_POSTBUILDS_%s := %s' %
@ -1444,6 +1451,9 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
configname, configname,
gyp.common.EncodePOSIXShellList(target_postbuilds[configname]))) gyp.common.EncodePOSIXShellList(target_postbuilds[configname])))
# Postbuilds expect to be run in the gyp file's directory, so insert an
# implicit postbuild to cd to there.
postbuilds.insert(0, gyp.common.EncodePOSIXShellList(['cd', self.path]))
for i in xrange(len(postbuilds)): for i in xrange(len(postbuilds)):
if not postbuilds[i].startswith('$'): if not postbuilds[i].startswith('$'):
postbuilds[i] = EscapeShellArgument(postbuilds[i]) postbuilds[i] = EscapeShellArgument(postbuilds[i])
@ -1555,7 +1565,8 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
file_desc = 'executable' file_desc = 'executable'
install_path = self._InstallableTargetInstallPath() install_path = self._InstallableTargetInstallPath()
installable_deps = [self.output] installable_deps = [self.output]
if self.flavor == 'mac' and not 'product_dir' in spec: if (self.flavor == 'mac' and not 'product_dir' in spec and
self.toolset == 'target'):
# On mac, products are created in install_path immediately. # On mac, products are created in install_path immediately.
assert install_path == self.output, '%s != %s' % ( assert install_path == self.output, '%s != %s' % (
install_path, self.output) install_path, self.output)
@ -1614,12 +1625,10 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
# other functions. # other functions.
outputs = [QuoteSpaces(o, SPACE_REPLACEMENT) for o in outputs] outputs = [QuoteSpaces(o, SPACE_REPLACEMENT) for o in outputs]
self.WriteLn('all_deps += %s' % ' '.join(outputs)) self.WriteLn('all_deps += %s' % ' '.join(outputs))
self._num_outputs += len(outputs)
def WriteMakeRule(self, outputs, inputs, actions=None, comment=None, def WriteMakeRule(self, outputs, inputs, actions=None, comment=None,
order_only=False, force=False, phony=False, order_only=False, force=False, phony=False):
multiple_output_trick=True):
"""Write a Makefile rule, with some extra tricks. """Write a Makefile rule, with some extra tricks.
outputs: a list of outputs for the rule (note: this is not directly outputs: a list of outputs for the rule (note: this is not directly
@ -1632,8 +1641,6 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
force: if true, include FORCE_DO_CMD as an order-only dep force: if true, include FORCE_DO_CMD as an order-only dep
phony: if true, the rule does not actually generate the named output, the phony: if true, the rule does not actually generate the named output, the
output is just a name to run the rule output is just a name to run the rule
multiple_output_trick: if true (the default), perform tricks such as dummy
rules to avoid problems with multiple outputs.
""" """
outputs = map(QuoteSpaces, outputs) outputs = map(QuoteSpaces, outputs)
inputs = map(QuoteSpaces, inputs) inputs = map(QuoteSpaces, inputs)
@ -1645,20 +1652,22 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
# TODO(evanm): just make order_only a list of deps instead of these hacks. # TODO(evanm): just make order_only a list of deps instead of these hacks.
if order_only: if order_only:
order_insert = '| ' order_insert = '| '
pick_output = ' '.join(outputs)
else: else:
order_insert = '' order_insert = ''
pick_output = outputs[0]
if force: if force:
force_append = ' FORCE_DO_CMD' force_append = ' FORCE_DO_CMD'
else: else:
force_append = '' force_append = ''
if actions: if actions:
self.WriteLn("%s: TOOLSET := $(TOOLSET)" % outputs[0]) self.WriteLn("%s: TOOLSET := $(TOOLSET)" % outputs[0])
self.WriteLn('%s: %s%s%s' % (outputs[0], order_insert, ' '.join(inputs), self.WriteLn('%s: %s%s%s' % (pick_output, order_insert, ' '.join(inputs),
force_append)) force_append))
if actions: if actions:
for action in actions: for action in actions:
self.WriteLn('\t%s' % action) self.WriteLn('\t%s' % action)
if multiple_output_trick and len(outputs) > 1: if not order_only and len(outputs) > 1:
# If we have more than one output, a rule like # If we have more than one output, a rule like
# foo bar: baz # foo bar: baz
# that for *each* output we must run the action, potentially # that for *each* output we must run the action, potentially
@ -1768,37 +1777,33 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
self.fp.write(text + '\n') self.fp.write(text + '\n')
def GetXcodeEnv(self, additional_settings=None): def GetSortedXcodeEnv(self, additional_settings=None):
return gyp.xcode_emulation.GetXcodeEnv( return gyp.xcode_emulation.GetSortedXcodeEnv(
self.xcode_settings, "$(abs_builddir)", self.xcode_settings, "$(abs_builddir)",
os.path.join("$(abs_srcdir)", self.path), "$(BUILDTYPE)", os.path.join("$(abs_srcdir)", self.path), "$(BUILDTYPE)",
additional_settings) additional_settings)
def GetXcodePostbuildEnv(self): def GetSortedXcodePostbuildEnv(self):
# CHROMIUM_STRIP_SAVE_FILE is a chromium-specific hack. # CHROMIUM_STRIP_SAVE_FILE is a chromium-specific hack.
# TODO(thakis): It would be nice to have some general mechanism instead. # TODO(thakis): It would be nice to have some general mechanism instead.
strip_save_file = self.xcode_settings.GetPerTargetSetting( strip_save_file = self.xcode_settings.GetPerTargetSetting(
'CHROMIUM_STRIP_SAVE_FILE') 'CHROMIUM_STRIP_SAVE_FILE', '')
if strip_save_file: # Even if strip_save_file is empty, explicitly write it. Else a postbuild
strip_save_file = self.Absolutify(strip_save_file) # might pick up an export from an earlier target.
else: return self.GetSortedXcodeEnv(
# Explicitly clear this out, else a postbuild might pick up an export
# from an earlier target.
strip_save_file = ''
return self.GetXcodeEnv(
additional_settings={'CHROMIUM_STRIP_SAVE_FILE': strip_save_file}) additional_settings={'CHROMIUM_STRIP_SAVE_FILE': strip_save_file})
def WriteXcodeEnv(self, target, env): def WriteSortedXcodeEnv(self, target, env):
for k in gyp.xcode_emulation.TopologicallySortedEnvVarKeys(env): for k, v in env:
# For # For
# foo := a\ b # foo := a\ b
# the escaped space does the right thing. For # the escaped space does the right thing. For
# export foo := a\ b # export foo := a\ b
# it does not -- the backslash is written to the env as literal character. # it does not -- the backslash is written to the env as literal character.
# So don't escape spaces in |env[k]|. # So don't escape spaces in |env[k]|.
self.WriteLn('%s: export %s := %s' % (QuoteSpaces(target), k, env[k])) self.WriteLn('%s: export %s := %s' % (QuoteSpaces(target), k, v))
def Objectify(self, path): def Objectify(self, path):
@ -1829,12 +1834,6 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
return os.path.normpath(os.path.join(self.path, path)) return os.path.normpath(os.path.join(self.path, path))
def FixupArgPath(self, arg):
if '/' in arg or '.h.' in arg:
return self.Absolutify(arg)
return arg
def ExpandInputRoot(self, template, expansion, dirname): def ExpandInputRoot(self, template, expansion, dirname):
if '%(INPUT_ROOT)s' not in template and '%(INPUT_DIRNAME)s' not in template: if '%(INPUT_ROOT)s' not in template and '%(INPUT_DIRNAME)s' not in template:
return template return template
@ -1849,7 +1848,8 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
"""Returns the location of the final output for an installable target.""" """Returns the location of the final output for an installable target."""
# Xcode puts shared_library results into PRODUCT_DIR, and some gyp files # Xcode puts shared_library results into PRODUCT_DIR, and some gyp files
# rely on this. Emulate this behavior for mac. # rely on this. Emulate this behavior for mac.
if self.type == 'shared_library' and self.flavor != 'mac': if (self.type == 'shared_library' and
(self.flavor != 'mac' or self.toolset != 'target')):
# Install all shared libs into a common directory (per toolset) for # Install all shared libs into a common directory (per toolset) for
# convenient access with LD_LIBRARY_PATH. # convenient access with LD_LIBRARY_PATH.
return '$(builddir)/lib.%s/%s' % (self.toolset, self.alias) return '$(builddir)/lib.%s/%s' % (self.toolset, self.alias)
@ -2040,7 +2040,6 @@ def GenerateOutput(target_list, target_dicts, data, params):
for target in gyp.common.AllTargets(target_list, target_dicts, build_file): for target in gyp.common.AllTargets(target_list, target_dicts, build_file):
needed_targets.add(target) needed_targets.add(target)
num_outputs = 0
build_files = set() build_files = set()
include_list = set() include_list = set()
for qualified_target in target_list: for qualified_target in target_list:
@ -2081,7 +2080,6 @@ def GenerateOutput(target_list, target_dicts, data, params):
writer = MakefileWriter(generator_flags, flavor) writer = MakefileWriter(generator_flags, flavor)
writer.Write(qualified_target, base_path, output_file, spec, configs, writer.Write(qualified_target, base_path, output_file, spec, configs,
part_of_all=qualified_target in needed_targets) part_of_all=qualified_target in needed_targets)
num_outputs += writer.NumOutputs()
# Our root_makefile lives at the source root. Compute the relative path # Our root_makefile lives at the source root. Compute the relative path
# from there to the output_file for including. # from there to the output_file for including.
@ -2128,22 +2126,6 @@ def GenerateOutput(target_list, target_dicts, data, params):
if generator_flags.get('auto_regeneration', True): if generator_flags.get('auto_regeneration', True):
WriteAutoRegenerationRule(params, root_makefile, makefile_name, build_files) WriteAutoRegenerationRule(params, root_makefile, makefile_name, build_files)
# Write the rule to load dependencies. We batch 512 files at a time to root_makefile.write(SHARED_FOOTER)
# avoid overflowing the command line.
all_deps = ""
for i in range(513, num_outputs, 512):
all_deps += ("""
ifneq ($(word %(start)d,$(d_files)),)
$(shell cat $(wordlist %(start)d,%(end)d,$(d_files)) >> $(depsdir)/all.deps)
endif""" % { 'start': i, 'end': i + 999 })
# Add a check to make sure we tried to process all the .d files.
all_deps += """
ifneq ($(word %(last)d,$(d_files)),)
$(error Found unprocessed dependency files (gyp didn't generate enough rules!))
endif
""" % { 'last': ((num_outputs / 512) + 1) * 512 + 1 }
root_makefile.write(SHARED_FOOTER % { 'generate_all_deps': all_deps })
root_makefile.close() root_makefile.close()

343
tools/gyp/pylib/gyp/generator/msvs.py

@ -42,19 +42,7 @@ generator_default_variables = {
'SHARED_INTERMEDIATE_DIR': '$(OutDir)/obj/global_intermediate', 'SHARED_INTERMEDIATE_DIR': '$(OutDir)/obj/global_intermediate',
'OS': 'win', 'OS': 'win',
'PRODUCT_DIR': '$(OutDir)', 'PRODUCT_DIR': '$(OutDir)',
'LIB_DIR': '$(OutDir)\\lib',
# TODO(jeanluc) The way we currently generate libraries makes Visual
# Studio 2010 unhappy. We get a lot of warnings like:
# warning MSB8012: TargetPath(...\Debug\gles2_c_lib.lib) does not match
# the Library's OutputFile property value (...\Debug\lib\gles2_c_lib.lib).
# This may cause your project to build incorrectly. To correct this,
# please make sure that $(OutDir), $(TargetName) and $(TargetExt) property
# values match the value specified in %(Lib.OutputFile).
# Despite the warnings, this compile correctly. It would be nice to get rid
# of the warnings.
# TODO(jeanluc) I had: 'LIB_DIR': '$(OutDir)lib',
'LIB_DIR': '$(OutDir)/lib',
'RULE_INPUT_ROOT': '$(InputName)', 'RULE_INPUT_ROOT': '$(InputName)',
'RULE_INPUT_DIRNAME': '$(InputDir)', 'RULE_INPUT_DIRNAME': '$(InputDir)',
'RULE_INPUT_EXT': '$(InputExt)', 'RULE_INPUT_EXT': '$(InputExt)',
@ -254,7 +242,7 @@ def _ConfigFullName(config_name, config_data):
def _BuildCommandLineForRuleRaw(spec, cmd, cygwin_shell, has_input_path, def _BuildCommandLineForRuleRaw(spec, cmd, cygwin_shell, has_input_path,
quote_cmd): quote_cmd, do_setup_env):
if [x for x in cmd if '$(InputDir)' in x]: if [x for x in cmd if '$(InputDir)' in x]:
input_dir_preamble = ( input_dir_preamble = (
@ -285,9 +273,10 @@ def _BuildCommandLineForRuleRaw(spec, cmd, cygwin_shell, has_input_path,
#direct_cmd = gyp.common.EncodePOSIXShellList(direct_cmd) #direct_cmd = gyp.common.EncodePOSIXShellList(direct_cmd)
direct_cmd = ' '.join(direct_cmd) direct_cmd = ' '.join(direct_cmd)
# TODO(quote): regularize quoting path names throughout the module # TODO(quote): regularize quoting path names throughout the module
cmd = ( cmd = ''
'call "$(ProjectDir)%(cygwin_dir)s\\setup_env.bat" && ' if do_setup_env:
'set CYGWIN=nontsec&& ') cmd += 'call "$(ProjectDir)%(cygwin_dir)s\\setup_env.bat" && '
cmd += 'set CYGWIN=nontsec&& '
if direct_cmd.find('NUMBER_OF_PROCESSORS') >= 0: if direct_cmd.find('NUMBER_OF_PROCESSORS') >= 0:
cmd += 'set /a NUMBER_OF_PROCESSORS_PLUS_1=%%NUMBER_OF_PROCESSORS%%+1&& ' cmd += 'set /a NUMBER_OF_PROCESSORS_PLUS_1=%%NUMBER_OF_PROCESSORS%%+1&& '
if direct_cmd.find('INTDIR') >= 0: if direct_cmd.find('INTDIR') >= 0:
@ -319,10 +308,7 @@ def _BuildCommandLineForRuleRaw(spec, cmd, cygwin_shell, has_input_path,
return input_dir_preamble + ' '.join(command + arguments) return input_dir_preamble + ' '.join(command + arguments)
def _BuildCommandLineForRule(spec, rule, has_input_path): def _BuildCommandLineForRule(spec, rule, has_input_path, do_setup_env):
# Find path to cygwin.
cygwin_dir = _FixPath(spec.get('msvs_cygwin_dirs', ['.'])[0])
# Currently this weird argument munging is used to duplicate the way a # Currently this weird argument munging is used to duplicate the way a
# python script would need to be run as part of the chrome tree. # python script would need to be run as part of the chrome tree.
# Eventually we should add some sort of rule_default option to set this # Eventually we should add some sort of rule_default option to set this
@ -334,7 +320,7 @@ def _BuildCommandLineForRule(spec, rule, has_input_path):
mcs = int(mcs) mcs = int(mcs)
quote_cmd = int(rule.get('msvs_quote_cmd', 1)) quote_cmd = int(rule.get('msvs_quote_cmd', 1))
return _BuildCommandLineForRuleRaw(spec, rule['action'], mcs, has_input_path, return _BuildCommandLineForRuleRaw(spec, rule['action'], mcs, has_input_path,
quote_cmd) quote_cmd, do_setup_env=do_setup_env)
def _AddActionStep(actions_dict, inputs, outputs, description, command): def _AddActionStep(actions_dict, inputs, outputs, description, command):
@ -503,7 +489,11 @@ def _GenerateNativeRulesForMSVS(p, rules, output_dir, spec, options):
rule_ext = r['extension'] rule_ext = r['extension']
inputs = _FixPaths(r.get('inputs', [])) inputs = _FixPaths(r.get('inputs', []))
outputs = _FixPaths(r.get('outputs', [])) outputs = _FixPaths(r.get('outputs', []))
cmd = _BuildCommandLineForRule(spec, r, has_input_path=True) # Skip a rule with no action and no inputs.
if 'action' not in r and not r.get('rule_sources', []):
continue
cmd = _BuildCommandLineForRule(spec, r, has_input_path=True,
do_setup_env=True)
rules_file.AddCustomBuildRule(name=rule_name, rules_file.AddCustomBuildRule(name=rule_name,
description=r.get('message', rule_name), description=r.get('message', rule_name),
extensions=[rule_ext], extensions=[rule_ext],
@ -591,7 +581,7 @@ def _GenerateExternalRules(rules, output_dir, spec,
'IntDir=$(IntDir)', 'IntDir=$(IntDir)',
'-j', '${NUMBER_OF_PROCESSORS_PLUS_1}', '-j', '${NUMBER_OF_PROCESSORS_PLUS_1}',
'-f', filename] '-f', filename]
cmd = _BuildCommandLineForRuleRaw(spec, cmd, True, False, True) cmd = _BuildCommandLineForRuleRaw(spec, cmd, True, False, True, True)
# Insert makefile as 0'th input, so it gets the action attached there, # Insert makefile as 0'th input, so it gets the action attached there,
# as this is easier to understand from in the IDE. # as this is easier to understand from in the IDE.
all_inputs = list(all_inputs) all_inputs = list(all_inputs)
@ -599,7 +589,8 @@ def _GenerateExternalRules(rules, output_dir, spec,
_AddActionStep(actions_to_add, _AddActionStep(actions_to_add,
inputs=_FixPaths(all_inputs), inputs=_FixPaths(all_inputs),
outputs=_FixPaths(all_outputs), outputs=_FixPaths(all_outputs),
description='Running %s' % cmd, description='Running external rules for %s' %
spec['target_name'],
command=cmd) command=cmd)
@ -707,6 +698,9 @@ def _EscapeCppDefineForMSVS(s):
s = _EscapeEnvironmentVariableExpansion(s) s = _EscapeEnvironmentVariableExpansion(s)
s = _EscapeCommandLineArgumentForMSVS(s) s = _EscapeCommandLineArgumentForMSVS(s)
s = _EscapeVCProjCommandLineArgListItem(s) s = _EscapeVCProjCommandLineArgListItem(s)
# cl.exe replaces literal # characters with = in preprocesor definitions for
# some reason. Octal-encode to work around that.
s = s.replace('#', '\\%03o' % ord('#'))
return s return s
@ -743,6 +737,9 @@ def _EscapeCppDefineForMSBuild(s):
s = _EscapeEnvironmentVariableExpansion(s) s = _EscapeEnvironmentVariableExpansion(s)
s = _EscapeCommandLineArgumentForMSBuild(s) s = _EscapeCommandLineArgumentForMSBuild(s)
s = _EscapeMSBuildSpecialCharacters(s) s = _EscapeMSBuildSpecialCharacters(s)
# cl.exe replaces literal # characters with = in preprocesor definitions for
# some reason. Octal-encode to work around that.
s = s.replace('#', '\\%03o' % ord('#'))
return s return s
@ -833,6 +830,20 @@ def _GetGuidOfProject(proj_path, spec):
return guid return guid
def _GetMsbuildToolsetOfProject(proj_path, spec):
"""Get the platform toolset for the project.
Arguments:
proj_path: Path of the vcproj or vcxproj file to generate.
spec: The target dictionary containing the properties of the target.
Returns:
the platform toolset string or None.
"""
# Pluck out the default configuration.
default_config = _GetDefaultConfiguration(spec)
return default_config.get('msbuild_toolset')
def _GenerateProject(project, options, version, generator_flags): def _GenerateProject(project, options, version, generator_flags):
"""Generates a vcproj file. """Generates a vcproj file.
@ -841,17 +852,19 @@ def _GenerateProject(project, options, version, generator_flags):
options: global generator options. options: global generator options.
version: the MSVSVersion object. version: the MSVSVersion object.
generator_flags: dict of generator-specific flags. generator_flags: dict of generator-specific flags.
Returns:
A list of source files that cannot be found on disk.
""" """
default_config = _GetDefaultConfiguration(project.spec) default_config = _GetDefaultConfiguration(project.spec)
# Skip emitting anything if told to with msvs_existing_vcproj option. # Skip emitting anything if told to with msvs_existing_vcproj option.
if default_config.get('msvs_existing_vcproj'): if default_config.get('msvs_existing_vcproj'):
return return []
if version.UsesVcxproj(): if version.UsesVcxproj():
_GenerateMSBuildProject(project, options, version, generator_flags) return _GenerateMSBuildProject(project, options, version, generator_flags)
else: else:
_GenerateMSVSProject(project, options, version, generator_flags) return _GenerateMSVSProject(project, options, version, generator_flags)
def _GenerateMSVSProject(project, options, version, generator_flags): def _GenerateMSVSProject(project, options, version, generator_flags):
@ -896,7 +909,7 @@ def _GenerateMSVSProject(project, options, version, generator_flags):
spec, options, project_dir, sources, excluded_sources, list_excluded)) spec, options, project_dir, sources, excluded_sources, list_excluded))
# Add in files. # Add in files.
_VerifySourcesExist(sources, project_dir) missing_sources = _VerifySourcesExist(sources, project_dir)
p.AddFiles(sources) p.AddFiles(sources)
_AddToolFilesToMSVS(p, spec) _AddToolFilesToMSVS(p, spec)
@ -916,6 +929,8 @@ def _GenerateMSVSProject(project, options, version, generator_flags):
# Write it out. # Write it out.
p.WriteIfChanged() p.WriteIfChanged()
return missing_sources
def _GetUniquePlatforms(spec): def _GetUniquePlatforms(spec):
"""Returns the list of unique platforms for this spec, e.g ['win32', ...]. """Returns the list of unique platforms for this spec, e.g ['win32', ...].
@ -1121,9 +1136,6 @@ def _GetOutputFilePathAndTool(spec):
'executable': ('VCLinkerTool', 'Link', '$(OutDir)\\', '.exe'), 'executable': ('VCLinkerTool', 'Link', '$(OutDir)\\', '.exe'),
'shared_library': ('VCLinkerTool', 'Link', '$(OutDir)\\', '.dll'), 'shared_library': ('VCLinkerTool', 'Link', '$(OutDir)\\', '.dll'),
'loadable_module': ('VCLinkerTool', 'Link', '$(OutDir)\\', '.dll'), 'loadable_module': ('VCLinkerTool', 'Link', '$(OutDir)\\', '.dll'),
# TODO(jeanluc) If we want to avoid the MSB8012 warnings in
# VisualStudio 2010, we will have to change the value of $(OutDir)
# to contain the \lib suffix, rather than doing it as below.
'static_library': ('VCLibrarianTool', 'Lib', '$(OutDir)\\lib\\', '.lib'), 'static_library': ('VCLibrarianTool', 'Lib', '$(OutDir)\\lib\\', '.lib'),
} }
output_file_props = output_file_map.get(spec['type']) output_file_props = output_file_map.get(spec['type'])
@ -1164,7 +1176,7 @@ def _GetDisabledWarnings(config):
def _GetModuleDefinition(spec): def _GetModuleDefinition(spec):
def_file = '' def_file = ''
if spec['type'] in ['shared_library', 'loadable_module']: if spec['type'] in ['shared_library', 'loadable_module', 'executable']:
def_files = [s for s in spec.get('sources', []) if s.endswith('.def')] def_files = [s for s in spec.get('sources', []) if s.endswith('.def')]
if len(def_files) == 1: if len(def_files) == 1:
def_file = _FixPath(def_files[0]) def_file = _FixPath(def_files[0])
@ -1237,10 +1249,6 @@ def _GetMSVSAttributes(spec, config, config_type):
prepared_attrs['ConfigurationType'] = config_type prepared_attrs['ConfigurationType'] = config_type
output_dir = prepared_attrs.get('OutputDirectory', output_dir = prepared_attrs.get('OutputDirectory',
'$(SolutionDir)$(ConfigurationName)') '$(SolutionDir)$(ConfigurationName)')
# TODO(jeanluc) If we want to avoid the MSB8012 warning, we should
# add code like the following to place libraries in their own directory.
# if config_type == '4':
# output_dir = spec.get('product_dir', output_dir + '\\lib')
prepared_attrs['OutputDirectory'] = output_dir prepared_attrs['OutputDirectory'] = output_dir
if 'IntermediateDirectory' not in prepared_attrs: if 'IntermediateDirectory' not in prepared_attrs:
intermediate = '$(ConfigurationName)\\obj\\$(ProjectName)' intermediate = '$(ConfigurationName)\\obj\\$(ProjectName)'
@ -1276,7 +1284,7 @@ def _PrepareListOfSources(spec, gyp_file):
# Add in 'action' inputs and outputs. # Add in 'action' inputs and outputs.
for a in spec.get('actions', []): for a in spec.get('actions', []):
inputs = a.get('inputs', []) inputs = a['inputs']
inputs = [_NormalizedSource(i) for i in inputs] inputs = [_NormalizedSource(i) for i in inputs]
# Add all inputs to sources and excluded sources. # Add all inputs to sources and excluded sources.
inputs = set(inputs) inputs = set(inputs)
@ -1445,10 +1453,19 @@ def _HandlePreCompiledHeaders(p, sources, spec):
def _AddActions(actions_to_add, spec, relative_path_of_gyp_file): def _AddActions(actions_to_add, spec, relative_path_of_gyp_file):
# Add actions. # Add actions.
actions = spec.get('actions', []) actions = spec.get('actions', [])
# Don't setup_env every time. When all the actions are run together in one
# batch file in VS, the PATH will grow too long.
# Membership in this set means that the cygwin environment has been set up,
# and does not need to be set up again.
have_setup_env = set()
for a in actions: for a in actions:
cmd = _BuildCommandLineForRule(spec, a, has_input_path=False)
# Attach actions to the gyp file if nothing else is there. # Attach actions to the gyp file if nothing else is there.
inputs = a.get('inputs') or [relative_path_of_gyp_file] inputs = a.get('inputs') or [relative_path_of_gyp_file]
attached_to = inputs[0]
need_setup_env = attached_to not in have_setup_env
cmd = _BuildCommandLineForRule(spec, a, has_input_path=False,
do_setup_env=need_setup_env)
have_setup_env.add(attached_to)
# Add the action. # Add the action.
_AddActionStep(actions_to_add, _AddActionStep(actions_to_add,
inputs=inputs, inputs=inputs,
@ -1638,6 +1655,9 @@ def _CreateProjectObjects(target_list, target_dicts, options, msvs_version):
build_file=build_file, build_file=build_file,
config_platform_overrides=overrides, config_platform_overrides=overrides,
fixpath_prefix=fixpath_prefix) fixpath_prefix=fixpath_prefix)
# Set project toolset if any (MS build only)
if msvs_version.UsesVcxproj():
obj.set_msbuild_toolset(_GetMsbuildToolsetOfProject(proj_path, spec))
projects[qualified_target] = obj projects[qualified_target] = obj
# Set all the dependencies # Set all the dependencies
for project in projects.values(): for project in projects.values():
@ -1775,9 +1795,11 @@ def GenerateOutput(target_list, target_dicts, data, params):
msvs_version) msvs_version)
# Generate each project. # Generate each project.
missing_sources = []
for project in project_objects.values(): for project in project_objects.values():
fixpath_prefix = project.fixpath_prefix fixpath_prefix = project.fixpath_prefix
_GenerateProject(project, options, msvs_version, generator_flags) missing_sources.extend(_GenerateProject(project, options, msvs_version,
generator_flags))
fixpath_prefix = None fixpath_prefix = None
for build_file in data: for build_file in data:
@ -1801,6 +1823,14 @@ def GenerateOutput(target_list, target_dicts, data, params):
version=msvs_version) version=msvs_version)
sln.Write() sln.Write()
if missing_sources:
error_message = "Missing input files:\n" + \
'\n'.join(set(missing_sources))
if generator_flags.get('msvs_error_on_missing_sources', False):
raise Exception(error_message)
else:
print >>sys.stdout, "Warning: " + error_message
def _GenerateMSBuildFiltersFile(filters_path, source_files, def _GenerateMSBuildFiltersFile(filters_path, source_files,
extension_to_rule_name): extension_to_rule_name):
@ -1916,6 +1946,9 @@ def _GenerateRulesForMSBuild(output_dir, options, spec,
msbuild_rules = [] msbuild_rules = []
for rule in rules_native: for rule in rules_native:
# Skip a rule with no action and no inputs.
if 'action' not in rule and not rule.get('rule_sources', []):
continue
msbuild_rule = MSBuildRule(rule, spec) msbuild_rule = MSBuildRule(rule, spec)
msbuild_rules.append(msbuild_rule) msbuild_rules.append(msbuild_rule)
extension_to_rule_name[msbuild_rule.extension] = msbuild_rule.rule_name extension_to_rule_name[msbuild_rule.extension] = msbuild_rule.rule_name
@ -1953,6 +1986,7 @@ class MSBuildRule(object):
depends_on: The name of the DependsOn element. depends_on: The name of the DependsOn element.
compute_output: The name of the ComputeOutput element. compute_output: The name of the ComputeOutput element.
dirs_to_make: The name of the DirsToMake element. dirs_to_make: The name of the DirsToMake element.
inputs: The name of the _inputs element.
tlog: The name of the _tlog element. tlog: The name of the _tlog element.
extension: The extension this rule applies to. extension: The extension this rule applies to.
description: The message displayed when this rule is invoked. description: The message displayed when this rule is invoked.
@ -1974,6 +2008,7 @@ class MSBuildRule(object):
self.depends_on = self.rule_name + 'DependsOn' self.depends_on = self.rule_name + 'DependsOn'
self.compute_output = 'Compute%sOutput' % self.rule_name self.compute_output = 'Compute%sOutput' % self.rule_name
self.dirs_to_make = self.rule_name + 'DirsToMake' self.dirs_to_make = self.rule_name + 'DirsToMake'
self.inputs = self.rule_name + '_inputs'
self.tlog = self.rule_name + '_tlog' self.tlog = self.rule_name + '_tlog'
self.extension = rule['extension'] self.extension = rule['extension']
if not self.extension.startswith('.'): if not self.extension.startswith('.'):
@ -1988,7 +2023,8 @@ class MSBuildRule(object):
old_outputs = _FixPaths(rule.get('outputs', [])) old_outputs = _FixPaths(rule.get('outputs', []))
self.outputs = ';'.join([MSVSSettings.ConvertVCMacrosToMSBuild(i) self.outputs = ';'.join([MSVSSettings.ConvertVCMacrosToMSBuild(i)
for i in old_outputs]) for i in old_outputs])
old_command = _BuildCommandLineForRule(spec, rule, has_input_path=True) old_command = _BuildCommandLineForRule(spec, rule, has_input_path=True,
do_setup_env=True)
self.command = MSVSSettings.ConvertVCMacrosToMSBuild(old_command) self.command = MSVSSettings.ConvertVCMacrosToMSBuild(old_command)
@ -2072,6 +2108,10 @@ def _GenerateMSBuildRuleTargetsFile(targets_path, msbuild_rules):
} }
] ]
] ]
inputs_section = [
'ItemGroup',
[rule.inputs, {'Include': '%%(%s.AdditionalDependencies)' % rule_name}]
]
logging_section = [ logging_section = [
'ItemGroup', 'ItemGroup',
[rule.tlog, [rule.tlog,
@ -2081,6 +2121,7 @@ def _GenerateMSBuildRuleTargetsFile(targets_path, msbuild_rules):
(rule_name, rule_name)) (rule_name, rule_name))
}, },
['Source', "@(%s, '|')" % rule_name], ['Source', "@(%s, '|')" % rule_name],
['Inputs', "@(%s -> '%%(Fullpath)', ';')" % rule.inputs],
], ],
] ]
message_section = [ message_section = [
@ -2089,7 +2130,7 @@ def _GenerateMSBuildRuleTargetsFile(targets_path, msbuild_rules):
'Text': '%%(%s.ExecutionDescription)' % rule_name 'Text': '%%(%s.ExecutionDescription)' % rule_name
} }
] ]
write_lines_section = [ write_tlog_section = [
'WriteLinesToFile', 'WriteLinesToFile',
{'Condition': "'@(%s)' != '' and '%%(%s.ExcludedFromBuild)' != " {'Condition': "'@(%s)' != '' and '%%(%s.ExcludedFromBuild)' != "
"'true'" % (rule.tlog, rule.tlog), "'true'" % (rule.tlog, rule.tlog),
@ -2098,6 +2139,14 @@ def _GenerateMSBuildRuleTargetsFile(targets_path, msbuild_rules):
rule.tlog) rule.tlog)
} }
] ]
read_tlog_section = [
'WriteLinesToFile',
{'Condition': "'@(%s)' != '' and '%%(%s.ExcludedFromBuild)' != "
"'true'" % (rule.tlog, rule.tlog),
'File': '$(IntDir)$(ProjectName).read.1.tlog',
'Lines': "^%%(%s.Source);%%(%s.Inputs)" % (rule.tlog, rule.tlog)
}
]
command_and_input_section = [ command_and_input_section = [
rule_name, rule_name,
{'Condition': "'@(%s)' != '' and '%%(%s.ExcludedFromBuild)' != " {'Condition': "'@(%s)' != '' and '%%(%s.ExcludedFromBuild)' != "
@ -2119,9 +2168,11 @@ def _GenerateMSBuildRuleTargetsFile(targets_path, msbuild_rules):
'Inputs': target_inputs 'Inputs': target_inputs
}, },
remove_section, remove_section,
inputs_section,
logging_section, logging_section,
message_section, message_section,
write_lines_section, write_tlog_section,
read_tlog_section,
command_and_input_section, command_and_input_section,
], ],
['PropertyGroup', ['PropertyGroup',
@ -2373,16 +2424,12 @@ def _GetMSBuildProjectConfigurations(configurations):
def _GetMSBuildGlobalProperties(spec, guid, gyp_file_name): def _GetMSBuildGlobalProperties(spec, guid, gyp_file_name):
prefix = spec.get('product_prefix', '')
product_name = spec.get('product_name', '$(ProjectName)')
target_name = prefix + product_name
namespace = os.path.splitext(gyp_file_name)[0] namespace = os.path.splitext(gyp_file_name)[0]
return [ return [
['PropertyGroup', {'Label': 'Globals'}, ['PropertyGroup', {'Label': 'Globals'},
['ProjectGuid', guid], ['ProjectGuid', guid],
['Keyword', 'Win32Proj'], ['Keyword', 'Win32Proj'],
['RootNamespace', namespace], ['RootNamespace', namespace],
['TargetName', target_name],
] ]
] ]
@ -2401,24 +2448,66 @@ def _GetMSBuildConfigurationDetails(spec, build_file):
return _GetMSBuildPropertyGroup(spec, 'Configuration', properties) return _GetMSBuildPropertyGroup(spec, 'Configuration', properties)
def _GetMSBuildLocalProperties(msbuild_toolset):
# Currently the only local property we support is PlatformToolset
properties = {}
if msbuild_toolset:
properties = [
['PropertyGroup', {'Label': 'Locals'},
['PlatformToolset', msbuild_toolset],
]
]
return properties
def _GetMSBuildPropertySheets(configurations): def _GetMSBuildPropertySheets(configurations):
user_props = r'$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props' user_props = r'$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props'
return [ additional_props = {}
['ImportGroup', props_specified = False
{'Label': 'PropertySheets'}, for name, settings in sorted(configurations.iteritems()):
['Import', configuration = _GetConfigurationCondition(name, settings)
{'Project': user_props, if settings.has_key('msbuild_props'):
'Condition': "exists('%s')" % user_props, additional_props[configuration] = _FixPaths(settings['msbuild_props'])
'Label': 'LocalAppDataPlatform' props_specified = True
} else:
] additional_props[configuration] = ''
]
if not props_specified:
return [
['ImportGroup',
{'Label': 'PropertySheets'},
['Import',
{'Project': user_props,
'Condition': "exists('%s')" % user_props,
'Label': 'LocalAppDataPlatform'
}
]
]
] ]
else:
sheets = []
for condition, props in additional_props.iteritems():
import_group = [
'ImportGroup',
{'Label': 'PropertySheets',
'Condition': condition
},
['Import',
{'Project': user_props,
'Condition': "exists('%s')" % user_props,
'Label': 'LocalAppDataPlatform'
}
]
]
for props_file in props:
import_group.append(['Import', {'Project':props_file}])
sheets.append(import_group)
return sheets
def _ConvertMSVSBuildAttributes(spec, config, build_file):
def _GetMSBuildAttributes(spec, config, build_file):
# Use the MSVS attributes and convert them. In the future, we may want to
# support Gyp files specifying 'msbuild_configuration_attributes' directly.
config_type = _GetMSVSConfigurationType(spec, build_file) config_type = _GetMSVSConfigurationType(spec, build_file)
msvs_attributes = _GetMSVSAttributes(spec, config, config_type) msvs_attributes = _GetMSVSAttributes(spec, config, config_type)
msbuild_attributes = {} msbuild_attributes = {}
@ -2429,22 +2518,77 @@ def _GetMSBuildAttributes(spec, config, build_file):
directory += '\\' directory += '\\'
msbuild_attributes[a] = directory msbuild_attributes[a] = directory
elif a == 'CharacterSet': elif a == 'CharacterSet':
msbuild_attributes[a] = { msbuild_attributes[a] = _ConvertMSVSCharacterSet(msvs_attributes[a])
'0': 'MultiByte',
'1': 'Unicode'
}[msvs_attributes[a]]
elif a == 'ConfigurationType': elif a == 'ConfigurationType':
msbuild_attributes[a] = { msbuild_attributes[a] = _ConvertMSVSConfigurationType(msvs_attributes[a])
'1': 'Application',
'2': 'DynamicLibrary',
'4': 'StaticLibrary',
'10': 'Utility'
}[msvs_attributes[a]]
else: else:
print 'Warning: Do not know how to convert MSVS attribute ' + a print 'Warning: Do not know how to convert MSVS attribute ' + a
return msbuild_attributes return msbuild_attributes
def _ConvertMSVSCharacterSet(char_set):
if char_set.isdigit():
char_set = {
'0': 'MultiByte',
'1': 'Unicode',
'2': 'MultiByte',
}[char_set]
return char_set
def _ConvertMSVSConfigurationType(config_type):
if config_type.isdigit():
config_type = {
'1': 'Application',
'2': 'DynamicLibrary',
'4': 'StaticLibrary',
'10': 'Utility'
}[config_type]
return config_type
def _GetMSBuildAttributes(spec, config, build_file):
if 'msbuild_configuration_attributes' not in config:
msbuild_attributes = _ConvertMSVSBuildAttributes(spec, config, build_file)
else:
config_type = _GetMSVSConfigurationType(spec, build_file)
config_type = _ConvertMSVSConfigurationType(config_type)
msbuild_attributes = config.get('msbuild_configuration_attributes', {})
msbuild_attributes['ConfigurationType'] = config_type
output_dir = msbuild_attributes.get('OutputDirectory',
'$(SolutionDir)$(Configuration)\\')
msbuild_attributes['OutputDirectory'] = output_dir
if 'IntermediateDirectory' not in msbuild_attributes:
intermediate = '$(Configuration)\\'
msbuild_attributes['IntermediateDirectory'] = intermediate
if 'CharacterSet' in msbuild_attributes:
msbuild_attributes['CharacterSet'] = _ConvertMSVSCharacterSet(
msbuild_attributes['CharacterSet'])
if 'TargetName' not in msbuild_attributes:
prefix = spec.get('product_prefix', '')
product_name = spec.get('product_name', '$(ProjectName)')
target_name = prefix + product_name
msbuild_attributes['TargetName'] = target_name
# Make sure that 'TargetPath' matches 'Lib.OutputFile' or 'Link.OutputFile'
# (depending on the tool used) to avoid MSB8012 warning.
msbuild_tool_map = {
'executable': 'Link',
'shared_library': 'Link',
'loadable_module': 'Link',
'static_library': 'Lib',
}
msbuild_tool = msbuild_tool_map.get(spec['type'])
if msbuild_tool:
msbuild_settings = config['finalized_msbuild_settings']
out_file = msbuild_settings[msbuild_tool].get('OutputFile')
if out_file:
msbuild_attributes['TargetPath'] = out_file
return msbuild_attributes
def _GetMSBuildConfigurationGlobalProperties(spec, configurations, build_file): def _GetMSBuildConfigurationGlobalProperties(spec, configurations, build_file):
# TODO(jeanluc) We could optimize out the following and do it only if # TODO(jeanluc) We could optimize out the following and do it only if
# there are actions. # there are actions.
@ -2470,6 +2614,13 @@ def _GetMSBuildConfigurationGlobalProperties(spec, configurations, build_file):
attributes['IntermediateDirectory']) attributes['IntermediateDirectory'])
_AddConditionalProperty(properties, condition, 'OutDir', _AddConditionalProperty(properties, condition, 'OutDir',
attributes['OutputDirectory']) attributes['OutputDirectory'])
_AddConditionalProperty(properties, condition, 'TargetName',
attributes['TargetName'])
if attributes.get('TargetPath'):
_AddConditionalProperty(properties, condition, 'TargetPath',
attributes['TargetPath'])
if new_paths: if new_paths:
_AddConditionalProperty(properties, condition, 'ExecutablePath', _AddConditionalProperty(properties, condition, 'ExecutablePath',
new_paths) new_paths)
@ -2500,6 +2651,10 @@ def _AddConditionalProperty(properties, condition, name, value):
conditions.append(condition) conditions.append(condition)
# Regex for msvs variable references ( i.e. $(FOO) ).
MSVS_VARIABLE_REFERENCE = re.compile('\$\(([a-zA-Z_][a-zA-Z0-9_]*)\)')
def _GetMSBuildPropertyGroup(spec, label, properties): def _GetMSBuildPropertyGroup(spec, label, properties):
"""Returns a PropertyGroup definition for the specified properties. """Returns a PropertyGroup definition for the specified properties.
@ -2514,7 +2669,31 @@ def _GetMSBuildPropertyGroup(spec, label, properties):
if label: if label:
group.append({'Label': label}) group.append({'Label': label})
num_configurations = len(spec['configurations']) num_configurations = len(spec['configurations'])
for name, values in sorted(properties.iteritems()): def GetEdges(node):
# Use a definition of edges such that user_of_variable -> used_varible.
# This happens to be easier in this case, since a variable's
# definition contains all variables it references in a single string.
edges = set()
for value in sorted(properties[node].keys()):
# Add to edges all $(...) references to variables.
#
# Variable references that refer to names not in properties are excluded
# These can exist for instance to refer built in definitions like
# $(SolutionDir).
#
# Self references are ignored. Self reference is used in a few places to
# append to the default value. I.e. PATH=$(PATH);other_path
edges.update(set([v for v in MSVS_VARIABLE_REFERENCE.findall(value)
if v in properties and v != node]))
return edges
properties_ordered = gyp.common.TopologicallySorted(
properties.keys(), GetEdges)
# Walk properties in the reverse of a topological sort on
# user_of_variable -> used_variable as this ensures variables are
# defined before they are used.
# NOTE: reverse(topsort(DAG)) = topsort(reverse_edges(DAG))
for name in reversed(properties_ordered):
values = properties[name]
for value, conditions in sorted(values.iteritems()): for value, conditions in sorted(values.iteritems()):
if len(conditions) == num_configurations: if len(conditions) == num_configurations:
# If the value is the same all configurations, # If the value is the same all configurations,
@ -2658,16 +2837,19 @@ def _VerifySourcesExist(sources, root_dir):
Arguments: Arguments:
sources: A recursive list of Filter/file names. sources: A recursive list of Filter/file names.
root_dir: The root directory for the relative path names. root_dir: The root directory for the relative path names.
Returns:
A list of source files that cannot be found on disk.
""" """
missing_sources = []
for source in sources: for source in sources:
if isinstance(source, MSVSProject.Filter): if isinstance(source, MSVSProject.Filter):
_VerifySourcesExist(source.contents, root_dir) missing_sources.extend(_VerifySourcesExist(source.contents, root_dir))
else: else:
if '$' not in source: if '$' not in source:
full_path = os.path.join(root_dir, source) full_path = os.path.join(root_dir, source)
if not os.path.exists(full_path): if not os.path.exists(full_path):
print 'Warning: Missing input file ' + full_path + ' pwd=' +\ missing_sources.append(full_path)
os.getcwd() return missing_sources
def _GetMSBuildSources(spec, sources, exclusions, extension_to_rule_name, def _GetMSBuildSources(spec, sources, exclusions, extension_to_rule_name,
@ -2808,7 +2990,7 @@ def _GenerateMSBuildProject(project, options, version, generator_flags):
_GenerateMSBuildFiltersFile(project.path + '.filters', sources, _GenerateMSBuildFiltersFile(project.path + '.filters', sources,
extension_to_rule_name) extension_to_rule_name)
_VerifySourcesExist(sources, project_dir) missing_sources = _VerifySourcesExist(sources, project_dir)
for (_, configuration) in configurations.iteritems(): for (_, configuration) in configurations.iteritems():
_FinalizeMSBuildSettings(spec, configuration) _FinalizeMSBuildSettings(spec, configuration)
@ -2834,6 +3016,7 @@ def _GenerateMSBuildProject(project, options, version, generator_flags):
content += _GetMSBuildGlobalProperties(spec, project.guid, project_file_name) content += _GetMSBuildGlobalProperties(spec, project.guid, project_file_name)
content += import_default_section content += import_default_section
content += _GetMSBuildConfigurationDetails(spec, project.build_file) content += _GetMSBuildConfigurationDetails(spec, project.build_file)
content += _GetMSBuildLocalProperties(project.msbuild_toolset)
content += import_cpp_props_section content += import_cpp_props_section
content += _GetMSBuildExtensions(props_files_of_rules) content += _GetMSBuildExtensions(props_files_of_rules)
content += _GetMSBuildPropertySheets(configurations) content += _GetMSBuildPropertySheets(configurations)
@ -2853,6 +3036,8 @@ def _GenerateMSBuildProject(project, options, version, generator_flags):
easy_xml.WriteXmlIfChanged(content, project.path) easy_xml.WriteXmlIfChanged(content, project.path)
return missing_sources
def _GetMSBuildExtensions(props_files_of_rules): def _GetMSBuildExtensions(props_files_of_rules):
extensions = ['ImportGroup', {'Label': 'ExtensionSettings'}] extensions = ['ImportGroup', {'Label': 'ExtensionSettings'}]
@ -2900,7 +3085,13 @@ def _GenerateActionsForMSBuild(spec, actions_to_add):
commands.append(cmd) commands.append(cmd)
# Add the custom build action for one input file. # Add the custom build action for one input file.
description = ', and also '.join(descriptions) description = ', and also '.join(descriptions)
command = ' && '.join(commands)
# We can't join the commands simply with && because the command line will
# get too long. See also _AddActions: cygwin's setup_env mustn't be called
# for every invocation or the command that sets the PATH will grow too
# long.
command = (
'\r\nif %errorlevel% neq 0 exit /b %errorlevel%\r\n'.join(commands))
_AddMSBuildAction(spec, _AddMSBuildAction(spec,
primary_input, primary_input,
inputs, inputs,

715
tools/gyp/pylib/gyp/generator/ninja.py

File diff suppressed because it is too large

48
tools/gyp/pylib/gyp/generator/ninja_test.py

@ -9,34 +9,36 @@
import gyp.generator.ninja as ninja import gyp.generator.ninja as ninja
import unittest import unittest
import StringIO import StringIO
import sys
import TestCommon import TestCommon
class TestPrefixesAndSuffixes(unittest.TestCase): class TestPrefixesAndSuffixes(unittest.TestCase):
def test_BinaryNamesWindows(self): if sys.platform in ('win32', 'cygwin'):
writer = ninja.NinjaWriter('wee', '.', '.', 'ninja.build', 'win') def test_BinaryNamesWindows(self):
spec = { 'target_name': 'wee' } writer = ninja.NinjaWriter('foo', 'wee', '.', '.', 'ninja.build', 'win')
self.assertTrue(writer.ComputeOutputFileName(spec, 'executable'). spec = { 'target_name': 'wee' }
endswith('.exe')) self.assertTrue(writer.ComputeOutputFileName(spec, 'executable').
self.assertTrue(writer.ComputeOutputFileName(spec, 'shared_library'). endswith('.exe'))
endswith('.dll')) self.assertTrue(writer.ComputeOutputFileName(spec, 'shared_library').
self.assertTrue(writer.ComputeOutputFileName(spec, 'static_library'). endswith('.dll'))
endswith('.lib')) self.assertTrue(writer.ComputeOutputFileName(spec, 'static_library').
endswith('.lib'))
def test_BinaryNamesLinux(self): if sys.platform == 'linux2':
writer = ninja.NinjaWriter('wee', '.', '.', 'ninja.build', 'linux') def test_BinaryNamesLinux(self):
spec = { writer = ninja.NinjaWriter('foo', 'wee', '.', '.', 'ninja.build', 'linux')
'target_name': 'wee' spec = { 'target_name': 'wee' }
} self.assertTrue('.' not in writer.ComputeOutputFileName(spec,
self.assertTrue('.' not in writer.ComputeOutputFileName(spec, 'executable')) 'executable'))
self.assertTrue(writer.ComputeOutputFileName(spec, 'shared_library'). self.assertTrue(writer.ComputeOutputFileName(spec, 'shared_library').
startswith('lib')) startswith('lib'))
self.assertTrue(writer.ComputeOutputFileName(spec, 'static_library'). self.assertTrue(writer.ComputeOutputFileName(spec, 'static_library').
startswith('lib')) startswith('lib'))
self.assertTrue(writer.ComputeOutputFileName(spec, 'shared_library'). self.assertTrue(writer.ComputeOutputFileName(spec, 'shared_library').
endswith('.so')) endswith('.so'))
self.assertTrue(writer.ComputeOutputFileName(spec, 'static_library'). self.assertTrue(writer.ComputeOutputFileName(spec, 'static_library').
endswith('.a')) endswith('.a'))
if __name__ == '__main__': if __name__ == '__main__':
unittest.main() unittest.main()

9
tools/gyp/pylib/gyp/generator/scons.py

@ -1,4 +1,4 @@
# Copyright (c) 2011 Google Inc. All rights reserved. # Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be # Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file. # found in the LICENSE file.
@ -462,8 +462,7 @@ def GenerateSConscript(output_filename, spec, build_file, build_file_data):
rules = spec.get('rules', []) rules = spec.get('rules', [])
for rule in rules: for rule in rules:
name = rule['rule_name'] name = re.sub('[^a-zA-Z0-9_]', '_', rule['rule_name'])
a = ['cd', src_subdir, '&&'] + rule['action']
message = rule.get('message') message = rule.get('message')
if message: if message:
message = repr(message) message = repr(message)
@ -473,6 +472,10 @@ def GenerateSConscript(output_filename, spec, build_file, build_file_data):
poas_line = '_processed_input_files.append(infile)' poas_line = '_processed_input_files.append(infile)'
inputs = [FixPath(f, src_subdir_) for f in rule.get('inputs', [])] inputs = [FixPath(f, src_subdir_) for f in rule.get('inputs', [])]
outputs = [FixPath(f, src_subdir_) for f in rule.get('outputs', [])] outputs = [FixPath(f, src_subdir_) for f in rule.get('outputs', [])]
# Skip a rule with no action and no inputs.
if 'action' not in rule and not rule.get('rule_sources', []):
continue
a = ['cd', src_subdir, '&&'] + rule['action']
fp.write(_rule_template % { fp.write(_rule_template % {
'inputs' : pprint.pformat(inputs), 'inputs' : pprint.pformat(inputs),
'outputs' : pprint.pformat(outputs), 'outputs' : pprint.pformat(outputs),

32
tools/gyp/pylib/gyp/generator/xcode.py

@ -1,4 +1,4 @@
# Copyright (c) 2011 Google Inc. All rights reserved. # Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be # Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file. # found in the LICENSE file.
@ -513,7 +513,7 @@ def InstalledXcodeVersion():
return cached_xcode_version return cached_xcode_version
def AddSourceToTarget(source, pbxp, xct): def AddSourceToTarget(source, type, pbxp, xct):
# TODO(mark): Perhaps source_extensions and library_extensions can be made a # TODO(mark): Perhaps source_extensions and library_extensions can be made a
# little bit fancier. # little bit fancier.
source_extensions = ['c', 'cc', 'cpp', 'cxx', 'm', 'mm', 's'] source_extensions = ['c', 'cc', 'cpp', 'cxx', 'm', 'mm', 's']
@ -529,9 +529,9 @@ def AddSourceToTarget(source, pbxp, xct):
if ext != '': if ext != '':
ext = ext[1:].lower() ext = ext[1:].lower()
if ext in source_extensions: if ext in source_extensions and type != 'none':
xct.SourcesPhase().AddFile(source) xct.SourcesPhase().AddFile(source)
elif ext in library_extensions: elif ext in library_extensions and type != 'none':
xct.FrameworksPhase().AddFile(source) xct.FrameworksPhase().AddFile(source)
else: else:
# Files that aren't added to a sources or frameworks build phase can still # Files that aren't added to a sources or frameworks build phase can still
@ -699,7 +699,7 @@ def GenerateOutput(target_list, target_dicts, data, params):
# Xcode has some "issues" with checking dependencies for the "Compile # Xcode has some "issues" with checking dependencies for the "Compile
# sources" step with any source files/headers generated by actions/rules. # sources" step with any source files/headers generated by actions/rules.
# To work around this, if a target is building anything directly (not # To work around this, if a target is building anything directly (not
# type "none"), then a second target as used to run the GYP actions/rules # type "none"), then a second target is used to run the GYP actions/rules
# and is made a dependency of this target. This way the work is done # and is made a dependency of this target. This way the work is done
# before the dependency checks for what should be recompiled. # before the dependency checks for what should be recompiled.
support_xct = None support_xct = None
@ -772,7 +772,7 @@ def GenerateOutput(target_list, target_dicts, data, params):
# TODO(mark): Should verify that at most one of these is specified. # TODO(mark): Should verify that at most one of these is specified.
if int(action.get('process_outputs_as_sources', False)): if int(action.get('process_outputs_as_sources', False)):
for output in action['outputs']: for output in action['outputs']:
AddSourceToTarget(output, pbxp, xct) AddSourceToTarget(output, type, pbxp, xct)
if int(action.get('process_outputs_as_mac_bundle_resources', False)): if int(action.get('process_outputs_as_mac_bundle_resources', False)):
for output in action['outputs']: for output in action['outputs']:
@ -900,7 +900,7 @@ def GenerateOutput(target_list, target_dicts, data, params):
# TODO(mark): Should verify that at most one of these is specified. # TODO(mark): Should verify that at most one of these is specified.
if int(rule.get('process_outputs_as_sources', False)): if int(rule.get('process_outputs_as_sources', False)):
for output in concrete_outputs_for_this_rule_source: for output in concrete_outputs_for_this_rule_source:
AddSourceToTarget(output, pbxp, xct) AddSourceToTarget(output, type, pbxp, xct)
# If the file came from the mac_bundle_resources list or if the rule # If the file came from the mac_bundle_resources list or if the rule
# is marked to process outputs as bundle resource, do so. # is marked to process outputs as bundle resource, do so.
@ -926,7 +926,8 @@ def GenerateOutput(target_list, target_dicts, data, params):
if len(concrete_outputs_all) > 0: if len(concrete_outputs_all) > 0:
# TODO(mark): There's a possibilty for collision here. Consider # TODO(mark): There's a possibilty for collision here. Consider
# target "t" rule "A_r" and target "t_A" rule "r". # target "t" rule "A_r" and target "t_A" rule "r".
makefile_name = '%s_%s.make' % (target_name, rule['rule_name']) makefile_name = '%s.make' % re.sub(
'[^a-zA-Z0-9_]', '_' , '%s_%s' % (target_name, rule['rule_name']))
makefile_path = os.path.join(xcode_projects[build_file].path, makefile_path = os.path.join(xcode_projects[build_file].path,
makefile_name) makefile_name)
# TODO(mark): try/close? Write to a temporary file and swap it only # TODO(mark): try/close? Write to a temporary file and swap it only
@ -1064,12 +1065,12 @@ exit 1
if source_extension[1:] not in rules_by_ext: if source_extension[1:] not in rules_by_ext:
# AddSourceToTarget will add the file to a root group if it's not # AddSourceToTarget will add the file to a root group if it's not
# already there. # already there.
AddSourceToTarget(source, pbxp, xct) AddSourceToTarget(source, type, pbxp, xct)
else: else:
pbxp.AddOrGetFileInRootGroup(source) pbxp.AddOrGetFileInRootGroup(source)
# Add "mac_bundle_resources", "mac_framework_headers", and # Add "mac_bundle_resources" and "mac_framework_private_headers" if
# "mac_framework_private_headers" if it's a bundle of any type. # it's a bundle of any type.
if is_bundle: if is_bundle:
for resource in tgt_mac_bundle_resources: for resource in tgt_mac_bundle_resources:
(resource_root, resource_extension) = posixpath.splitext(resource) (resource_root, resource_extension) = posixpath.splitext(resource)
@ -1078,12 +1079,15 @@ exit 1
else: else:
pbxp.AddOrGetFileInRootGroup(resource) pbxp.AddOrGetFileInRootGroup(resource)
for header in spec.get('mac_framework_headers', []):
AddHeaderToTarget(header, pbxp, xct, True)
for header in spec.get('mac_framework_private_headers', []): for header in spec.get('mac_framework_private_headers', []):
AddHeaderToTarget(header, pbxp, xct, False) AddHeaderToTarget(header, pbxp, xct, False)
# Add "mac_framework_headers". These can be valid for both frameworks
# and static libraries.
if is_bundle or type == 'static_library':
for header in spec.get('mac_framework_headers', []):
AddHeaderToTarget(header, pbxp, xct, True)
# Add "copies". # Add "copies".
for copy_group in spec.get('copies', []): for copy_group in spec.get('copies', []):
pbxcp = gyp.xcodeproj_file.PBXCopyFilesBuildPhase({ pbxcp = gyp.xcodeproj_file.PBXCopyFilesBuildPhase({

673
tools/gyp/pylib/gyp/input.py

@ -1,4 +1,4 @@
# Copyright (c) 2011 Google Inc. All rights reserved. # Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be # Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file. # found in the LICENSE file.
@ -302,7 +302,6 @@ def ProcessToolsetsInDict(data):
if 'toolset' in target and 'toolsets' not in target: if 'toolset' in target and 'toolsets' not in target:
new_target_list.append(target) new_target_list.append(target)
continue continue
global multiple_toolsets
if multiple_toolsets: if multiple_toolsets:
toolsets = target.get('toolsets', ['target']) toolsets = target.get('toolsets', ['target'])
else: else:
@ -331,8 +330,6 @@ def ProcessToolsetsInDict(data):
# that contains the targets... # that contains the targets...
def LoadTargetBuildFile(build_file_path, data, aux_data, variables, includes, def LoadTargetBuildFile(build_file_path, data, aux_data, variables, includes,
depth, check): depth, check):
global absolute_build_file_paths
# If depth is set, predefine the DEPTH variable to be a relative path from # If depth is set, predefine the DEPTH variable to be a relative path from
# this build file's directory to the directory identified by depth. # this build file's directory to the directory identified by depth.
if depth: if depth:
@ -383,8 +380,8 @@ def LoadTargetBuildFile(build_file_path, data, aux_data, variables, includes,
ProcessToolsetsInDict(build_file_data) ProcessToolsetsInDict(build_file_data)
# Apply "pre"/"early" variable expansions and condition evaluations. # Apply "pre"/"early" variable expansions and condition evaluations.
ProcessVariablesAndConditionsInDict(build_file_data, False, variables, ProcessVariablesAndConditionsInDict(
build_file_path) build_file_data, PHASE_EARLY, variables, build_file_path)
# Since some toolsets might have been defined conditionally, perform # Since some toolsets might have been defined conditionally, perform
# a second round of toolsets expansion now. # a second round of toolsets expansion now.
@ -499,6 +496,13 @@ late_variable_re = re.compile(
'\((?P<is_array>\s*\[?)' '\((?P<is_array>\s*\[?)'
'(?P<content>.*?)(\]?)\))') '(?P<content>.*?)(\]?)\))')
# This matches the same as early_variable_re, but with '^' instead of '<'.
latelate_variable_re = re.compile(
'(?P<replace>(?P<type>[\^](?:(?:!?@?)|\|)?)'
'(?P<command_string>[-a-zA-Z0-9_.]+)?'
'\((?P<is_array>\s*\[?)'
'(?P<content>.*?)(\]?)\))')
# Global cache of results from running commands so they don't have to be run # Global cache of results from running commands so they don't have to be run
# more then once. # more then once.
cached_command_results = {} cached_command_results = {}
@ -513,263 +517,297 @@ def FixupPlatformCommand(cmd):
return cmd return cmd
def ExpandVariables(input, is_late, variables, build_file): PHASE_EARLY = 0
PHASE_LATE = 1
PHASE_LATELATE = 2
def ExpandVariables(input, phase, variables, build_file):
# Look for the pattern that gets expanded into variables # Look for the pattern that gets expanded into variables
if not is_late: if phase == PHASE_EARLY:
variable_re = early_variable_re variable_re = early_variable_re
expansion_symbol = '<' expansion_symbol = '<'
else: elif phase == PHASE_LATE:
variable_re = late_variable_re variable_re = late_variable_re
expansion_symbol = '>' expansion_symbol = '>'
elif phase == PHASE_LATELATE:
variable_re = latelate_variable_re
expansion_symbol = '^'
else:
assert False
input_str = str(input) input_str = str(input)
if IsStrCanonicalInt(input_str):
return int(input_str)
# Do a quick scan to determine if an expensive regex search is warranted. # Do a quick scan to determine if an expensive regex search is warranted.
if expansion_symbol in input_str: if expansion_symbol not in input_str:
# Get the entire list of matches as a list of MatchObject instances. return input_str
# (using findall here would return strings instead of MatchObjects).
matches = [match for match in variable_re.finditer(input_str)] # Get the entire list of matches as a list of MatchObject instances.
else: # (using findall here would return strings instead of MatchObjects).
matches = None matches = [match for match in variable_re.finditer(input_str)]
if not matches:
return input_str
output = input_str output = input_str
if matches: # Reverse the list of matches so that replacements are done right-to-left.
# Reverse the list of matches so that replacements are done right-to-left. # That ensures that earlier replacements won't mess up the string in a
# That ensures that earlier replacements won't mess up the string in a # way that causes later calls to find the earlier substituted text instead
# way that causes later calls to find the earlier substituted text instead # of what's intended for replacement.
# of what's intended for replacement. matches.reverse()
matches.reverse() for match_group in matches:
for match_group in matches: match = match_group.groupdict()
match = match_group.groupdict() gyp.DebugOutput(gyp.DEBUG_VARIABLES,
gyp.DebugOutput(gyp.DEBUG_VARIABLES, "Matches: %s" % repr(match))
"Matches: %s" % repr(match)) # match['replace'] is the substring to look for, match['type']
# match['replace'] is the substring to look for, match['type'] # is the character code for the replacement type (< > <! >! <| >| <@
# is the character code for the replacement type (< > <! >! <| >| <@ # >@ <!@ >!@), match['is_array'] contains a '[' for command
# >@ <!@ >!@), match['is_array'] contains a '[' for command # arrays, and match['content'] is the name of the variable (< >)
# arrays, and match['content'] is the name of the variable (< >) # or command to run (<! >!). match['command_string'] is an optional
# or command to run (<! >!). match['command_string'] is an optional # command string. Currently, only 'pymod_do_main' is supported.
# command string. Currently, only 'pymod_do_main' is supported.
# run_command is true if a ! variant is used.
# run_command is true if a ! variant is used. run_command = '!' in match['type']
run_command = '!' in match['type'] command_string = match['command_string']
command_string = match['command_string']
# file_list is true if a | variant is used.
# file_list is true if a | variant is used. file_list = '|' in match['type']
file_list = '|' in match['type']
# Capture these now so we can adjust them later.
# Capture these now so we can adjust them later. replace_start = match_group.start('replace')
replace_start = match_group.start('replace') replace_end = match_group.end('replace')
replace_end = match_group.end('replace')
# Find the ending paren, and re-evaluate the contained string.
# Find the ending paren, and re-evaluate the contained string. (c_start, c_end) = FindEnclosingBracketGroup(input_str[replace_start:])
(c_start, c_end) = FindEnclosingBracketGroup(input_str[replace_start:])
# Adjust the replacement range to match the entire command
# Adjust the replacement range to match the entire command # found by FindEnclosingBracketGroup (since the variable_re
# found by FindEnclosingBracketGroup (since the variable_re # probably doesn't match the entire command if it contained
# probably doesn't match the entire command if it contained # nested variables).
# nested variables). replace_end = replace_start + c_end
replace_end = replace_start + c_end
# Find the "real" replacement, matching the appropriate closing
# Find the "real" replacement, matching the appropriate closing # paren, and adjust the replacement start and end.
# paren, and adjust the replacement start and end. replacement = input_str[replace_start:replace_end]
replacement = input_str[replace_start:replace_end]
# Figure out what the contents of the variable parens are.
# Figure out what the contents of the variable parens are. contents_start = replace_start + c_start + 1
contents_start = replace_start + c_start + 1 contents_end = replace_end - 1
contents_end = replace_end - 1 contents = input_str[contents_start:contents_end]
contents = input_str[contents_start:contents_end]
# Do filter substitution now for <|().
# Do filter substitution now for <|(). # Admittedly, this is different than the evaluation order in other
# Admittedly, this is different than the evaluation order in other # contexts. However, since filtration has no chance to run on <|(),
# contexts. However, since filtration has no chance to run on <|(), # this seems like the only obvious way to give them access to filters.
# this seems like the only obvious way to give them access to filters. if file_list:
if file_list: processed_variables = copy.deepcopy(variables)
processed_variables = copy.deepcopy(variables) ProcessListFiltersInDict(contents, processed_variables)
ProcessListFiltersInDict(contents, processed_variables) # Recurse to expand variables in the contents
# Recurse to expand variables in the contents contents = ExpandVariables(contents, phase,
contents = ExpandVariables(contents, is_late, processed_variables, build_file)
processed_variables, build_file) else:
# Recurse to expand variables in the contents
contents = ExpandVariables(contents, phase, variables, build_file)
# Strip off leading/trailing whitespace so that variable matches are
# simpler below (and because they are rarely needed).
contents = contents.strip()
# expand_to_list is true if an @ variant is used. In that case,
# the expansion should result in a list. Note that the caller
# is to be expecting a list in return, and not all callers do
# because not all are working in list context. Also, for list
# expansions, there can be no other text besides the variable
# expansion in the input string.
expand_to_list = '@' in match['type'] and input_str == replacement
if run_command or file_list:
# Find the build file's directory, so commands can be run or file lists
# generated relative to it.
build_file_dir = os.path.dirname(build_file)
if build_file_dir == '':
# If build_file is just a leaf filename indicating a file in the
# current directory, build_file_dir might be an empty string. Set
# it to None to signal to subprocess.Popen that it should run the
# command in the current directory.
build_file_dir = None
# Support <|(listfile.txt ...) which generates a file
# containing items from a gyp list, generated at gyp time.
# This works around actions/rules which have more inputs than will
# fit on the command line.
if file_list:
if type(contents) == list:
contents_list = contents
else: else:
# Recurse to expand variables in the contents contents_list = contents.split(' ')
contents = ExpandVariables(contents, is_late, variables, build_file) replacement = contents_list[0]
path = replacement
# Strip off leading/trailing whitespace so that variable matches are if not os.path.isabs(path):
# simpler below (and because they are rarely needed). path = os.path.join(build_file_dir, path)
contents = contents.strip() f = gyp.common.WriteOnDiff(path)
for i in contents_list[1:]:
# expand_to_list is true if an @ variant is used. In that case, f.write('%s\n' % i)
# the expansion should result in a list. Note that the caller f.close()
# is to be expecting a list in return, and not all callers do
# because not all are working in list context. Also, for list elif run_command:
# expansions, there can be no other text besides the variable use_shell = True
# expansion in the input string. if match['is_array']:
expand_to_list = '@' in match['type'] and input_str == replacement contents = eval(contents)
use_shell = False
if run_command or file_list:
# Find the build file's directory, so commands can be run or file lists # Check for a cached value to avoid executing commands, or generating
# generated relative to it. # file lists more than once.
build_file_dir = os.path.dirname(build_file) # TODO(http://code.google.com/p/gyp/issues/detail?id=112): It is
if build_file_dir == '': # possible that the command being invoked depends on the current
# If build_file is just a leaf filename indicating a file in the # directory. For that case the syntax needs to be extended so that the
# current directory, build_file_dir might be an empty string. Set # directory is also used in cache_key (it becomes a tuple).
# it to None to signal to subprocess.Popen that it should run the # TODO(http://code.google.com/p/gyp/issues/detail?id=111): In theory,
# command in the current directory. # someone could author a set of GYP files where each time the command
build_file_dir = None # is invoked it produces different output by design. When the need
# arises, the syntax should be extended to support no caching off a
# Support <|(listfile.txt ...) which generates a file # command's output so it is run every time.
# containing items from a gyp list, generated at gyp time. cache_key = str(contents)
# This works around actions/rules which have more inputs than will cached_value = cached_command_results.get(cache_key, None)
# fit on the command line. if cached_value is None:
if file_list: gyp.DebugOutput(gyp.DEBUG_VARIABLES,
if type(contents) == list: "Executing command '%s' in directory '%s'" %
contents_list = contents (contents,build_file_dir))
else:
contents_list = contents.split(' ') replacement = ''
replacement = contents_list[0]
path = replacement if command_string == 'pymod_do_main':
if not os.path.isabs(path): # <!pymod_do_main(modulename param eters) loads |modulename| as a
path = os.path.join(build_file_dir, path) # python module and then calls that module's DoMain() function,
f = gyp.common.WriteOnDiff(path) # passing ["param", "eters"] as a single list argument. For modules
for i in contents_list[1:]: # that don't load quickly, this can be faster than
f.write('%s\n' % i) # <!(python modulename param eters). Do this in |build_file_dir|.
f.close() oldwd = os.getcwd() # Python doesn't like os.open('.'): no fchdir.
os.chdir(build_file_dir)
elif run_command:
use_shell = True parsed_contents = shlex.split(contents)
if match['is_array']: py_module = __import__(parsed_contents[0])
contents = eval(contents) replacement = str(py_module.DoMain(parsed_contents[1:])).rstrip()
use_shell = False
os.chdir(oldwd)
# Check for a cached value to avoid executing commands, or generating assert replacement != None
# file lists more than once. elif command_string:
# TODO(http://code.google.com/p/gyp/issues/detail?id=112): It is raise Exception("Unknown command string '%s' in '%s'." %
# possible that the command being invoked depends on the current (command_string, contents))
# directory. For that case the syntax needs to be extended so that the
# directory is also used in cache_key (it becomes a tuple).
# TODO(http://code.google.com/p/gyp/issues/detail?id=111): In theory,
# someone could author a set of GYP files where each time the command
# is invoked it produces different output by design. When the need
# arises, the syntax should be extended to support no caching off a
# command's output so it is run every time.
cache_key = str(contents)
cached_value = cached_command_results.get(cache_key, None)
if cached_value is None:
gyp.DebugOutput(gyp.DEBUG_VARIABLES,
"Executing command '%s' in directory '%s'" %
(contents,build_file_dir))
replacement = ''
if command_string == 'pymod_do_main':
# <!pymod_do_main(modulename param eters) loads |modulename| as a
# python module and then calls that module's DoMain() function,
# passing ["param", "eters"] as a single list argument. For modules
# that don't load quickly, this can be faster than
# <!(python modulename param eters). Do this in |build_file_dir|.
oldwd = os.getcwd() # Python doesn't like os.open('.'): no fchdir.
os.chdir(build_file_dir)
parsed_contents = shlex.split(contents)
py_module = __import__(parsed_contents[0])
replacement = str(py_module.DoMain(parsed_contents[1:])).rstrip()
os.chdir(oldwd)
assert replacement != None
elif command_string:
raise Exception("Unknown command string '%s' in '%s'." %
(command_string, contents))
else:
# Fix up command with platform specific workarounds.
contents = FixupPlatformCommand(contents)
p = subprocess.Popen(contents, shell=use_shell,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
stdin=subprocess.PIPE,
cwd=build_file_dir)
p_stdout, p_stderr = p.communicate('')
if p.wait() != 0 or p_stderr:
sys.stderr.write(p_stderr)
# Simulate check_call behavior, since check_call only exists
# in python 2.5 and later.
raise Exception("Call to '%s' returned exit status %d." %
(contents, p.returncode))
replacement = p_stdout.rstrip()
cached_command_results[cache_key] = replacement
else: else:
gyp.DebugOutput(gyp.DEBUG_VARIABLES, # Fix up command with platform specific workarounds.
"Had cache value for command '%s' in directory '%s'" % contents = FixupPlatformCommand(contents)
(contents,build_file_dir)) p = subprocess.Popen(contents, shell=use_shell,
replacement = cached_value stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
stdin=subprocess.PIPE,
cwd=build_file_dir)
p_stdout, p_stderr = p.communicate('')
if p.wait() != 0 or p_stderr:
sys.stderr.write(p_stderr)
# Simulate check_call behavior, since check_call only exists
# in python 2.5 and later.
raise Exception("Call to '%s' returned exit status %d." %
(contents, p.returncode))
replacement = p_stdout.rstrip()
cached_command_results[cache_key] = replacement
else: else:
if not contents in variables: gyp.DebugOutput(gyp.DEBUG_VARIABLES,
"Had cache value for command '%s' in directory '%s'" %
(contents,build_file_dir))
replacement = cached_value
else:
if not contents in variables:
if contents[-1] in ['!', '/']:
# In order to allow cross-compiles (nacl) to happen more naturally,
# we will allow references to >(sources/) etc. to resolve to
# and empty list if undefined. This allows actions to:
# 'action!': [
# '>@(_sources!)',
# ],
# 'action/': [
# '>@(_sources/)',
# ],
replacement = []
else:
raise KeyError, 'Undefined variable ' + contents + \ raise KeyError, 'Undefined variable ' + contents + \
' in ' + build_file ' in ' + build_file
else:
replacement = variables[contents] replacement = variables[contents]
if isinstance(replacement, list):
for item in replacement:
if (not contents[-1] == '/' and
not isinstance(item, str) and not isinstance(item, int)):
raise TypeError, 'Variable ' + contents + \
' must expand to a string or list of strings; ' + \
'list contains a ' + \
item.__class__.__name__
# Run through the list and handle variable expansions in it. Since
# the list is guaranteed not to contain dicts, this won't do anything
# with conditions sections.
ProcessVariablesAndConditionsInList(replacement, phase, variables,
build_file)
elif not isinstance(replacement, str) and \
not isinstance(replacement, int):
raise TypeError, 'Variable ' + contents + \
' must expand to a string or list of strings; ' + \
'found a ' + replacement.__class__.__name__
if expand_to_list:
# Expanding in list context. It's guaranteed that there's only one
# replacement to do in |input_str| and that it's this replacement. See
# above.
if isinstance(replacement, list): if isinstance(replacement, list):
for item in replacement: # If it's already a list, make a copy.
if not isinstance(item, str) and not isinstance(item, int): output = replacement[:]
raise TypeError, 'Variable ' + contents + \
' must expand to a string or list of strings; ' + \
'list contains a ' + \
item.__class__.__name__
# Run through the list and handle variable expansions in it. Since
# the list is guaranteed not to contain dicts, this won't do anything
# with conditions sections.
ProcessVariablesAndConditionsInList(replacement, is_late, variables,
build_file)
elif not isinstance(replacement, str) and \
not isinstance(replacement, int):
raise TypeError, 'Variable ' + contents + \
' must expand to a string or list of strings; ' + \
'found a ' + replacement.__class__.__name__
if expand_to_list:
# Expanding in list context. It's guaranteed that there's only one
# replacement to do in |input_str| and that it's this replacement. See
# above.
if isinstance(replacement, list):
# If it's already a list, make a copy.
output = replacement[:]
else:
# Split it the same way sh would split arguments.
output = shlex.split(str(replacement))
else: else:
# Expanding in string context. # Split it the same way sh would split arguments.
encoded_replacement = '' output = shlex.split(str(replacement))
if isinstance(replacement, list): else:
# When expanding a list into string context, turn the list items # Expanding in string context.
# into a string in a way that will work with a subprocess call. encoded_replacement = ''
# if isinstance(replacement, list):
# TODO(mark): This isn't completely correct. This should # When expanding a list into string context, turn the list items
# call a generator-provided function that observes the # into a string in a way that will work with a subprocess call.
# proper list-to-argument quoting rules on a specific #
# platform instead of just calling the POSIX encoding # TODO(mark): This isn't completely correct. This should
# routine. # call a generator-provided function that observes the
encoded_replacement = gyp.common.EncodePOSIXShellList(replacement) # proper list-to-argument quoting rules on a specific
else: # platform instead of just calling the POSIX encoding
encoded_replacement = replacement # routine.
encoded_replacement = gyp.common.EncodePOSIXShellList(replacement)
else:
encoded_replacement = replacement
output = output[:replace_start] + str(encoded_replacement) + \ output = output[:replace_start] + str(encoded_replacement) + \
output[replace_end:] output[replace_end:]
# Prepare for the next match iteration. # Prepare for the next match iteration.
input_str = output input_str = output
# Look for more matches now that we've replaced some, to deal with # Look for more matches now that we've replaced some, to deal with
# expanding local variables (variables defined in the same # expanding local variables (variables defined in the same
# variables block as this one). # variables block as this one).
gyp.DebugOutput(gyp.DEBUG_VARIABLES, gyp.DebugOutput(gyp.DEBUG_VARIABLES,
"Found output %s, recursing." % repr(output)) "Found output %s, recursing." % repr(output))
if isinstance(output, list): if isinstance(output, list):
if output and isinstance(output[0], list):
# Leave output alone if it's a list of lists.
# We don't want such lists to be stringified.
pass
else:
new_output = [] new_output = []
for item in output: for item in output:
new_output.append(ExpandVariables(item, is_late, variables, build_file)) new_output.append(
ExpandVariables(item, phase, variables, build_file))
output = new_output output = new_output
else: else:
output = ExpandVariables(output, is_late, variables, build_file) output = ExpandVariables(output, phase, variables, build_file)
# Convert all strings that are canonically-represented integers into integers. # Convert all strings that are canonically-represented integers into integers.
if isinstance(output, list): if isinstance(output, list):
@ -779,14 +817,15 @@ def ExpandVariables(input, is_late, variables, build_file):
elif IsStrCanonicalInt(output): elif IsStrCanonicalInt(output):
output = int(output) output = int(output)
gyp.DebugOutput(gyp.DEBUG_VARIABLES,
"Expanding %s to %s" % (repr(input), repr(output)))
return output return output
def ProcessConditionsInDict(the_dict, is_late, variables, build_file): def ProcessConditionsInDict(the_dict, phase, variables, build_file):
# Process a 'conditions' or 'target_conditions' section in the_dict, # Process a 'conditions' or 'target_conditions' section in the_dict,
# depending on is_late. If is_late is False, 'conditions' is used. # depending on phase.
# early -> conditions
# late -> target_conditions
# latelate -> no conditions
# #
# Each item in a conditions list consists of cond_expr, a string expression # Each item in a conditions list consists of cond_expr, a string expression
# evaluated as the condition, and true_dict, a dict that will be merged into # evaluated as the condition, and true_dict, a dict that will be merged into
@ -795,13 +834,17 @@ def ProcessConditionsInDict(the_dict, is_late, variables, build_file):
# cond_expr evaluates to false. # cond_expr evaluates to false.
# #
# Any dict merged into the_dict will be recursively processed for nested # Any dict merged into the_dict will be recursively processed for nested
# conditionals and other expansions, also according to is_late, immediately # conditionals and other expansions, also according to phase, immediately
# prior to being merged. # prior to being merged.
if not is_late: if phase == PHASE_EARLY:
conditions_key = 'conditions' conditions_key = 'conditions'
else: elif phase == PHASE_LATE:
conditions_key = 'target_conditions' conditions_key = 'target_conditions'
elif phase == PHASE_LATELATE:
return
else:
assert False
if not conditions_key in the_dict: if not conditions_key in the_dict:
return return
@ -828,7 +871,7 @@ def ProcessConditionsInDict(the_dict, is_late, variables, build_file):
# contain variable references without needing to resort to GYP expansion # contain variable references without needing to resort to GYP expansion
# syntax, this is of dubious value for variables, but someone might want to # syntax, this is of dubious value for variables, but someone might want to
# use a command expansion directly inside a condition. # use a command expansion directly inside a condition.
cond_expr_expanded = ExpandVariables(cond_expr, is_late, variables, cond_expr_expanded = ExpandVariables(cond_expr, phase, variables,
build_file) build_file)
if not isinstance(cond_expr_expanded, str) and \ if not isinstance(cond_expr_expanded, str) and \
not isinstance(cond_expr_expanded, int): not isinstance(cond_expr_expanded, int):
@ -857,7 +900,7 @@ def ProcessConditionsInDict(the_dict, is_late, variables, build_file):
if merge_dict != None: if merge_dict != None:
# Expand variables and nested conditinals in the merge_dict before # Expand variables and nested conditinals in the merge_dict before
# merging it. # merging it.
ProcessVariablesAndConditionsInDict(merge_dict, is_late, ProcessVariablesAndConditionsInDict(merge_dict, phase,
variables, build_file) variables, build_file)
MergeDicts(the_dict, merge_dict, build_file, build_file) MergeDicts(the_dict, merge_dict, build_file, build_file)
@ -901,7 +944,7 @@ def LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key):
variables[variable_name] = value variables[variable_name] = value
def ProcessVariablesAndConditionsInDict(the_dict, is_late, variables_in, def ProcessVariablesAndConditionsInDict(the_dict, phase, variables_in,
build_file, the_dict_key=None): build_file, the_dict_key=None):
"""Handle all variable and command expansion and conditional evaluation. """Handle all variable and command expansion and conditional evaluation.
@ -928,7 +971,7 @@ def ProcessVariablesAndConditionsInDict(the_dict, is_late, variables_in,
# Pass a copy of the variables dict to avoid having it be tainted. # Pass a copy of the variables dict to avoid having it be tainted.
# Otherwise, it would have extra automatics added for everything that # Otherwise, it would have extra automatics added for everything that
# should just be an ordinary variable in this scope. # should just be an ordinary variable in this scope.
ProcessVariablesAndConditionsInDict(the_dict['variables'], is_late, ProcessVariablesAndConditionsInDict(the_dict['variables'], phase,
variables, build_file, 'variables') variables, build_file, 'variables')
LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key) LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key)
@ -936,7 +979,7 @@ def ProcessVariablesAndConditionsInDict(the_dict, is_late, variables_in,
for key, value in the_dict.iteritems(): for key, value in the_dict.iteritems():
# Skip "variables", which was already processed if present. # Skip "variables", which was already processed if present.
if key != 'variables' and isinstance(value, str): if key != 'variables' and isinstance(value, str):
expanded = ExpandVariables(value, is_late, variables, build_file) expanded = ExpandVariables(value, phase, variables, build_file)
if not isinstance(expanded, str) and not isinstance(expanded, int): if not isinstance(expanded, str) and not isinstance(expanded, int):
raise ValueError, \ raise ValueError, \
'Variable expansion in this context permits str and int ' + \ 'Variable expansion in this context permits str and int ' + \
@ -981,7 +1024,7 @@ def ProcessVariablesAndConditionsInDict(the_dict, is_late, variables_in,
# 'target_conditions' section, perform appropriate merging and recursive # 'target_conditions' section, perform appropriate merging and recursive
# conditional and variable processing, and then remove the conditions section # conditional and variable processing, and then remove the conditions section
# from the_dict if it is present. # from the_dict if it is present.
ProcessConditionsInDict(the_dict, is_late, variables, build_file) ProcessConditionsInDict(the_dict, phase, variables, build_file)
# Conditional processing may have resulted in changes to automatics or the # Conditional processing may have resulted in changes to automatics or the
# variables dict. Reload. # variables dict. Reload.
@ -999,21 +1042,21 @@ def ProcessVariablesAndConditionsInDict(the_dict, is_late, variables_in,
if isinstance(value, dict): if isinstance(value, dict):
# Pass a copy of the variables dict so that subdicts can't influence # Pass a copy of the variables dict so that subdicts can't influence
# parents. # parents.
ProcessVariablesAndConditionsInDict(value, is_late, variables, ProcessVariablesAndConditionsInDict(value, phase, variables,
build_file, key) build_file, key)
elif isinstance(value, list): elif isinstance(value, list):
# The list itself can't influence the variables dict, and # The list itself can't influence the variables dict, and
# ProcessVariablesAndConditionsInList will make copies of the variables # ProcessVariablesAndConditionsInList will make copies of the variables
# dict if it needs to pass it to something that can influence it. No # dict if it needs to pass it to something that can influence it. No
# copy is necessary here. # copy is necessary here.
ProcessVariablesAndConditionsInList(value, is_late, variables, ProcessVariablesAndConditionsInList(value, phase, variables,
build_file) build_file)
elif not isinstance(value, int): elif not isinstance(value, int):
raise TypeError, 'Unknown type ' + value.__class__.__name__ + \ raise TypeError, 'Unknown type ' + value.__class__.__name__ + \
' for ' + key ' for ' + key
def ProcessVariablesAndConditionsInList(the_list, is_late, variables, def ProcessVariablesAndConditionsInList(the_list, phase, variables,
build_file): build_file):
# Iterate using an index so that new values can be assigned into the_list. # Iterate using an index so that new values can be assigned into the_list.
index = 0 index = 0
@ -1022,18 +1065,16 @@ def ProcessVariablesAndConditionsInList(the_list, is_late, variables,
if isinstance(item, dict): if isinstance(item, dict):
# Make a copy of the variables dict so that it won't influence anything # Make a copy of the variables dict so that it won't influence anything
# outside of its own scope. # outside of its own scope.
ProcessVariablesAndConditionsInDict(item, is_late, variables, build_file) ProcessVariablesAndConditionsInDict(item, phase, variables, build_file)
elif isinstance(item, list): elif isinstance(item, list):
ProcessVariablesAndConditionsInList(item, is_late, variables, build_file) ProcessVariablesAndConditionsInList(item, phase, variables, build_file)
elif isinstance(item, str): elif isinstance(item, str):
expanded = ExpandVariables(item, is_late, variables, build_file) expanded = ExpandVariables(item, phase, variables, build_file)
if isinstance(expanded, str) or isinstance(expanded, int): if isinstance(expanded, str) or isinstance(expanded, int):
the_list[index] = expanded the_list[index] = expanded
elif isinstance(expanded, list): elif isinstance(expanded, list):
del the_list[index] the_list[index:index+1] = expanded
for expanded_item in expanded: index += len(expanded)
the_list.insert(index, expanded_item)
index = index + 1
# index now identifies the next item to examine. Continue right now # index now identifies the next item to examine. Continue right now
# without falling into the index increment below. # without falling into the index increment below.
@ -1098,7 +1139,6 @@ def QualifyDependencies(targets):
for index in xrange(0, len(dependencies)): for index in xrange(0, len(dependencies)):
dep_file, dep_target, dep_toolset = gyp.common.ResolveTarget( dep_file, dep_target, dep_toolset = gyp.common.ResolveTarget(
target_build_file, dependencies[index], toolset) target_build_file, dependencies[index], toolset)
global multiple_toolsets
if not multiple_toolsets: if not multiple_toolsets:
# Ignore toolset specification in the dependency if it is specified. # Ignore toolset specification in the dependency if it is specified.
dep_toolset = toolset dep_toolset = toolset
@ -1184,6 +1224,22 @@ def ExpandWildcardDependencies(targets, data):
index = index + 1 index = index + 1
def Unify(l):
  """Removes duplicate elements from l, keeping the first element."""
  # An explicit loop with a membership set: order of first occurrence is
  # preserved, later duplicates are dropped.
  result = []
  seen = set()
  for item in l:
    if item not in seen:
      seen.add(item)
      result.append(item)
  return result
def RemoveDuplicateDependencies(targets):
  """Makes sure every dependency appears only once in all targets's dependency
  lists."""
  # Deduplicate in place: each dependency-type list is replaced by its
  # first-occurrence-preserving unification.
  for target_name, target_dict in targets.iteritems():
    for dependency_key in dependency_sections:
      if target_dict.get(dependency_key):
        target_dict[dependency_key] = Unify(target_dict[dependency_key])
class DependencyGraphNode(object): class DependencyGraphNode(object):
""" """
@ -1212,14 +1268,14 @@ class DependencyGraphNode(object):
# dependencies not in flat_list. Initially, it is a copy of the children # dependencies not in flat_list. Initially, it is a copy of the children
# of this node, because when the graph was built, nodes with no # of this node, because when the graph was built, nodes with no
# dependencies were made implicit dependents of the root node. # dependencies were made implicit dependents of the root node.
in_degree_zeros = self.dependents[:] in_degree_zeros = set(self.dependents[:])
while in_degree_zeros: while in_degree_zeros:
# Nodes in in_degree_zeros have no dependencies not in flat_list, so they # Nodes in in_degree_zeros have no dependencies not in flat_list, so they
# can be appended to flat_list. Take these nodes out of in_degree_zeros # can be appended to flat_list. Take these nodes out of in_degree_zeros
# as work progresses, so that the next node to process from the list can # as work progresses, so that the next node to process from the list can
# always be accessed at a consistent position. # always be accessed at a consistent position.
node = in_degree_zeros.pop(0) node = in_degree_zeros.pop()
flat_list.append(node.ref) flat_list.append(node.ref)
# Look at dependents of the node just added to flat_list. Some of them # Look at dependents of the node just added to flat_list. Some of them
@ -1239,7 +1295,7 @@ class DependencyGraphNode(object):
# All of the dependent's dependencies are already in flat_list. Add # All of the dependent's dependencies are already in flat_list. Add
# it to in_degree_zeros where it will be processed in a future # it to in_degree_zeros where it will be processed in a future
# iteration of the outer loop. # iteration of the outer loop.
in_degree_zeros.append(node_dependent) in_degree_zeros.add(node_dependent)
return flat_list return flat_list
@ -1423,7 +1479,8 @@ def BuildDependencyList(targets):
# targets that are not in flat_list. # targets that are not in flat_list.
if len(flat_list) != len(targets): if len(flat_list) != len(targets):
raise DependencyGraphNode.CircularException, \ raise DependencyGraphNode.CircularException, \
'Some targets not reachable, cycle in dependency graph detected' 'Some targets not reachable, cycle in dependency graph detected: ' + \
' '.join(set(flat_list) ^ set(targets))
return [dependency_nodes, flat_list] return [dependency_nodes, flat_list]
@ -1584,7 +1641,7 @@ def AdjustStaticLibraryDependencies(flat_list, targets, dependency_nodes,
# Initialize this here to speed up MakePathRelative. # Initialize this here to speed up MakePathRelative.
exception_re = re.compile(r'''["']?[-/$<>]''') exception_re = re.compile(r'''["']?[-/$<>^]''')
def MakePathRelative(to_file, fro_file, item): def MakePathRelative(to_file, fro_file, item):
@ -1600,6 +1657,7 @@ def MakePathRelative(to_file, fro_file, item):
# "libraries" section) # "libraries" section)
# < Used for our own variable and command expansions (see ExpandVariables) # < Used for our own variable and command expansions (see ExpandVariables)
# > Used for our own variable and command expansions (see ExpandVariables) # > Used for our own variable and command expansions (see ExpandVariables)
# ^ Used for our own variable and command expansions (see ExpandVariables)
# #
# "/' Used when a value is quoted. If these are present, then we # "/' Used when a value is quoted. If these are present, then we
# check the second character instead. # check the second character instead.
@ -1816,7 +1874,6 @@ def MergeConfigWithInheritance(new_configuration_dict, build_file,
def SetUpConfigurations(target, target_dict): def SetUpConfigurations(target, target_dict):
global non_configuration_keys
# key_suffixes is a list of key suffixes that might appear on key names. # key_suffixes is a list of key suffixes that might appear on key names.
# These suffixes are handled in conditional evaluations (for =, +, and ?) # These suffixes are handled in conditional evaluations (for =, +, and ?)
# and rules/exclude processing (for ! and /). Keys with these suffixes # and rules/exclude processing (for ! and /). Keys with these suffixes
@ -2004,7 +2061,7 @@ def ProcessListFiltersInDict(name, the_dict):
else: else:
# This is an action that doesn't make any sense. # This is an action that doesn't make any sense.
raise ValueError, 'Unrecognized action ' + action + ' in ' + name + \ raise ValueError, 'Unrecognized action ' + action + ' in ' + name + \
' key ' + key ' key ' + regex_key
for index in xrange(0, len(the_list)): for index in xrange(0, len(the_list)):
list_item = the_list[index] list_item = the_list[index]
@ -2084,6 +2141,33 @@ def ValidateTargetType(target, target_dict):
(target, target_type, '/'.join(VALID_TARGET_TYPES))) (target, target_type, '/'.join(VALID_TARGET_TYPES)))
def ValidateSourcesInTarget(target, target_dict, build_file):
  """Ensures a static_library target has no two compiled sources sharing a
  basename.

  Some build systems (e.g. MSVC08) derive object file names from source
  basenames only, so 'a/x.c' and 'b/x.cc' would collide. Prints the offending
  groups and raises KeyError when duplicates are found. Non-compiled files
  (headers, resources, ...) are ignored.
  """
  # TODO: Check if MSVC allows this for non-static_library targets.
  if target_dict.get('type', None) != 'static_library':
    return
  sources = target_dict.get('sources', [])
  basenames = {}
  for source in sources:
    name, ext = os.path.splitext(source)
    # Only files that produce an object file can collide.
    is_compiled_file = ext in [
        '.c', '.cc', '.cpp', '.cxx', '.m', '.mm', '.s', '.S']
    if not is_compiled_file:
      continue
    basename = os.path.basename(name)  # Don't include extension.
    basenames.setdefault(basename, []).append(source)

  error = ''
  # .items() instead of .iteritems(): behaviorally identical here and works
  # on both Python 2 and 3.
  for basename, files in basenames.items():
    if len(files) > 1:
      error += '  %s: %s\n' % (basename, ' '.join(files))

  if error:
    print ('static library %s has several files with the same basename:\n' %
           target + error + 'Some build systems, e.g. MSVC08, '
           'cannot handle that.')
    # Call form instead of the Python 2-only 'raise KeyError, msg' statement;
    # same exception type and message, but also valid Python 3 syntax.
    raise KeyError('Duplicate basenames in sources section, see list above')
def ValidateRulesInTarget(target, target_dict, extra_sources_for_rules): def ValidateRulesInTarget(target, target_dict, extra_sources_for_rules):
"""Ensures that the rules sections in target_dict are valid and consistent, """Ensures that the rules sections in target_dict are valid and consistent,
and determines which sources they apply to. and determines which sources they apply to.
@ -2141,19 +2225,6 @@ def ValidateRulesInTarget(target, target_dict, extra_sources_for_rules):
rule['rule_sources'] = rule_sources rule['rule_sources'] = rule_sources
def ValidateActionsInTarget(target, target_dict, build_file):
'''Validates the inputs to the actions in a target.'''
target_name = target_dict.get('target_name')
actions = target_dict.get('actions', [])
for action in actions:
action_name = action.get('action_name')
if not action_name:
raise Exception("Anonymous action in target %s. "
"An action must have an 'action_name' field." %
target_name)
inputs = action.get('inputs', [])
def ValidateRunAsInTarget(target, target_dict, build_file): def ValidateRunAsInTarget(target, target_dict, build_file):
target_name = target_dict.get('target_name') target_name = target_dict.get('target_name')
run_as = target_dict.get('run_as') run_as = target_dict.get('run_as')
@ -2184,6 +2255,24 @@ def ValidateRunAsInTarget(target, target_dict, build_file):
(target_name, build_file)) (target_name, build_file))
def ValidateActionsInTarget(target, target_dict, build_file):
  '''Validates the inputs to the actions in a target.'''
  target_name = target_dict.get('target_name')
  # Each action must be named, must declare an 'inputs' list (even an empty
  # one), and must not start its command with an empty string.
  for action in target_dict.get('actions', []):
    if not action.get('action_name'):
      raise Exception("Anonymous action in target %s. "
                      "An action must have an 'action_name' field." %
                      target_name)
    if action.get('inputs', None) is None:
      raise Exception('Action in target %s has no inputs.' % target_name)
    command = action.get('action')
    if command and not command[0]:
      raise Exception("Empty action as command in target %s." % target_name)
def TurnIntIntoStrInDict(the_dict): def TurnIntIntoStrInDict(the_dict):
"""Given dict the_dict, recursively converts all integers into strings. """Given dict the_dict, recursively converts all integers into strings.
""" """
@ -2314,6 +2403,9 @@ def Load(build_files, variables, includes, depth, generator_input_info, check,
for key in tmp_dict: for key in tmp_dict:
target_dict[key] = tmp_dict[key] target_dict[key] = tmp_dict[key]
# Make sure every dependency appears at most once.
RemoveDuplicateDependencies(targets)
if circular_check: if circular_check:
# Make sure that any targets in a.gyp don't contain dependencies in other # Make sure that any targets in a.gyp don't contain dependencies in other
# .gyp files that further depend on a.gyp. # .gyp files that further depend on a.gyp.
@ -2348,8 +2440,8 @@ def Load(build_files, variables, includes, depth, generator_input_info, check,
for target in flat_list: for target in flat_list:
target_dict = targets[target] target_dict = targets[target]
build_file = gyp.common.BuildFile(target) build_file = gyp.common.BuildFile(target)
ProcessVariablesAndConditionsInDict(target_dict, True, variables, ProcessVariablesAndConditionsInDict(
build_file) target_dict, PHASE_LATE, variables, build_file)
# Move everything that can go into a "configurations" section into one. # Move everything that can go into a "configurations" section into one.
for target in flat_list: for target in flat_list:
@ -2361,6 +2453,13 @@ def Load(build_files, variables, includes, depth, generator_input_info, check,
target_dict = targets[target] target_dict = targets[target]
ProcessListFiltersInDict(target, target_dict) ProcessListFiltersInDict(target, target_dict)
# Apply "latelate" variable expansions and condition evaluations.
for target in flat_list:
target_dict = targets[target]
build_file = gyp.common.BuildFile(target)
ProcessVariablesAndConditionsInDict(
target_dict, PHASE_LATELATE, variables, build_file)
# Make sure that the rules make sense, and build up rule_sources lists as # Make sure that the rules make sense, and build up rule_sources lists as
# needed. Not all generators will need to use the rule_sources lists, but # needed. Not all generators will need to use the rule_sources lists, but
# some may, and it seems best to build the list in a common spot. # some may, and it seems best to build the list in a common spot.
@ -2369,6 +2468,10 @@ def Load(build_files, variables, includes, depth, generator_input_info, check,
target_dict = targets[target] target_dict = targets[target]
build_file = gyp.common.BuildFile(target) build_file = gyp.common.BuildFile(target)
ValidateTargetType(target, target_dict) ValidateTargetType(target, target_dict)
# TODO(thakis): Get vpx_scale/arm/scalesystemdependent.c to be renamed to
# scalesystemdependent_arm_additions.c or similar.
if 'arm' not in variables.get('target_arch', ''):
ValidateSourcesInTarget(target, target_dict, build_file)
ValidateRulesInTarget(target, target_dict, extra_sources_for_rules) ValidateRulesInTarget(target, target_dict, extra_sources_for_rules)
ValidateRunAsInTarget(target, target_dict, build_file) ValidateRunAsInTarget(target, target_dict, build_file)
ValidateActionsInTarget(target, target_dict, build_file) ValidateActionsInTarget(target, target_dict, build_file)

33
tools/gyp/pylib/gyp/mac_tool.py

@ -51,26 +51,31 @@ class MacTool(object):
shutil.rmtree(dest) shutil.rmtree(dest)
shutil.copytree(source, dest) shutil.copytree(source, dest)
elif extension == '.xib': elif extension == '.xib':
self._CopyXIBFile(source, dest) return self._CopyXIBFile(source, dest)
elif extension == '.strings': elif extension == '.strings':
self._CopyStringsFile(source, dest) self._CopyStringsFile(source, dest)
# TODO: Given that files with arbitrary extensions can be copied to the
# bundle, we will want to get rid of this whitelist eventually.
elif extension in [
'.icns', '.manifest', '.pak', '.pdf', '.png', '.sb', '.sh',
'.ttf', '.sdef']:
shutil.copyfile(source, dest)
else: else:
raise NotImplementedError( shutil.copyfile(source, dest)
"Don't know how to copy bundle resources of type %s while copying "
"%s to %s)" % (extension, source, dest))
def _CopyXIBFile(self, source, dest): def _CopyXIBFile(self, source, dest):
"""Compiles a XIB file with ibtool into a binary plist in the bundle.""" """Compiles a XIB file with ibtool into a binary plist in the bundle."""
args = ['/Developer/usr/bin/ibtool', '--errors', '--warnings', tools_dir = os.environ.get('DEVELOPER_BIN_DIR', '/usr/bin')
args = [os.path.join(tools_dir, 'ibtool'), '--errors', '--warnings',
'--notices', '--output-format', 'human-readable-text', '--compile', '--notices', '--output-format', 'human-readable-text', '--compile',
dest, source] dest, source]
subprocess.call(args) ibtool_section_re = re.compile(r'/\*.*\*/')
ibtool_re = re.compile(r'.*note:.*is clipping its content')
ibtoolout = subprocess.Popen(args, stdout=subprocess.PIPE)
current_section_header = None
for line in ibtoolout.stdout:
if ibtool_section_re.match(line):
current_section_header = line
elif not ibtool_re.match(line):
if current_section_header:
sys.stdout.write(current_section_header)
current_section_header = None
sys.stdout.write(line)
return ibtoolout.returncode
def _CopyStringsFile(self, source, dest): def _CopyStringsFile(self, source, dest):
"""Copies a .strings file using iconv to reconvert the input into UTF-16.""" """Copies a .strings file using iconv to reconvert the input into UTF-16."""
@ -138,8 +143,8 @@ class MacTool(object):
# The format of PkgInfo is eight characters, representing the bundle type # The format of PkgInfo is eight characters, representing the bundle type
# and bundle signature, each four characters. If that is missing, four # and bundle signature, each four characters. If that is missing, four
# '?' characters are used instead. # '?' characters are used instead.
signature_code = plist['CFBundleSignature'] signature_code = plist.get('CFBundleSignature', '????')
if len(signature_code) != 4: if len(signature_code) != 4: # Wrong length resets everything, too.
signature_code = '?' * 4 signature_code = '?' * 4
dest = os.path.join(os.path.dirname(info_plist), 'PkgInfo') dest = os.path.join(os.path.dirname(info_plist), 'PkgInfo')

642
tools/gyp/pylib/gyp/msvs_emulation.py

@ -0,0 +1,642 @@
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
This module helps emulate Visual Studio 2008 behavior on top of other
build systems, primarily ninja.
"""
import os
import re
import subprocess
import sys
import gyp.MSVSVersion
windows_quoter_regex = re.compile(r'(\\*)"')


def QuoteForRspFile(arg):
  """Quote a command line argument so that it appears as one argument when
  processed via cmd.exe and parsed by CommandLineToArgvW (as is typical for
  Windows programs)."""
  # These are CommandLineToArgvW's rules, not the shell's: cmd.exe does no
  # argument splitting of its own here, and most Windows programs (the
  # compiler included) use CommandLineToArgvW. See http://goo.gl/cuFbX and
  # http://goo.gl/dhPnp including the comment threads.
  #
  # A literal quote must be preceded by 2n+1 backslashes to come out as
  # n backslashes + the quote, so double every backslash run that sits in
  # front of a quote and add the escaping backslash for the quote itself.
  def _escape_quote(match):
    return match.group(1) * 2 + '\\"'
  quoted = windows_quoter_regex.sub(_escape_quote, arg)
  # Double '%' so it cannot be mistaken for a batch positional argument and
  # a verbatim '%LIKE_AN_ENV_VAR%' survives to the program unchanged.
  quoted = quoted.replace('%', '%%')
  # These strings land in rsp files, so no shell (^) escaping is needed.
  # Wrap the whole thing in quotes so embedded whitespace is not a word
  # break.
  return '"%s"' % quoted


def EncodeRspFileList(args):
  """Process a list of arguments using QuoteCmdExeArgument."""
  if not args:
    return ''
  # The first argument is assumed to be the command; it is deliberately left
  # unquoted so built-ins like 'echo' keep working. For 'call ../x.bat' only
  # the path portion may be normpath'd -- normalizing the whole string would
  # treat it as a single (bogus) path.
  first = args[0]
  if first.startswith('call '):
    keyword, script = first.split(' ', 1)
    program = keyword + ' ' + os.path.normpath(script)
  else:
    program = os.path.normpath(first)
  return program + ' ' + ' '.join(QuoteForRspFile(a) for a in args[1:])
def _GenericRetrieve(root, default, path):
"""Given a list of dictionary keys |path| and a tree of dicts |root|, find
value at path, or return |default| if any of the path doesn't exist."""
if not root:
return default
if not path:
return root
return _GenericRetrieve(root.get(path[0]), default, path[1:])
def _AddPrefix(element, prefix):
"""Add |prefix| to |element| or each subelement if element is iterable."""
if element is None:
return element
# Note, not Iterable because we don't want to handle strings like that.
if isinstance(element, list) or isinstance(element, tuple):
return [prefix + e for e in element]
else:
return prefix + element
def _DoRemapping(element, map):
"""If |element| then remap it through |map|. If |element| is iterable then
each item will be remapped. Any elements not found will be removed."""
if map is not None and element is not None:
if not callable(map):
map = map.get # Assume it's a dict, otherwise a callable to do the remap.
if isinstance(element, list) or isinstance(element, tuple):
element = filter(None, [map(elem) for elem in element])
else:
element = map(element)
return element
def _AppendOrReturn(append, element):
"""If |append| is None, simply return |element|. If |append| is not None,
then add |element| to it, adding each item in |element| if it's a list or
tuple."""
if append is not None and element is not None:
if isinstance(element, list) or isinstance(element, tuple):
append.extend(element)
else:
append.append(element)
else:
return element
def _FindDirectXInstallation():
"""Try to find an installation location for the DirectX SDK. Check for the
standard environment variable, and if that doesn't exist, try to find
via the registry. May return None if not found in either location."""
dxsdk_dir = os.environ.get('DXSDK_DIR')
if not dxsdk_dir:
# Setup params to pass to and attempt to launch reg.exe.
cmd = ['reg.exe', 'query', r'HKLM\Software\Microsoft\DirectX', '/s']
p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
for line in p.communicate()[0].splitlines():
if 'InstallPath' in line:
dxsdk_dir = line.split(' ')[3] + "\\"
return dxsdk_dir
class MsvsSettings(object):
  """A class that understands the gyp 'msvs_...' values (especially the
  msvs_settings field). They largely correspond to the VS2008 IDE DOM. This
  class helps map those settings to command line options."""

  def __init__(self, spec, generator_flags):
    # |spec| is a target dict from the gyp input; |generator_flags| select
    # the VS version and related generator behavior.
    self.spec = spec
    self.vs_version = GetVSVersion(generator_flags)
    self.dxsdk_dir = _FindDirectXInstallation()

    # Try to find an installation location for the Windows DDK by checking
    # the WDK_DIR environment variable, may be None.
    self.wdk_dir = os.environ.get('WDK_DIR')

    # For each supported msvs_* field, build a per-configuration dict,
    # defaulting to an empty instance of the field's type when a
    # configuration omits it.
    supported_fields = [
        ('msvs_configuration_attributes', dict),
        ('msvs_settings', dict),
        ('msvs_system_include_dirs', list),
        ('msvs_disabled_warnings', list),
        ('msvs_precompiled_header', str),
        ('msvs_precompiled_source', str),
        ('msvs_target_platform', str),
        ]
    configs = spec['configurations']
    for field, default in supported_fields:
      setattr(self, field, {})
      for configname, config in configs.iteritems():
        getattr(self, field)[configname] = config.get(field, default())

    self.msvs_cygwin_dirs = spec.get('msvs_cygwin_dirs', ['.'])

  def GetVSMacroEnv(self, base_to_build=None, config=None):
    """Get a dict of variables mapping internal VS macro names to their gyp
    equivalents."""
    target_platform = self.GetTargetPlatform(config)
    # VS macros spell the 32-bit platform 'Win32' even though gyp uses 'x86'.
    target_platform = {'x86': 'Win32'}.get(target_platform, target_platform)
    replacements = {
        '$(VSInstallDir)': self.vs_version.Path(),
        '$(VCInstallDir)': os.path.join(self.vs_version.Path(), 'VC') + '\\',
        '$(OutDir)\\': base_to_build + '\\' if base_to_build else '',
        '$(IntDir)': '$!INTERMEDIATE_DIR',
        '$(InputPath)': '${source}',
        '$(InputName)': '${root}',
        '$(ProjectName)': self.spec['target_name'],
        '$(PlatformName)': target_platform,
    }
    # Chromium uses DXSDK_DIR in include/lib paths, but it may or may not be
    # set. This happens when the SDK is sync'd via src-internal, rather than
    # by typical end-user installation of the SDK. If it's not set, we don't
    # want to leave the unexpanded variable in the path, so simply strip it.
    replacements['$(DXSDK_DIR)'] = self.dxsdk_dir if self.dxsdk_dir else ''
    replacements['$(WDK_DIR)'] = self.wdk_dir if self.wdk_dir else ''
    return replacements

  def ConvertVSMacros(self, s, base_to_build=None, config=None):
    """Convert from VS macro names to something equivalent."""
    env = self.GetVSMacroEnv(base_to_build, config=config)
    return ExpandMacros(s, env)

  def AdjustLibraries(self, libraries):
    """Strip -l from library if it's specified with that."""
    return [lib[2:] if lib.startswith('-l') else lib for lib in libraries]

  def _GetAndMunge(self, field, path, default, prefix, append, map):
    """Retrieve a value from |field| at |path| or return |default|. If
    |append| is specified, and the item is found, it will be appended to that
    object instead of returned. If |map| is specified, results will be
    remapped through |map| before being returned or appended."""
    result = _GenericRetrieve(field, default, path)
    result = _DoRemapping(result, map)
    result = _AddPrefix(result, prefix)
    return _AppendOrReturn(append, result)

  class _GetWrapper(object):
    # Helper that pre-binds a settings dict, a tool name and an optional
    # accumulator list so call sites can look up individual tool settings
    # tersely (see GetCflags/GetLdflags below).
    def __init__(self, parent, field, base_path, append=None):
      self.parent = parent
      self.field = field
      self.base_path = [base_path]
      self.append = append
    def __call__(self, name, map=None, prefix='', default=None):
      return self.parent._GetAndMunge(self.field, self.base_path + [name],
          default=default, prefix=prefix, append=self.append, map=map)

  def GetTargetPlatform(self, config):
    # Returns the gyp-style platform name for |config| ('x86' or e.g. 'x64'),
    # defaulting to 'x86' when msvs_target_platform is unset.
    target_platform = self.msvs_target_platform.get(config, '')
    if not target_platform:
      target_platform = 'Win32'
    return {'Win32': 'x86'}.get(target_platform, target_platform)

  def _RealConfig(self, config):
    # msvs_target_platform hackery: x64 settings live under a parallel
    # '<config>_x64' configuration name, so redirect lookups there.
    target_platform = self.GetTargetPlatform(config)
    if target_platform == 'x64' and not config.endswith('_x64'):
      config += '_x64'
    return config

  def _Setting(self, path, config,
               default=None, prefix='', append=None, map=None):
    """_GetAndMunge for msvs_settings."""
    config = self._RealConfig(config)
    return self._GetAndMunge(
        self.msvs_settings[config], path, default, prefix, append, map)

  def _ConfigAttrib(self, path, config,
                    default=None, prefix='', append=None, map=None):
    """_GetAndMunge for msvs_configuration_attributes."""
    config = self._RealConfig(config)
    return self._GetAndMunge(
        self.msvs_configuration_attributes[config],
        path, default, prefix, append, map)

  def AdjustIncludeDirs(self, include_dirs, config):
    """Updates include_dirs to expand VS specific paths, and adds the system
    include dirs used for platform SDK and similar."""
    config = self._RealConfig(config)
    includes = include_dirs + self.msvs_system_include_dirs[config]
    includes.extend(self._Setting(
        ('VCCLCompilerTool', 'AdditionalIncludeDirectories'), config,
        default=[]))
    return [self.ConvertVSMacros(p, config=config) for p in includes]

  def GetComputedDefines(self, config):
    """Returns the set of defines that are injected to the defines list based
    on other VS settings."""
    config = self._RealConfig(config)
    defines = []
    # CharacterSet attribute: '1' selects Unicode, '2' selects MBCS.
    if self._ConfigAttrib(['CharacterSet'], config) == '1':
      defines.extend(('_UNICODE', 'UNICODE'))
    if self._ConfigAttrib(['CharacterSet'], config) == '2':
      defines.append('_MBCS')
    defines.extend(self._Setting(
        ('VCCLCompilerTool', 'PreprocessorDefinitions'), config, default=[]))
    return defines

  def GetOutputName(self, config, expand_special):
    """Gets the explicitly overridden output name for a target or returns None
    if it's not overridden."""
    config = self._RealConfig(config)
    type = self.spec['type']
    root = 'VCLibrarianTool' if type == 'static_library' else 'VCLinkerTool'
    # TODO(scottmg): Handle OutputDirectory without OutputFile.
    output_file = self._Setting((root, 'OutputFile'), config)
    if output_file:
      output_file = expand_special(self.ConvertVSMacros(
          output_file, config=config))
    return output_file

  def GetCflags(self, config):
    """Returns the flags that need to be added to .c and .cc compilations."""
    config = self._RealConfig(config)
    cflags = []
    cflags.extend(['/wd' + w for w in self.msvs_disabled_warnings[config]])
    # Each cl(...) call maps one VCCLCompilerTool setting to its cl.exe flag
    # and appends the result (if any) to |cflags|.
    cl = self._GetWrapper(self, self.msvs_settings[config],
                          'VCCLCompilerTool', append=cflags)
    cl('Optimization',
       map={'0': 'd', '1': '1', '2': '2', '3': 'x'}, prefix='/O')
    cl('InlineFunctionExpansion', prefix='/Ob')
    cl('OmitFramePointers', map={'false': '-', 'true': ''}, prefix='/Oy')
    cl('FavorSizeOrSpeed', map={'1': 't', '2': 's'}, prefix='/O')
    cl('WholeProgramOptimization', map={'true': '/GL'})
    cl('WarningLevel', prefix='/W')
    cl('WarnAsError', map={'true': '/WX'})
    cl('DebugInformationFormat',
       map={'1': '7', '3': 'i', '4': 'I'}, prefix='/Z')
    cl('RuntimeTypeInfo', map={'true': '/GR', 'false': '/GR-'})
    cl('EnableFunctionLevelLinking', map={'true': '/Gy', 'false': '/Gy-'})
    cl('MinimalRebuild', map={'true': '/Gm'})
    cl('BufferSecurityCheck', map={'true': '/GS', 'false': '/GS-'})
    cl('BasicRuntimeChecks', map={'1': 's', '2': 'u', '3': '1'}, prefix='/RTC')
    cl('RuntimeLibrary',
       map={'0': 'T', '1': 'Td', '2': 'D', '3': 'Dd'}, prefix='/M')
    cl('ExceptionHandling', map={'1': 'sc','2': 'a'}, prefix='/EH')
    cl('AdditionalOptions', prefix='')
    # ninja handles parallelism by itself, don't have the compiler do it too.
    cflags = filter(lambda x: not x.startswith('/MP'), cflags)
    return cflags

  def GetPrecompiledHeader(self, config, gyp_to_build_path):
    """Returns an object that handles the generation of precompiled header
    build steps."""
    config = self._RealConfig(config)
    # NOTE(review): _PchHelper is not defined in this excerpt; presumably an
    # alias for PrecompiledHeader defined elsewhere in the module -- confirm.
    return _PchHelper(self, config, gyp_to_build_path)

  def _GetPchFlags(self, config, extension):
    """Get the flags to be added to the cflags for precompiled header support.
    """
    config = self._RealConfig(config)
    # The PCH is only built once by a particular source file. Usage of PCH must
    # only be for the same language (i.e. C vs. C++), so only include the pch
    # flags when the language matches.
    if self.msvs_precompiled_header[config]:
      source_ext = os.path.splitext(self.msvs_precompiled_source[config])[1]
      if _LanguageMatchesForPch(source_ext, extension):
        pch = os.path.split(self.msvs_precompiled_header[config])[1]
        return ['/Yu' + pch, '/FI' + pch, '/Fp${pchprefix}.' + pch + '.pch']
    return []

  def GetCflagsC(self, config):
    """Returns the flags that need to be added to .c compilations."""
    config = self._RealConfig(config)
    return self._GetPchFlags(config, '.c')

  def GetCflagsCC(self, config):
    """Returns the flags that need to be added to .cc compilations."""
    config = self._RealConfig(config)
    # /TP forces compilation as C++ regardless of extension.
    return ['/TP'] + self._GetPchFlags(config, '.cc')

  def _GetAdditionalLibraryDirectories(self, root, config, gyp_to_build_path):
    """Get and normalize the list of paths in AdditionalLibraryDirectories
    setting."""
    config = self._RealConfig(config)
    libpaths = self._Setting((root, 'AdditionalLibraryDirectories'),
                             config, default=[])
    libpaths = [os.path.normpath(
        gyp_to_build_path(self.ConvertVSMacros(p, config=config)))
        for p in libpaths]
    return ['/LIBPATH:"' + p + '"' for p in libpaths]

  def GetLibFlags(self, config, gyp_to_build_path):
    """Returns the flags that need to be added to lib commands."""
    config = self._RealConfig(config)
    libflags = []
    lib = self._GetWrapper(self, self.msvs_settings[config],
                          'VCLibrarianTool', append=libflags)
    libflags.extend(self._GetAdditionalLibraryDirectories(
        'VCLibrarianTool', config, gyp_to_build_path))
    lib('AdditionalOptions')
    return libflags

  def _GetDefFileAsLdflags(self, spec, ldflags, gyp_to_build_path):
    """.def files get implicitly converted to a ModuleDefinitionFile for the
    linker in the VS generator. Emulate that behaviour here."""
    def_file = ''
    if spec['type'] in ('shared_library', 'loadable_module', 'executable'):
      def_files = [s for s in spec.get('sources', []) if s.endswith('.def')]
      if len(def_files) == 1:
        ldflags.append('/DEF:"%s"' % gyp_to_build_path(def_files[0]))
      elif len(def_files) > 1:
        raise Exception("Multiple .def files")

  def GetLdflags(self, config, gyp_to_build_path, expand_special):
    """Returns the flags that need to be added to link commands."""
    config = self._RealConfig(config)
    ldflags = []
    ld = self._GetWrapper(self, self.msvs_settings[config],
                          'VCLinkerTool', append=ldflags)
    self._GetDefFileAsLdflags(self.spec, ldflags, gyp_to_build_path)
    ld('GenerateDebugInformation', map={'true': '/DEBUG'})
    ld('TargetMachine', map={'1': 'X86', '17': 'X64'}, prefix='/MACHINE:')
    ldflags.extend(self._GetAdditionalLibraryDirectories(
        'VCLinkerTool', config, gyp_to_build_path))
    ld('DelayLoadDLLs', prefix='/DELAYLOAD:')
    out = self.GetOutputName(config, expand_special)
    if out:
      ldflags.append('/OUT:' + out)
    ld('AdditionalOptions', prefix='')
    ld('SubSystem', map={'1': 'CONSOLE', '2': 'WINDOWS'}, prefix='/SUBSYSTEM:')
    ld('LinkIncremental', map={'1': ':NO', '2': ''}, prefix='/INCREMENTAL')
    ld('FixedBaseAddress', map={'1': ':NO', '2': ''}, prefix='/FIXED')
    ld('RandomizedBaseAddress',
       map={'1': ':NO', '2': ''}, prefix='/DYNAMICBASE')
    ld('DataExecutionPrevention',
       map={'1': ':NO', '2': ''}, prefix='/NXCOMPAT')
    ld('OptimizeReferences', map={'1': 'NOREF', '2': 'REF'}, prefix='/OPT:')
    ld('EnableCOMDATFolding', map={'1': 'NOICF', '2': 'ICF'}, prefix='/OPT:')
    ld('LinkTimeCodeGeneration', map={'1': '/LTCG'})
    ld('IgnoreDefaultLibraryNames', prefix='/NODEFAULTLIB:')
    ld('ResourceOnlyDLL', map={'true': '/NOENTRY'})
    ld('EntryPointSymbol', prefix='/ENTRY:')
    # TODO(scottmg): This should sort of be somewhere else (not really a flag).
    ld('AdditionalDependencies', prefix='')
    # TODO(scottmg): These too.
    ldflags.extend(('kernel32.lib', 'user32.lib', 'gdi32.lib', 'winspool.lib',
        'comdlg32.lib', 'advapi32.lib', 'shell32.lib', 'ole32.lib',
        'oleaut32.lib', 'uuid.lib', 'odbc32.lib', 'DelayImp.lib'))

    # If the base address is not specifically controlled, DYNAMICBASE should
    # be on by default.
    base_flags = filter(lambda x: 'DYNAMICBASE' in x or x == '/FIXED',
                        ldflags)
    if not base_flags:
      ldflags.append('/DYNAMICBASE')

    # If the NXCOMPAT flag has not been specified, default to on. Despite the
    # documentation that says this only defaults to on when the subsystem is
    # Vista or greater (which applies to the linker), the IDE defaults it on
    # unless it's explicitly off.
    if not filter(lambda x: 'NXCOMPAT' in x, ldflags):
      ldflags.append('/NXCOMPAT')

    return ldflags

  def IsUseLibraryDependencyInputs(self, config):
    """Returns whether the target should be linked via Use Library Dependency
    Inputs (using component .objs of a given .lib)."""
    config = self._RealConfig(config)
    uldi = self._Setting(('VCLinkerTool', 'UseLibraryDependencyInputs'), config)
    return uldi == 'true'

  def GetRcflags(self, config, gyp_to_ninja_path):
    """Returns the flags that need to be added to invocations of the resource
    compiler."""
    config = self._RealConfig(config)
    rcflags = []
    rc = self._GetWrapper(self, self.msvs_settings[config],
                          'VCResourceCompilerTool', append=rcflags)
    rc('AdditionalIncludeDirectories', map=gyp_to_ninja_path, prefix='/I')
    rcflags.append('/I' + gyp_to_ninja_path('.'))
    rc('PreprocessorDefinitions', prefix='/d')
    # /l arg must be in hex without leading '0x'
    rc('Culture', prefix='/l', map=lambda x: hex(int(x))[2:])
    return rcflags

  def BuildCygwinBashCommandLine(self, args, path_to_base):
    """Build a command line that runs args via cygwin bash. We assume that all
    incoming paths are in Windows normpath'd form, so they need to be
    converted to posix style for the part of the command line that's passed to
    bash. We also have to do some Visual Studio macro emulation here because
    various rules use magic VS names for things. Also note that rules that
    contain ninja variables cannot be fixed here (for example ${source}), so
    the outer generator needs to make sure that the paths that are written out
    are in posix style, if the command line will be used here."""
    cygwin_dir = os.path.normpath(
        os.path.join(path_to_base, self.msvs_cygwin_dirs[0]))
    cd = ('cd %s' % path_to_base).replace('\\', '/')
    args = [a.replace('\\', '/') for a in args]
    # Single-quote each argument for bash, escaping embedded single quotes.
    args = ["'%s'" % a.replace("'", "\\'") for a in args]
    bash_cmd = ' '.join(args)
    cmd = (
        'call "%s\\setup_env.bat" && set CYGWIN=nontsec && ' % cygwin_dir +
        'bash -c "%s ; %s"' % (cd, bash_cmd))
    return cmd

  def IsRuleRunUnderCygwin(self, rule):
    """Determine if an action should be run under cygwin. If the variable is
    unset, or set to 1 we use cygwin."""
    return int(rule.get('msvs_cygwin_shell',
                        self.spec.get('msvs_cygwin_shell', 1))) != 0

  def HasExplicitIdlRules(self, spec):
    """Determine if there's an explicit rule for idl files. When there isn't we
    need to generate implicit rules to build MIDL .idl files."""
    for rule in spec.get('rules', []):
      if rule['extension'] == 'idl' and int(rule.get('msvs_external_rule', 0)):
        return True
    return False

  def GetIdlBuildData(self, source, config):
    """Determine the implicit outputs for an idl file. Returns output
    directory, outputs, and variables and flags that are required."""
    config = self._RealConfig(config)
    midl_get = self._GetWrapper(self, self.msvs_settings[config], 'VCMIDLTool')
    # Look up a VCMIDLTool setting and expand any VS macros in its value.
    def midl(name, default=None):
      return self.ConvertVSMacros(midl_get(name, default=default),
                                  config=config)
    tlb = midl('TypeLibraryName', default='${root}.tlb')
    header = midl('HeaderFileName', default='${root}.h')
    dlldata = midl('DLLDataFileName', default='dlldata.c')
    iid = midl('InterfaceIdentifierFileName', default='${root}_i.c')
    proxy = midl('ProxyFileName', default='${root}_p.c')
    # Note that .tlb is not included in the outputs as it is not always
    # generated depending on the content of the input idl file.
    outdir = midl('OutputDirectory', default='')
    output = [header, dlldata, iid, proxy]
    variables = [('tlb', tlb),
                 ('h', header),
                 ('dlldata', dlldata),
                 ('iid', iid),
                 ('proxy', proxy)]
    # TODO(scottmg): Are there configuration settings to set these flags?
    flags = ['/char', 'signed', '/env', 'win32', '/Oicf']
    return outdir, output, variables, flags
def _LanguageMatchesForPch(source_ext, pch_source_ext):
c_exts = ('.c',)
cc_exts = ('.cc', '.cxx', '.cpp')
return ((source_ext in c_exts and pch_source_ext in c_exts) or
(source_ext in cc_exts and pch_source_ext in cc_exts))
class PrecompiledHeader(object):
"""Helper to generate dependencies and build rules to handle generation of
precompiled headers. Interface matches the GCH handler in xcode_emulation.py.
"""
def __init__(self, settings, config, gyp_to_build_path):
self.settings = settings
self.config = config
self.gyp_to_build_path = gyp_to_build_path
def _PchHeader(self):
"""Get the header that will appear in an #include line for all source
files."""
return os.path.split(self.settings.msvs_precompiled_header[self.config])[1]
def _PchSource(self):
"""Get the source file that is built once to compile the pch data."""
return self.gyp_to_build_path(
self.settings.msvs_precompiled_source[self.config])
def _PchOutput(self):
"""Get the name of the output of the compiled pch data."""
return '${pchprefix}.' + self._PchHeader() + '.pch'
def GetObjDependencies(self, sources, objs):
"""Given a list of sources files and the corresponding object files,
returns a list of the pch files that should be depended upon. The
additional wrapping in the return value is for interface compatability
with make.py on Mac, and xcode_emulation.py."""
if not self._PchHeader():
return []
source = self._PchSource()
assert source
pch_ext = os.path.splitext(self._PchSource())[1]
for source in sources:
if _LanguageMatchesForPch(os.path.splitext(source)[1], pch_ext):
return [(None, None, self._PchOutput())]
return []
def GetPchBuildCommands(self):
"""Returns [(path_to_pch, language_flag, language, header)].
|path_to_gch| and |header| are relative to the build directory."""
header = self._PchHeader()
source = self._PchSource()
if not source or not header:
return []
ext = os.path.splitext(source)[1]
lang = 'c' if ext == '.c' else 'cc'
return [(self._PchOutput(), '/Yc' + header, lang, source)]
# Cached result of SelectVisualStudioVersion(); computed at most once per
# process by GetVSVersion().
vs_version = None
def GetVSVersion(generator_flags):
  """Return the (memoized) VisualStudioVersion object selected by the
  'msvs_version' generator flag, defaulting to 'auto' detection."""
  global vs_version
  if not vs_version:
    vs_version = gyp.MSVSVersion.SelectVisualStudioVersion(
        generator_flags.get('msvs_version', 'auto'))
  return vs_version

def _GetVsvarsSetupArgs(generator_flags, arch):
  """Return the command line for the VS environment setup script.
  NOTE(review): |arch| is accepted but unused here; SetupScript() is called
  without it -- confirm whether callers expect per-arch setup."""
  vs = GetVSVersion(generator_flags)
  return vs.SetupScript()
def ExpandMacros(string, expansions):
  """Expand $(Variable) per expansions dict. See MsvsSettings.GetVSMacroEnv
  for the canonical way to retrieve a suitable dict."""
  if '$' in string:
    # items() (rather than iteritems()) keeps this working on both Python 2
    # and Python 3; the dict is small so the extra copy is irrelevant.
    for old, new in expansions.items():
      # Expansion values must themselves be fully expanded, otherwise the
      # result would depend on dict iteration order.
      assert '$(' not in new, new
      string = string.replace(old, new)
  return string
def _ExtractImportantEnvironment(output_of_set):
"""Extracts environment variables required for the toolchain to run from
a textual dump output by the cmd.exe 'set' command."""
envvars_to_save = (
'goma_.*', # TODO(scottmg): This is ugly, but needed for goma.
'include',
'lib',
'libpath',
'path',
'pathext',
'systemroot',
'temp',
'tmp',
)
env = {}
for line in output_of_set.splitlines():
for envvar in envvars_to_save:
if re.match(envvar + '=', line.lower()):
var, setting = line.split('=', 1)
if envvar == 'path':
# Our own rules (for running gyp-win-tool) and other actions in
# Chromium rely on python being in the path. Add the path to this
# python here so that if it's not in the path when ninja is run
# later, python will still be found.
setting = os.path.dirname(sys.executable) + os.pathsep + setting
env[var.upper()] = setting
break
for required in ('SYSTEMROOT', 'TEMP', 'TMP'):
if required not in env:
raise Exception('Environment variable "%s" '
'required to be set to valid path' % required)
return env
def _FormatAsEnvironmentBlock(envvar_dict):
"""Format as an 'environment block' directly suitable for CreateProcess.
Briefly this is a list of key=value\0, terminated by an additional \0. See
CreateProcess documentation for more details."""
block = ''
nul = '\0'
for key, value in envvar_dict.iteritems():
block += key + '=' + value + nul
block += nul
return block
def GenerateEnvironmentFiles(toplevel_build_dir, generator_flags, open_out):
  """It's not sufficient to have the absolute path to the compiler, linker,
  etc. on Windows, as those tools rely on .dlls being in the PATH. We also
  need to support both x86 and x64 compilers within the same build (to support
  msvs_target_platform hackery). Different architectures require a different
  compiler binary, and different supporting environment variables (INCLUDE,
  LIB, LIBPATH). So, we extract the environment here, wrap all invocations
  of compiler tools (cl, link, lib, rc, midl, etc.) via win_tool.py which
  sets up the environment, and then we do not prefix the compiler with
  an absolute path, instead preferring something like "cl.exe" in the rule
  which will then run whichever the environment setup has put in the path."""
  vs = GetVSVersion(generator_flags)
  for arch in ('x86', 'x64'):
    # Run the per-arch VS setup script followed by 'set' so the resulting
    # environment can be captured from stdout.
    args = vs.SetupScript(arch)
    args.extend(('&&', 'set'))
    popen = subprocess.Popen(
        args, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
    variables, _ = popen.communicate()
    env = _ExtractImportantEnvironment(variables)
    env_block = _FormatAsEnvironmentBlock(env)
    # Written in binary mode ('wb') so the embedded NULs survive intact;
    # win_tool.py's _GetEnv reads these files back.
    f = open_out(os.path.join(toplevel_build_dir, 'environment.' + arch), 'wb')
    f.write(env_block)
    f.close()

20
tools/gyp/pylib/gyp/ninja_syntax.py

@ -35,7 +35,8 @@ class Writer(object):
self._line('%s = %s' % (key, value), indent) self._line('%s = %s' % (key, value), indent)
def rule(self, name, command, description=None, depfile=None, def rule(self, name, command, description=None, depfile=None,
generator=False, restat=False, deplist=None): generator=False, restat=False, deplist=None, rspfile=None,
rspfile_content=None):
self._line('rule %s' % name) self._line('rule %s' % name)
self.variable('command', command, indent=1) self.variable('command', command, indent=1)
if description: if description:
@ -48,13 +49,17 @@ class Writer(object):
self.variable('generator', '1', indent=1) self.variable('generator', '1', indent=1)
if restat: if restat:
self.variable('restat', '1', indent=1) self.variable('restat', '1', indent=1)
if rspfile:
self.variable('rspfile', rspfile, indent=1)
if rspfile_content:
self.variable('rspfile_content', rspfile_content, indent=1)
def build(self, outputs, rule, inputs=None, implicit=None, order_only=None, def build(self, outputs, rule, inputs=None, implicit=None, order_only=None,
variables=None): variables=None):
outputs = self._as_list(outputs) outputs = self._as_list(outputs)
all_inputs = self._as_list(inputs)[:] all_inputs = self._as_list(inputs)[:]
out_outputs = map(escape_spaces, outputs) out_outputs = list(map(escape_spaces, outputs))
all_inputs = map(escape_spaces, all_inputs) all_inputs = list(map(escape_spaces, all_inputs))
if implicit: if implicit:
implicit = map(escape_spaces, self._as_list(implicit)) implicit = map(escape_spaces, self._as_list(implicit))
@ -70,7 +75,12 @@ class Writer(object):
' '.join(all_inputs))) ' '.join(all_inputs)))
if variables: if variables:
for key, val in variables: if isinstance(variables, dict):
iterator = variables.iteritems()
else:
iterator = iter(variables)
for key, val in iterator:
self.variable(key, val, indent=1) self.variable(key, val, indent=1)
return outputs return outputs
@ -96,7 +106,7 @@ class Writer(object):
def _line(self, text, indent=0): def _line(self, text, indent=0):
"""Write 'text' word-wrapped at self.width characters.""" """Write 'text' word-wrapped at self.width characters."""
leading_space = ' ' * indent leading_space = ' ' * indent
while len(text) > self.width: while len(leading_space) + len(text) > self.width:
# The text is too wide; wrap if possible. # The text is too wide; wrap if possible.
# Find the rightmost space that would obey our width constraint and # Find the rightmost space that would obey our width constraint and

161
tools/gyp/pylib/gyp/win_tool.py

@ -0,0 +1,161 @@
#!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Utility functions for Windows builds.
These functions are executed via gyp-win-tool when using the ninja generator.
"""
import os
import shutil
import subprocess
import sys
def main(args):
  """Run the requested WinTool command; exit with its code when one is
  returned."""
  ret = WinTool().Dispatch(args)
  if ret is not None:
    sys.exit(ret)
class WinTool(object):
"""This class performs all the Windows tooling steps. The methods can either
be executed directly, or dispatched from an argument list."""
def Dispatch(self, args):
"""Dispatches a string command to a method."""
if len(args) < 1:
raise Exception("Not enough arguments")
method = "Exec%s" % self._CommandifyName(args[0])
return getattr(self, method)(*args[1:])
def _CommandifyName(self, name_string):
"""Transforms a tool name like recursive-mirror to RecursiveMirror."""
return name_string.title().replace('-', '')
def _GetEnv(self, arch):
"""Gets the saved environment from a file for a given architecture."""
# The environment is saved as an "environment block" (see CreateProcess
# and msvs_emulation for details). We convert to a dict here.
# Drop last 2 NULs, one for list terminator, one for trailing vs. separator.
pairs = open(arch).read()[:-2].split('\0')
kvs = [item.split('=', 1) for item in pairs]
return dict(kvs)
def ExecStamp(self, path):
"""Simple stamp command."""
open(path, 'w').close()
def ExecRecursiveMirror(self, source, dest):
"""Emulation of rm -rf out && cp -af in out."""
if os.path.exists(dest):
if os.path.isdir(dest):
shutil.rmtree(dest)
else:
os.unlink(dest)
if os.path.isdir(source):
shutil.copytree(source, dest)
else:
shutil.copy2(source, dest)
def ExecLinkWrapper(self, arch, *args):
"""Filter diagnostic output from link that looks like:
' Creating library ui.dll.lib and object ui.dll.exp'
This happens when there are exports from the dll or exe.
"""
env = self._GetEnv(arch)
popen = subprocess.Popen(args, shell=True, env=env,
stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
out, _ = popen.communicate()
for line in out.splitlines():
if not line.startswith(' Creating library '):
print line
return popen.returncode
def ExecMidlWrapper(self, arch, outdir, tlb, h, dlldata, iid, proxy, idl,
*flags):
"""Filter noisy filenames output from MIDL compile step that isn't
quietable via command line flags.
"""
args = ['midl', '/nologo'] + list(flags) + [
'/out', outdir,
'/tlb', tlb,
'/h', h,
'/dlldata', dlldata,
'/iid', iid,
'/proxy', proxy,
idl]
env = self._GetEnv(arch)
popen = subprocess.Popen(args, shell=True, env=env,
stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
out, _ = popen.communicate()
# Filter junk out of stdout, and write filtered versions. Output we want
# to filter is pairs of lines that look like this:
# Processing C:\Program Files (x86)\Microsoft SDKs\...\include\objidl.idl
# objidl.idl
lines = out.splitlines()
prefix = 'Processing '
processing = set(os.path.basename(x) for x in lines if x.startswith(prefix))
for line in lines:
if not line.startswith(prefix) and line not in processing:
print line
return popen.returncode
def ExecAsmWrapper(self, arch, *args):
"""Filter logo banner from invocations of asm.exe."""
env = self._GetEnv(arch)
# MSVS doesn't assemble x64 asm files.
if arch == 'environment.x64':
return 0
popen = subprocess.Popen(args, shell=True, env=env,
stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
out, _ = popen.communicate()
for line in out.splitlines():
if (not line.startswith('Copyright (C) Microsoft Corporation') and
not line.startswith('Microsoft (R) Macro Assembler') and
not line.startswith(' Assembling: ') and
line):
print line
return popen.returncode
def ExecRcWrapper(self, arch, *args):
"""Filter logo banner from invocations of rc.exe. Older versions of RC
don't support the /nologo flag."""
env = self._GetEnv(arch)
popen = subprocess.Popen(args, shell=True, env=env,
stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
out, _ = popen.communicate()
for line in out.splitlines():
if (not line.startswith('Microsoft (R) Windows (R) Resource Compiler') and
not line.startswith('Copyright (C) Microsoft Corporation') and
line):
print line
return popen.returncode
def ExecClWrapper(self, arch, depname, *args):
  """Run cl.exe and filter its output through ninja-deplist-helper to get
  dependency information, which is stored in |depname|.

  Args:
    arch: architecture key used to look up the MSVS build environment.
    depname: path of the dependency-list file the helper should write.
    *args: the cl.exe command line to run.
  Returns:
    The exit code of the shell pipeline.
  """
  env = self._GetEnv(arch)
  # Build a single shell pipeline: cl.exe output is piped into the helper.
  # BUG FIX: the original appended a stray, unmatched '"' to the end of the
  # command (no opening quote existed anywhere); it has been removed.
  args = ' '.join(args) + \
      '| ninja-deplist-helper -r . -q -f cl -o ' + depname
  popen = subprocess.Popen(args, shell=True, env=env)
  popen.wait()
  return popen.returncode
def ExecActionWrapper(self, arch, rspfile, *dir):
  """Run an action command line read from a response file, using the
  environment for |arch|. If |dir| is supplied, it is the working directory.
  Returns the action's exit code."""
  env = self._GetEnv(arch)
  with open(rspfile) as f:
    command = f.read()
  cwd = dir[0] if dir else None
  process = subprocess.Popen(command, shell=True, env=env, cwd=cwd)
  process.wait()
  return process.returncode
if __name__ == '__main__':
  # Script entry point: dispatch to main() with the command-line arguments
  # (minus the program name) and use its return value as the exit code.
  sys.exit(main(sys.argv[1:]))

324
tools/gyp/pylib/gyp/xcode_emulation.py

@ -15,6 +15,10 @@ import shlex
class XcodeSettings(object): class XcodeSettings(object):
"""A class that understands the gyp 'xcode_settings' object.""" """A class that understands the gyp 'xcode_settings' object."""
# Computed lazily by _GetSdkBaseDir(). Shared by all XcodeSettings, so cached
# at class-level for efficiency.
_sdk_base_dir = None
def __init__(self, spec): def __init__(self, spec):
self.spec = spec self.spec = spec
@ -215,11 +219,33 @@ class XcodeSettings(object):
else: else:
return self._GetStandaloneBinaryPath() return self._GetStandaloneBinaryPath()
def _GetSdkBaseDir(self):
"""Returns the root of the 'Developer' directory. On Xcode 4.2 and prior,
this is usually just /Developer. Xcode 4.3 moved that folder into the Xcode
bundle."""
if not XcodeSettings._sdk_base_dir:
import subprocess
job = subprocess.Popen(['xcode-select', '-print-path'],
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT)
out, err = job.communicate()
if job.returncode != 0:
print out
raise Exception('Error %d running xcode-select' % job.returncode)
# The Developer folder moved in Xcode 4.3.
xcode43_sdk_path = os.path.join(
out.rstrip(), 'Platforms/MacOSX.platform/Developer/SDKs')
if os.path.isdir(xcode43_sdk_path):
XcodeSettings._sdk_base_dir = xcode43_sdk_path
else:
XcodeSettings._sdk_base_dir = os.path.join(out.rstrip(), 'SDKs')
return XcodeSettings._sdk_base_dir
def _SdkPath(self): def _SdkPath(self):
sdk_root = self.GetPerTargetSetting('SDKROOT', default='macosx10.5') sdk_root = self.GetPerTargetSetting('SDKROOT', default='macosx10.5')
if sdk_root.startswith('macosx'): if sdk_root.startswith('macosx'):
sdk_root = 'MacOSX' + sdk_root[len('macosx'):] sdk_root = 'MacOSX' + sdk_root[len('macosx'):]
return '/Developer/SDKs/%s.sdk' % sdk_root return os.path.join(self._GetSdkBaseDir(), '%s.sdk' % sdk_root)
def GetCflags(self, configname): def GetCflags(self, configname):
"""Returns flags that need to be added to .c, .cc, .m, and .mm """Returns flags that need to be added to .c, .cc, .m, and .mm
@ -281,7 +307,6 @@ class XcodeSettings(object):
self._WarnUnimplemented('COPY_PHASE_STRIP') self._WarnUnimplemented('COPY_PHASE_STRIP')
self._WarnUnimplemented('GCC_DEBUGGING_SYMBOLS') self._WarnUnimplemented('GCC_DEBUGGING_SYMBOLS')
self._WarnUnimplemented('GCC_ENABLE_OBJC_EXCEPTIONS') self._WarnUnimplemented('GCC_ENABLE_OBJC_EXCEPTIONS')
self._WarnUnimplemented('GCC_ENABLE_OBJC_GC')
# TODO: This is exported correctly, but assigning to it is not supported. # TODO: This is exported correctly, but assigning to it is not supported.
self._WarnUnimplemented('MACH_O_TYPE') self._WarnUnimplemented('MACH_O_TYPE')
@ -305,7 +330,6 @@ class XcodeSettings(object):
if self._Test('GCC_ENABLE_SSE42_EXTENSIONS', 'YES', default='NO'): if self._Test('GCC_ENABLE_SSE42_EXTENSIONS', 'YES', default='NO'):
cflags.append('-msse4.2') cflags.append('-msse4.2')
cflags += self._Settings().get('OTHER_CFLAGS', [])
cflags += self._Settings().get('WARNING_CFLAGS', []) cflags += self._Settings().get('WARNING_CFLAGS', [])
config = self.spec['configurations'][self.configname] config = self.spec['configurations'][self.configname]
@ -321,6 +345,7 @@ class XcodeSettings(object):
self.configname = configname self.configname = configname
cflags_c = [] cflags_c = []
self._Appendf(cflags_c, 'GCC_C_LANGUAGE_STANDARD', '-std=%s') self._Appendf(cflags_c, 'GCC_C_LANGUAGE_STANDARD', '-std=%s')
cflags_c += self._Settings().get('OTHER_CFLAGS', [])
self.configname = None self.configname = None
return cflags_c return cflags_c
@ -336,24 +361,140 @@ class XcodeSettings(object):
cflags_cc.append('-fvisibility-inlines-hidden') cflags_cc.append('-fvisibility-inlines-hidden')
if self._Test('GCC_THREADSAFE_STATICS', 'NO', default='YES'): if self._Test('GCC_THREADSAFE_STATICS', 'NO', default='YES'):
cflags_cc.append('-fno-threadsafe-statics') cflags_cc.append('-fno-threadsafe-statics')
if self._Test('GCC_WARN_ABOUT_INVALID_OFFSETOF_MACRO', 'NO', default='YES'):
cflags_cc.append('-Wno-invalid-offsetof')
other_ccflags = []
for flag in self._Settings().get('OTHER_CPLUSPLUSFLAGS', ['$(inherited)']):
# TODO: More general variable expansion. Missing in many other places too.
if flag in ('$inherited', '$(inherited)', '${inherited}'):
flag = '$OTHER_CFLAGS'
if flag in ('$OTHER_CFLAGS', '$(OTHER_CFLAGS)', '${OTHER_CFLAGS}'):
other_ccflags += self._Settings().get('OTHER_CFLAGS', [])
else:
other_ccflags.append(flag)
cflags_cc += other_ccflags
self.configname = None self.configname = None
return cflags_cc return cflags_cc
def _AddObjectiveCGarbageCollectionFlags(self, flags):
gc_policy = self._Settings().get('GCC_ENABLE_OBJC_GC', 'unsupported')
if gc_policy == 'supported':
flags.append('-fobjc-gc')
elif gc_policy == 'required':
flags.append('-fobjc-gc-only')
def GetCflagsObjC(self, configname): def GetCflagsObjC(self, configname):
"""Returns flags that need to be added to .m compilations.""" """Returns flags that need to be added to .m compilations."""
self.configname = configname self.configname = configname
cflags_objc = []
self._AddObjectiveCGarbageCollectionFlags(cflags_objc)
self.configname = None self.configname = None
return [] return cflags_objc
def GetCflagsObjCC(self, configname): def GetCflagsObjCC(self, configname):
"""Returns flags that need to be added to .mm compilations.""" """Returns flags that need to be added to .mm compilations."""
self.configname = configname self.configname = configname
cflags_objcc = [] cflags_objcc = []
self._AddObjectiveCGarbageCollectionFlags(cflags_objcc)
if self._Test('GCC_OBJC_CALL_CXX_CDTORS', 'YES', default='NO'): if self._Test('GCC_OBJC_CALL_CXX_CDTORS', 'YES', default='NO'):
cflags_objcc.append('-fobjc-call-cxx-cdtors') cflags_objcc.append('-fobjc-call-cxx-cdtors')
self.configname = None self.configname = None
return cflags_objcc return cflags_objcc
def GetInstallNameBase(self):
"""Return DYLIB_INSTALL_NAME_BASE for this target."""
# Xcode sets this for shared_libraries, and for nonbundled loadable_modules.
if (self.spec['type'] != 'shared_library' and
(self.spec['type'] != 'loadable_module' or self._IsBundle())):
return None
install_base = self.GetPerTargetSetting(
'DYLIB_INSTALL_NAME_BASE',
default='/Library/Frameworks' if self._IsBundle() else '/usr/local/lib')
return install_base
def _StandardizePath(self, path):
"""Do :standardizepath processing for path."""
# I'm not quite sure what :standardizepath does. Just call normpath(),
# but don't let @executable_path/../foo collapse to foo.
if '/' in path:
prefix, rest = '', path
if path.startswith('@'):
prefix, rest = path.split('/', 1)
rest = os.path.normpath(rest) # :standardizepath
path = os.path.join(prefix, rest)
return path
def GetInstallName(self):
"""Return LD_DYLIB_INSTALL_NAME for this target."""
# Xcode sets this for shared_libraries, and for nonbundled loadable_modules.
if (self.spec['type'] != 'shared_library' and
(self.spec['type'] != 'loadable_module' or self._IsBundle())):
return None
default_install_name = \
'$(DYLIB_INSTALL_NAME_BASE:standardizepath)/$(EXECUTABLE_PATH)'
install_name = self.GetPerTargetSetting(
'LD_DYLIB_INSTALL_NAME', default=default_install_name)
# Hardcode support for the variables used in chromium for now, to
# unblock people using the make build.
if '$' in install_name:
assert install_name in ('$(DYLIB_INSTALL_NAME_BASE:standardizepath)/'
'$(WRAPPER_NAME)/$(PRODUCT_NAME)', default_install_name), (
'Variables in LD_DYLIB_INSTALL_NAME are not generally supported '
'yet in target \'%s\' (got \'%s\')' %
(self.spec['target_name'], install_name))
install_name = install_name.replace(
'$(DYLIB_INSTALL_NAME_BASE:standardizepath)',
self._StandardizePath(self.GetInstallNameBase()))
if self._IsBundle():
# These are only valid for bundles, hence the |if|.
install_name = install_name.replace(
'$(WRAPPER_NAME)', self.GetWrapperName())
install_name = install_name.replace(
'$(PRODUCT_NAME)', self.GetProductName())
else:
assert '$(WRAPPER_NAME)' not in install_name
assert '$(PRODUCT_NAME)' not in install_name
install_name = install_name.replace(
'$(EXECUTABLE_PATH)', self.GetExecutablePath())
return install_name
def _MapLinkerFlagFilename(self, ldflag, gyp_to_build_path):
"""Checks if ldflag contains a filename and if so remaps it from
gyp-directory-relative to build-directory-relative."""
# This list is expanded on demand.
# They get matched as:
# -exported_symbols_list file
# -Wl,exported_symbols_list file
# -Wl,exported_symbols_list,file
LINKER_FILE = '(\S+)'
WORD = '\S+'
linker_flags = [
['-exported_symbols_list', LINKER_FILE], # Needed for NaCl.
['-unexported_symbols_list', LINKER_FILE],
['-reexported_symbols_list', LINKER_FILE],
['-sectcreate', WORD, WORD, LINKER_FILE], # Needed for remoting.
]
for flag_pattern in linker_flags:
regex = re.compile('(?:-Wl,)?' + '[ ,]'.join(flag_pattern))
m = regex.match(ldflag)
if m:
ldflag = ldflag[:m.start(1)] + gyp_to_build_path(m.group(1)) + \
ldflag[m.end(1):]
# Required for ffmpeg (no idea why they don't use LIBRARY_SEARCH_PATHS,
# TODO(thakis): Update ffmpeg.gyp):
if ldflag.startswith('-L'):
ldflag = '-L' + gyp_to_build_path(ldflag[len('-L'):])
return ldflag
def GetLdflags(self, configname, product_dir, gyp_to_build_path): def GetLdflags(self, configname, product_dir, gyp_to_build_path):
"""Returns flags that need to be passed to the linker. """Returns flags that need to be passed to the linker.
@ -368,19 +509,9 @@ class XcodeSettings(object):
ldflags = [] ldflags = []
# The xcode build is relative to a gyp file's directory, and OTHER_LDFLAGS # The xcode build is relative to a gyp file's directory, and OTHER_LDFLAGS
# contains two entries that depend on this. Explicitly absolutify for these # can contain entries that depend on this. Explicitly absolutify these.
# two cases.
def MapGypPathWithPrefix(flag, prefix):
if flag.startswith(prefix):
flag = prefix + gyp_to_build_path(flag[len(prefix):])
return flag
for ldflag in self._Settings().get('OTHER_LDFLAGS', []): for ldflag in self._Settings().get('OTHER_LDFLAGS', []):
# Required for ffmpeg (no idea why they don't use LIBRARY_SEARCH_PATHS, ldflags.append(self._MapLinkerFlagFilename(ldflag, gyp_to_build_path))
# TODO(thakis): Update ffmpeg.gyp):
ldflag = MapGypPathWithPrefix(ldflag, '-L')
# Required for the nacl plugin:
ldflag = MapGypPathWithPrefix(ldflag, '-Wl,-exported_symbols_list ')
ldflags.append(ldflag)
if self._Test('DEAD_CODE_STRIPPING', 'YES', default='NO'): if self._Test('DEAD_CODE_STRIPPING', 'YES', default='NO'):
ldflags.append('-Wl,-dead_strip') ldflags.append('-Wl,-dead_strip')
@ -415,48 +546,12 @@ class XcodeSettings(object):
# Xcode adds the product directory by default. # Xcode adds the product directory by default.
ldflags.append('-L' + product_dir) ldflags.append('-L' + product_dir)
install_name = self.GetPerTargetSetting('LD_DYLIB_INSTALL_NAME') install_name = self.GetInstallName()
install_base = self.GetPerTargetSetting('DYLIB_INSTALL_NAME_BASE')
default_install_name = \
'$(DYLIB_INSTALL_NAME_BASE:standardizepath)/$(EXECUTABLE_PATH)'
if not install_name and install_base:
install_name = default_install_name
if install_name: if install_name:
# Hardcode support for the variables used in chromium for now, to unblock ldflags.append('-install_name ' + install_name.replace(' ', r'\ '))
# people using the make build.
if '$' in install_name:
assert install_name in ('$(DYLIB_INSTALL_NAME_BASE:standardizepath)/'
'$(WRAPPER_NAME)/$(PRODUCT_NAME)', default_install_name), (
'Variables in LD_DYLIB_INSTALL_NAME are not generally supported yet'
' in target \'%s\' (got \'%s\')' %
(self.spec['target_name'], install_name))
# I'm not quite sure what :standardizepath does. Just call normpath(),
# but don't let @executable_path/../foo collapse to foo.
if '/' in install_base:
prefix, rest = '', install_base
if install_base.startswith('@'):
prefix, rest = install_base.split('/', 1)
rest = os.path.normpath(rest) # :standardizepath
install_base = os.path.join(prefix, rest)
install_name = install_name.replace(
'$(DYLIB_INSTALL_NAME_BASE:standardizepath)', install_base)
if self._IsBundle():
# These are only valid for bundles, hence the |if|.
install_name = install_name.replace(
'$(WRAPPER_NAME)', self.GetWrapperName())
install_name = install_name.replace(
'$(PRODUCT_NAME)', self.GetProductName())
else:
assert '$(WRAPPER_NAME)' not in install_name
assert '$(PRODUCT_NAME)' not in install_name
install_name = install_name.replace(
'$(EXECUTABLE_PATH)', self.GetExecutablePath())
install_name = install_name.replace(' ', r'\ ') for rpath in self._Settings().get('LD_RUNPATH_SEARCH_PATHS', []):
ldflags.append('-install_name ' + install_name) ldflags.append('-Wl,-rpath,' + rpath)
self.configname = None self.configname = None
return ldflags return ldflags
@ -660,7 +755,7 @@ class MacPrefixHeader(object):
result.append((source, obj, self._Gch(lang))) result.append((source, obj, self._Gch(lang)))
return result return result
def GetGchBuildCommands(self): def GetPchBuildCommands(self):
"""Returns [(path_to_gch, language_flag, language, header)]. """Returns [(path_to_gch, language_flag, language, header)].
|path_to_gch| and |header| are relative to the build directory. |path_to_gch| and |header| are relative to the build directory.
""" """
@ -788,7 +883,7 @@ def GetMacInfoPlist(product_dir, xcode_settings, gyp_path_to_build_path):
return info_plist, dest_plist, defines, extra_env return info_plist, dest_plist, defines, extra_env
def GetXcodeEnv(xcode_settings, built_products_dir, srcroot, configuration, def _GetXcodeEnv(xcode_settings, built_products_dir, srcroot, configuration,
additional_settings=None): additional_settings=None):
"""Return the environment variables that Xcode would set. See """Return the environment variables that Xcode would set. See
http://developer.apple.com/library/mac/#documentation/DeveloperTools/Reference/XcodeBuildSettingRef/1-Build_Setting_Reference/build_setting_ref.html#//apple_ref/doc/uid/TP40003931-CH3-SW153 http://developer.apple.com/library/mac/#documentation/DeveloperTools/Reference/XcodeBuildSettingRef/1-Build_Setting_Reference/build_setting_ref.html#//apple_ref/doc/uid/TP40003931-CH3-SW153
@ -839,6 +934,13 @@ def GetXcodeEnv(xcode_settings, built_products_dir, srcroot, configuration,
env['INFOPLIST_PATH'] = xcode_settings.GetBundlePlistPath() env['INFOPLIST_PATH'] = xcode_settings.GetBundlePlistPath()
env['WRAPPER_NAME'] = xcode_settings.GetWrapperName() env['WRAPPER_NAME'] = xcode_settings.GetWrapperName()
install_name = xcode_settings.GetInstallName()
if install_name:
env['LD_DYLIB_INSTALL_NAME'] = install_name
install_name_base = xcode_settings.GetInstallNameBase()
if install_name_base:
env['DYLIB_INSTALL_NAME_BASE'] = install_name_base
if not additional_settings: if not additional_settings:
additional_settings = {} additional_settings = {}
else: else:
@ -873,17 +975,17 @@ def _NormalizeEnvVarReferences(str):
def ExpandEnvVars(string, expansions): def ExpandEnvVars(string, expansions):
"""Expands ${VARIABLES}, $(VARIABLES), and $VARIABLES in string per the """Expands ${VARIABLES}, $(VARIABLES), and $VARIABLES in string per the
expansions dict. If the variable expands to something that references expansions list. If the variable expands to something that references
another variable, this variable is expanded as well if it's in env -- another variable, this variable is expanded as well if it's in env --
until no variables present in env are left.""" until no variables present in env are left."""
for k in reversed(TopologicallySortedEnvVarKeys(expansions)): for k, v in reversed(expansions):
string = string.replace('${' + k + '}', expansions[k]) string = string.replace('${' + k + '}', v)
string = string.replace('$(' + k + ')', expansions[k]) string = string.replace('$(' + k + ')', v)
string = string.replace('$' + k, expansions[k]) string = string.replace('$' + k, v)
return string return string
def TopologicallySortedEnvVarKeys(env): def _TopologicallySortedEnvVarKeys(env):
"""Takes a dict |env| whose values are strings that can refer to other keys, """Takes a dict |env| whose values are strings that can refer to other keys,
for example env['foo'] = '$(bar) and $(baz)'. Returns a list L of all keys of for example env['foo'] = '$(bar) and $(baz)'. Returns a list L of all keys of
env such that key2 is after key1 in L if env[key2] refers to env[key1]. env such that key2 is after key1 in L if env[key2] refers to env[key1].
@ -894,62 +996,37 @@ def TopologicallySortedEnvVarKeys(env):
# order is important. Below is the logic to compute the dependency graph # order is important. Below is the logic to compute the dependency graph
# and sort it. # and sort it.
regex = re.compile(r'\$\{([a-zA-Z0-9\-_]+)\}') regex = re.compile(r'\$\{([a-zA-Z0-9\-_]+)\}')
def GetEdges(node):
# Use a definition of edges such that user_of_variable -> used_variable.
# This happens to be easier in this case, since a variable's
# definition contains all variables it references in a single string.
# We can then reverse the result of the topological sort at the end.
# Since: reverse(topsort(DAG)) = topsort(reverse_edges(DAG))
matches = set([v for v in regex.findall(env[node]) if v in env])
for dependee in matches:
assert '${' not in dependee, 'Nested variables not supported: ' + dependee
return matches
# First sort the list of keys. try:
key_list = sorted(env.keys()) # Topologically sort, and then reverse, because we used an edge definition
# that's inverted from the expected result of this function (see comment
# above).
order = gyp.common.TopologicallySorted(env.keys(), GetEdges)
order.reverse()
return order
except gyp.common.CycleError, e:
raise Exception(
'Xcode environment variables are cyclically dependent: ' + str(e.nodes))
# Phase 1: Create a set of edges of (DEPENDEE, DEPENDER) where in the graph,
# DEPENDEE -> DEPENDER. Also create sets of dependers and dependees.
edges = set()
dependees = set()
dependers = set()
for k in key_list:
matches = regex.findall(env[k])
if not len(matches):
continue
depends_on_other_var = False def GetSortedXcodeEnv(xcode_settings, built_products_dir, srcroot,
for dependee in matches: configuration, additional_settings=None):
assert '${' not in dependee, 'Nested variables not supported: ' + dependee env = _GetXcodeEnv(xcode_settings, built_products_dir, srcroot, configuration,
if dependee in env: additional_settings)
edges.add((dependee, k)) return [(key, env[key]) for key in _TopologicallySortedEnvVarKeys(env)]
dependees.add(dependee)
depends_on_other_var = True
if depends_on_other_var: def GetSpecPostbuildCommands(spec, quiet=False):
dependers.add(k)
# Phase 2: Create a list of graph nodes with no incoming edges.
sorted_nodes = []
edgeless_nodes = dependees - dependers
# Phase 3: Perform Kahn topological sort.
while len(edgeless_nodes):
# Find a node with no incoming edges, add it to the sorted list, and
# remove it from the list of nodes that aren't part of the graph.
node = edgeless_nodes.pop()
sorted_nodes.append(node)
key_list.remove(node)
# Find all the edges between |node| and other nodes.
edges_to_node = [e for e in edges if e[0] == node]
for edge in edges_to_node:
edges.remove(edge)
# If the node connected to |node| by |edge| has no other incoming edges,
# add it to |edgeless_nodes|.
if not len([e for e in edges if e[1] == edge[1]]):
edgeless_nodes.add(edge[1])
# Any remaining edges indicate a cycle.
if len(edges):
raise Exception('Xcode environment variables are cyclically dependent: ' +
str(edges))
# Append the "nodes" not in the graph to those that were just sorted.
sorted_nodes.extend(key_list)
return sorted_nodes
def GetSpecPostbuildCommands(spec, gyp_path_to_build_path, quiet=False):
"""Returns the list of postbuilds explicitly defined on |spec|, in a form """Returns the list of postbuilds explicitly defined on |spec|, in a form
executable by a shell.""" executable by a shell."""
postbuilds = [] postbuilds = []
@ -957,16 +1034,5 @@ def GetSpecPostbuildCommands(spec, gyp_path_to_build_path, quiet=False):
if not quiet: if not quiet:
postbuilds.append('echo POSTBUILD\\(%s\\) %s' % ( postbuilds.append('echo POSTBUILD\\(%s\\) %s' % (
spec['target_name'], postbuild['postbuild_name'])) spec['target_name'], postbuild['postbuild_name']))
shell_list = postbuild['action'][:] postbuilds.append(gyp.common.EncodePOSIXShellList(postbuild['action']))
# The first element is the command. If it's a relative path, it's
# a script in the source tree relative to the gyp file and needs to be
# absolutified. Else, it's in the PATH (e.g. install_name_tool, ln).
if os.path.sep in shell_list[0]:
shell_list[0] = gyp_path_to_build_path(shell_list[0])
# "script.sh" -> "./script.sh"
if not os.path.sep in shell_list[0]:
shell_list[0] = os.path.join('.', shell_list[0])
postbuilds.append(gyp.common.EncodePOSIXShellList(shell_list))
return postbuilds return postbuilds

44
tools/gyp/pylib/gyp/xcodeproj_file.py

@ -419,7 +419,7 @@ class XCObject(object):
hash.update(struct.pack('>i', len(data))) hash.update(struct.pack('>i', len(data)))
hash.update(data) hash.update(data)
if hash == None: if hash is None:
hash = _new_sha1() hash = _new_sha1()
hashables = self.Hashables() hashables = self.Hashables()
@ -431,7 +431,7 @@ class XCObject(object):
for child in self.Children(): for child in self.Children():
child.ComputeIDs(recursive, overwrite, hash.copy()) child.ComputeIDs(recursive, overwrite, hash.copy())
if overwrite or self.id == None: if overwrite or self.id is None:
# Xcode IDs are only 96 bits (24 hex characters), but a SHA-1 digest is # Xcode IDs are only 96 bits (24 hex characters), but a SHA-1 digest is
# is 160 bits. Instead of throwing out 64 bits of the digest, xor them # is 160 bits. Instead of throwing out 64 bits of the digest, xor them
# into the portion that gets used. # into the portion that gets used.
@ -736,7 +736,7 @@ class XCObject(object):
references added. references added.
""" """
if properties == None: if properties is None:
return return
for property, value in properties.iteritems(): for property, value in properties.iteritems():
@ -918,7 +918,7 @@ class XCHierarchicalElement(XCObject):
self._properties['sourceTree'] = source_tree self._properties['sourceTree'] = source_tree
if path != None: if path != None:
self._properties['path'] = path self._properties['path'] = path
if source_tree != None and path == None and \ if source_tree != None and path is None and \
not 'name' in self._properties: not 'name' in self._properties:
# The path was of the form "$(SDKROOT)" with no path following it. # The path was of the form "$(SDKROOT)" with no path following it.
# This object is now relative to that variable, so it has no path # This object is now relative to that variable, so it has no path
@ -1068,7 +1068,7 @@ class XCHierarchicalElement(XCObject):
xche = self xche = self
path = None path = None
while isinstance(xche, XCHierarchicalElement) and \ while isinstance(xche, XCHierarchicalElement) and \
(path == None or \ (path is None or \
(not path.startswith('/') and not path.startswith('$'))): (not path.startswith('/') and not path.startswith('$'))):
this_path = xche.PathFromSourceTreeAndPath() this_path = xche.PathFromSourceTreeAndPath()
if this_path != None and path != None: if this_path != None and path != None:
@ -1222,7 +1222,7 @@ class PBXGroup(XCHierarchicalElement):
grandparent = None grandparent = None
# Putting a directory inside a variant group is not currently supported. # Putting a directory inside a variant group is not currently supported.
assert not is_dir or variant_name == None assert not is_dir or variant_name is None
path_split = path.split(posixpath.sep) path_split = path.split(posixpath.sep)
if len(path_split) == 1 or \ if len(path_split) == 1 or \
@ -1230,7 +1230,7 @@ class PBXGroup(XCHierarchicalElement):
not hierarchical: not hierarchical:
# The PBXFileReference or PBXVariantGroup will be added to or gotten from # The PBXFileReference or PBXVariantGroup will be added to or gotten from
# this PBXGroup, no recursion necessary. # this PBXGroup, no recursion necessary.
if variant_name == None: if variant_name is None:
# Add or get a PBXFileReference. # Add or get a PBXFileReference.
file_ref = self.GetChildByPath(normpath) file_ref = self.GetChildByPath(normpath)
if file_ref != None: if file_ref != None:
@ -1583,14 +1583,14 @@ class XCConfigurationList(XCObject):
value = None value = None
for configuration in self._properties['buildConfigurations']: for configuration in self._properties['buildConfigurations']:
configuration_has = configuration.HasBuildSetting(key) configuration_has = configuration.HasBuildSetting(key)
if has == None: if has is None:
has = configuration_has has = configuration_has
elif has != configuration_has: elif has != configuration_has:
return -1 return -1
if configuration_has: if configuration_has:
configuration_value = configuration.GetBuildSetting(key) configuration_value = configuration.GetBuildSetting(key)
if value == None: if value is None:
value = configuration_value value = configuration_value
elif value != configuration_value: elif value != configuration_value:
return -1 return -1
@ -1613,7 +1613,7 @@ class XCConfigurationList(XCObject):
value = None value = None
for configuration in self._properties['buildConfigurations']: for configuration in self._properties['buildConfigurations']:
configuration_value = configuration.GetBuildSetting(key) configuration_value = configuration.GetBuildSetting(key)
if value == None: if value is None:
value = configuration_value value = configuration_value
else: else:
if value != configuration_value: if value != configuration_value:
@ -1941,7 +1941,7 @@ class PBXCopyFilesBuildPhase(XCBuildPhase):
if path_tree in self.path_tree_to_subfolder: if path_tree in self.path_tree_to_subfolder:
subfolder = self.path_tree_to_subfolder[path_tree] subfolder = self.path_tree_to_subfolder[path_tree]
if relative_path == None: if relative_path is None:
relative_path = '' relative_path = ''
else: else:
# The path starts with an unrecognized Xcode variable # The path starts with an unrecognized Xcode variable
@ -2117,8 +2117,7 @@ class XCTarget(XCRemoteObject):
pbxproject = self.PBXProjectAncestor() pbxproject = self.PBXProjectAncestor()
other_pbxproject = other.PBXProjectAncestor() other_pbxproject = other.PBXProjectAncestor()
if pbxproject == other_pbxproject: if pbxproject == other_pbxproject:
# The easy case. Add a dependency to another target in the same # Add a dependency to another target in the same project file.
# project file.
container = PBXContainerItemProxy({'containerPortal': pbxproject, container = PBXContainerItemProxy({'containerPortal': pbxproject,
'proxyType': 1, 'proxyType': 1,
'remoteGlobalIDString': other, 'remoteGlobalIDString': other,
@ -2127,8 +2126,7 @@ class XCTarget(XCRemoteObject):
'targetProxy': container}) 'targetProxy': container})
self.AppendProperty('dependencies', dependency) self.AppendProperty('dependencies', dependency)
else: else:
# The hard case. Add a dependency to a target in a different project # Add a dependency to a target in a different project file.
# file. Actually, this case isn't really so hard.
other_project_ref = \ other_project_ref = \
pbxproject.AddOrGetProjectReference(other_pbxproject)[1] pbxproject.AddOrGetProjectReference(other_pbxproject)[1]
container = PBXContainerItemProxy({ container = PBXContainerItemProxy({
@ -2257,7 +2255,7 @@ class PBXNativeTarget(XCTarget):
self.SetBuildSetting('MACH_O_TYPE', 'mh_bundle') self.SetBuildSetting('MACH_O_TYPE', 'mh_bundle')
self.SetBuildSetting('DYLIB_CURRENT_VERSION', '') self.SetBuildSetting('DYLIB_CURRENT_VERSION', '')
self.SetBuildSetting('DYLIB_COMPATIBILITY_VERSION', '') self.SetBuildSetting('DYLIB_COMPATIBILITY_VERSION', '')
if force_extension == None: if force_extension is None:
force_extension = suffix[1:] force_extension = suffix[1:]
if force_extension is not None: if force_extension is not None:
@ -2327,14 +2325,14 @@ class PBXNativeTarget(XCTarget):
# this function is intended as an aid to GetBuildPhaseByType. Loop # this function is intended as an aid to GetBuildPhaseByType. Loop
# over the entire list of phases and assert if more than one of the # over the entire list of phases and assert if more than one of the
# desired type is found. # desired type is found.
assert the_phase == None assert the_phase is None
the_phase = phase the_phase = phase
return the_phase return the_phase
def HeadersPhase(self): def HeadersPhase(self):
headers_phase = self.GetBuildPhaseByType(PBXHeadersBuildPhase) headers_phase = self.GetBuildPhaseByType(PBXHeadersBuildPhase)
if headers_phase == None: if headers_phase is None:
headers_phase = PBXHeadersBuildPhase() headers_phase = PBXHeadersBuildPhase()
# The headers phase should come before the resources, sources, and # The headers phase should come before the resources, sources, and
@ -2355,7 +2353,7 @@ class PBXNativeTarget(XCTarget):
def ResourcesPhase(self): def ResourcesPhase(self):
resources_phase = self.GetBuildPhaseByType(PBXResourcesBuildPhase) resources_phase = self.GetBuildPhaseByType(PBXResourcesBuildPhase)
if resources_phase == None: if resources_phase is None:
resources_phase = PBXResourcesBuildPhase() resources_phase = PBXResourcesBuildPhase()
# The resources phase should come before the sources and frameworks # The resources phase should come before the sources and frameworks
@ -2375,7 +2373,7 @@ class PBXNativeTarget(XCTarget):
def SourcesPhase(self): def SourcesPhase(self):
sources_phase = self.GetBuildPhaseByType(PBXSourcesBuildPhase) sources_phase = self.GetBuildPhaseByType(PBXSourcesBuildPhase)
if sources_phase == None: if sources_phase is None:
sources_phase = PBXSourcesBuildPhase() sources_phase = PBXSourcesBuildPhase()
self.AppendProperty('buildPhases', sources_phase) self.AppendProperty('buildPhases', sources_phase)
@ -2383,7 +2381,7 @@ class PBXNativeTarget(XCTarget):
def FrameworksPhase(self): def FrameworksPhase(self):
frameworks_phase = self.GetBuildPhaseByType(PBXFrameworksBuildPhase) frameworks_phase = self.GetBuildPhaseByType(PBXFrameworksBuildPhase)
if frameworks_phase == None: if frameworks_phase is None:
frameworks_phase = PBXFrameworksBuildPhase() frameworks_phase = PBXFrameworksBuildPhase()
self.AppendProperty('buildPhases', frameworks_phase) self.AppendProperty('buildPhases', frameworks_phase)
@ -2492,7 +2490,7 @@ class PBXProject(XCContainerPortal):
main_group = self._properties['mainGroup'] main_group = self._properties['mainGroup']
group = main_group.GetChildByName(name) group = main_group.GetChildByName(name)
if group == None: if group is None:
group = PBXGroup({'name': name}) group = PBXGroup({'name': name})
main_group.AppendChild(group) main_group.AppendChild(group)
@ -2696,7 +2694,7 @@ class PBXProject(XCContainerPortal):
continue continue
other_fileref = target._properties['productReference'] other_fileref = target._properties['productReference']
if product_group.GetChildByRemoteObject(other_fileref) == None: if product_group.GetChildByRemoteObject(other_fileref) is None:
# Xcode sets remoteInfo to the name of the target and not the name # Xcode sets remoteInfo to the name of the target and not the name
# of its product, despite this proxy being a reference to the product. # of its product, despite this proxy being a reference to the product.
container_item = PBXContainerItemProxy({ container_item = PBXContainerItemProxy({

23
tools/gyp/test/actions-bare/gyptest-bare.py

@ -1,23 +0,0 @@
#!/usr/bin/env python
# Copyright (c) 2009 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies actions which are not depended on by other targets get executed.
"""
import TestGyp
test = TestGyp.TestGyp()
test.run_gyp('bare.gyp', chdir='src')
test.relocate('src', 'relocate/src')
test.build('bare.gyp', chdir='relocate/src')
file_content = 'Hello from bare.py\n'
test.built_file_must_match('out.txt', file_content, chdir='relocate/src')
test.pass_test()

25
tools/gyp/test/actions-bare/src/bare.gyp

@ -1,25 +0,0 @@
# Copyright (c) 2009 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
{
'targets': [
{
'target_name': 'bare',
'type': 'none',
'actions': [
{
'action_name': 'action1',
'inputs': [
'bare.py',
],
'outputs': [
'<(PRODUCT_DIR)/out.txt',
],
'action': ['python', 'bare.py', '<(PRODUCT_DIR)/out.txt'],
'msvs_cygwin_shell': 0,
},
],
},
],
}

11
tools/gyp/test/actions-bare/src/bare.py

@ -1,11 +0,0 @@
#!/usr/bin/env python
# Copyright (c) 2009 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import sys
f = open(sys.argv[1], 'wb')
f.write('Hello from bare.py\n')
f.close()

42
tools/gyp/test/actions-multiple/gyptest-all.py

@ -1,42 +0,0 @@
#!/usr/bin/env python
# Copyright (c) 2011 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies two actions can be attached to the same input files.
"""
import TestGyp
test = TestGyp.TestGyp()
test.run_gyp('actions.gyp', chdir='src')
test.relocate('src', 'relocate/src')
# Test that two actions can be attached to the same inputs.
test.build('actions.gyp', test.ALL, chdir='relocate/src')
test.must_contain('relocate/src/output1.txt', 'hello there')
test.must_contain('relocate/src/output2.txt', 'hello there')
test.must_contain('relocate/src/output3.txt', 'hello there')
test.must_contain('relocate/src/output4.txt', 'hello there')
# Test that process_outputs_as_sources works in conjuction with merged
# actions.
test.run_built_executable(
'multiple_action_source_filter',
chdir='relocate/src',
stdout=(
'{\n'
'bar\n'
'car\n'
'dar\n'
'ear\n'
'}\n'
),
)
test.pass_test()

165
tools/gyp/test/actions-multiple/src/actions.gyp

@ -1,165 +0,0 @@
# Copyright (c) 2011 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
{
'variables': {
# Have a long string so that actions will exceed xp 512 character
# command limit on xp.
'long_string':
'abcdefghijklmnopqrstuvwxyz0123456789'
'abcdefghijklmnopqrstuvwxyz0123456789'
'abcdefghijklmnopqrstuvwxyz0123456789'
'abcdefghijklmnopqrstuvwxyz0123456789'
'abcdefghijklmnopqrstuvwxyz0123456789'
'abcdefghijklmnopqrstuvwxyz0123456789'
'abcdefghijklmnopqrstuvwxyz0123456789'
'abcdefghijklmnopqrstuvwxyz0123456789'
'abcdefghijklmnopqrstuvwxyz0123456789'
'abcdefghijklmnopqrstuvwxyz0123456789'
'abcdefghijklmnopqrstuvwxyz0123456789'
},
'targets': [
{
'target_name': 'multiple_action_target',
'type': 'none',
'actions': [
{
'action_name': 'action1',
'inputs': [
'copy.py',
'input.txt',
],
'outputs': [
'output1.txt',
],
'action': [
'python', '<@(_inputs)', '<(_outputs)', '<(long_string)',
],
# Allows the test to run without hermetic cygwin on windows.
'msvs_cygwin_shell': 0,
},
{
'action_name': 'action2',
'inputs': [
'copy.py',
'input.txt',
],
'outputs': [
'output2.txt',
],
'action': [
'python', '<@(_inputs)', '<(_outputs)', '<(long_string)',
],
# Allows the test to run without hermetic cygwin on windows.
'msvs_cygwin_shell': 0,
},
{
'action_name': 'action3',
'inputs': [
'copy.py',
'input.txt',
],
'outputs': [
'output3.txt',
],
'action': [
'python', '<@(_inputs)', '<(_outputs)', '<(long_string)',
],
# Allows the test to run without hermetic cygwin on windows.
'msvs_cygwin_shell': 0,
},
{
'action_name': 'action4',
'inputs': [
'copy.py',
'input.txt',
],
'outputs': [
'output4.txt',
],
'action': [
'python', '<@(_inputs)', '<(_outputs)', '<(long_string)',
],
# Allows the test to run without hermetic cygwin on windows.
'msvs_cygwin_shell': 0,
},
],
},
{
'target_name': 'multiple_action_source_filter',
'type': 'executable',
'sources': [
'main.c',
# TODO(bradnelson): add foo.c here once this issue is fixed:
# http://code.google.com/p/gyp/issues/detail?id=175
],
'actions': [
{
'action_name': 'action1',
'inputs': [
'foo.c',
'filter.py',
],
'outputs': [
'<(INTERMEDIATE_DIR)/output1.c',
],
'process_outputs_as_sources': 1,
'action': [
'python', 'filter.py', 'foo', 'bar', 'foo.c', '<@(_outputs)',
],
# Allows the test to run without hermetic cygwin on windows.
'msvs_cygwin_shell': 0,
},
{
'action_name': 'action2',
'inputs': [
'foo.c',
'filter.py',
],
'outputs': [
'<(INTERMEDIATE_DIR)/output2.c',
],
'process_outputs_as_sources': 1,
'action': [
'python', 'filter.py', 'foo', 'car', 'foo.c', '<@(_outputs)',
],
# Allows the test to run without hermetic cygwin on windows.
'msvs_cygwin_shell': 0,
},
{
'action_name': 'action3',
'inputs': [
'foo.c',
'filter.py',
],
'outputs': [
'<(INTERMEDIATE_DIR)/output3.c',
],
'process_outputs_as_sources': 1,
'action': [
'python', 'filter.py', 'foo', 'dar', 'foo.c', '<@(_outputs)',
],
# Allows the test to run without hermetic cygwin on windows.
'msvs_cygwin_shell': 0,
},
{
'action_name': 'action4',
'inputs': [
'foo.c',
'filter.py',
],
'outputs': [
'<(INTERMEDIATE_DIR)/output4.c',
],
'process_outputs_as_sources': 1,
'action': [
'python', 'filter.py', 'foo', 'ear', 'foo.c', '<@(_outputs)',
],
# Allows the test to run without hermetic cygwin on windows.
'msvs_cygwin_shell': 0,
},
],
},
],
}

9
tools/gyp/test/actions-multiple/src/copy.py

@ -1,9 +0,0 @@
#!/usr/bin/env python
# Copyright (c) 2011 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import shutil
import sys
shutil.copyfile(sys.argv[1], sys.argv[2])

12
tools/gyp/test/actions-multiple/src/filter.py

@ -1,12 +0,0 @@
#!/usr/bin/env python
# Copyright (c) 2011 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import sys
data = open(sys.argv[3], 'r').read()
fh = open(sys.argv[4], 'w')
fh.write(data.replace(sys.argv[1], sys.argv[2]))
fh.close()

11
tools/gyp/test/actions-multiple/src/foo.c

@ -1,11 +0,0 @@
/*
* Copyright (c) 2011 Google Inc. All rights reserved.
* Use of this source code is governed by a BSD-style license that can be
* found in the LICENSE file.
*/
#include <stdio.h>
void foo(void) {
printf("foo\n");
}

1
tools/gyp/test/actions-multiple/src/input.txt

@ -1 +0,0 @@
hello there

22
tools/gyp/test/actions-multiple/src/main.c

@ -1,22 +0,0 @@
/*
* Copyright (c) 2011 Google Inc. All rights reserved.
* Use of this source code is governed by a BSD-style license that can be
* found in the LICENSE file.
*/
#include <stdio.h>
void bar(void);
void car(void);
void dar(void);
void ear(void);
int main() {
printf("{\n");
bar();
car();
dar();
ear();
printf("}\n");
return 0;
}

26
tools/gyp/test/actions-subdir/gyptest-action.py

@ -1,26 +0,0 @@
#!/usr/bin/env python
# Copyright (c) 2009 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Test actions that output to PRODUCT_DIR.
"""
import TestGyp
# TODO fix this for xcode: http://code.google.com/p/gyp/issues/detail?id=88
test = TestGyp.TestGyp(formats=['!xcode'])
test.run_gyp('none.gyp', chdir='src')
test.build('none.gyp', test.ALL, chdir='src')
file_content = 'Hello from make-file.py\n'
subdir_file_content = 'Hello from make-subdir-file.py\n'
test.built_file_must_match('file.out', file_content, chdir='src')
test.built_file_must_match('subdir_file.out', subdir_file_content, chdir='src')
test.pass_test()

11
tools/gyp/test/actions-subdir/src/make-file.py

@ -1,11 +0,0 @@
#!/usr/bin/env python
# Copyright (c) 2009 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import sys
contents = 'Hello from make-file.py\n'
open(sys.argv[1], 'wb').write(contents)

31
tools/gyp/test/actions-subdir/src/none.gyp

@ -1,31 +0,0 @@
# Copyright (c) 2009 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
{
'targets': [
{
'target_name': 'file',
'type': 'none',
'msvs_cygwin_shell': 0,
'actions': [
{
'action_name': 'make-file',
'inputs': [
'make-file.py',
],
'outputs': [
'<(PRODUCT_DIR)/file.out',
],
'action': [
'python', '<(_inputs)', '<@(_outputs)',
],
'process_outputs_as_sources': 1,
}
],
'dependencies': [
'subdir/subdir.gyp:subdir_file',
],
},
],
}

11
tools/gyp/test/actions-subdir/src/subdir/make-subdir-file.py

@ -1,11 +0,0 @@
#!/usr/bin/env python
# Copyright (c) 2009 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import sys
contents = 'Hello from make-subdir-file.py\n'
open(sys.argv[1], 'wb').write(contents)

28
tools/gyp/test/actions-subdir/src/subdir/subdir.gyp

@ -1,28 +0,0 @@
# Copyright (c) 2009 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
{
'targets': [
{
'target_name': 'subdir_file',
'type': 'none',
'msvs_cygwin_shell': 0,
'actions': [
{
'action_name': 'make-subdir-file',
'inputs': [
'make-subdir-file.py',
],
'outputs': [
'<(PRODUCT_DIR)/subdir_file.out',
],
'action': [
'python', '<(_inputs)', '<@(_outputs)',
],
'process_outputs_as_sources': 1,
}
],
},
],
}

101
tools/gyp/test/actions/gyptest-all.py

@ -1,101 +0,0 @@
#!/usr/bin/env python
# Copyright (c) 2009 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies simple actions when using an explicit build target of 'all'.
"""
import glob
import os
import TestGyp
test = TestGyp.TestGyp(workdir='workarea_all')
test.run_gyp('actions.gyp', chdir='src')
test.relocate('src', 'relocate/src')
# Some gyp files use an action that mentions an output but never
# writes it as a means to making the action run on every build. That
# doesn't mesh well with ninja's semantics. TODO(evan): figure out
# how to work always-run actions in to ninja.
if test.format == 'ninja':
test.build('actions.gyp', test.ALL, chdir='relocate/src')
else:
# Test that an "always run" action increases a counter on multiple
# invocations, and that a dependent action updates in step.
test.build('actions.gyp', test.ALL, chdir='relocate/src')
test.must_match('relocate/src/subdir1/actions-out/action-counter.txt', '1')
test.must_match('relocate/src/subdir1/actions-out/action-counter_2.txt', '1')
test.build('actions.gyp', test.ALL, chdir='relocate/src')
test.must_match('relocate/src/subdir1/actions-out/action-counter.txt', '2')
test.must_match('relocate/src/subdir1/actions-out/action-counter_2.txt', '2')
# The "always run" action only counts to 2, but the dependent target
# will count forever if it's allowed to run. This verifies that the
# dependent target only runs when the "always run" action generates
# new output, not just because the "always run" ran.
test.build('actions.gyp', test.ALL, chdir='relocate/src')
test.must_match('relocate/src/subdir1/actions-out/action-counter.txt', '2')
test.must_match('relocate/src/subdir1/actions-out/action-counter_2.txt', '2')
expect = """\
Hello from program.c
Hello from make-prog1.py
Hello from make-prog2.py
"""
if test.format == 'xcode':
chdir = 'relocate/src/subdir1'
else:
chdir = 'relocate/src'
test.run_built_executable('program', chdir=chdir, stdout=expect)
test.must_match('relocate/src/subdir2/file.out', "Hello from make-file.py\n")
expect = "Hello from generate_main.py\n"
if test.format == 'xcode':
chdir = 'relocate/src/subdir3'
else:
chdir = 'relocate/src'
test.run_built_executable('null_input', chdir=chdir, stdout=expect)
# Clean out files which may have been created if test.ALL was run.
def clean_dep_files():
for file in (glob.glob('relocate/src/dep_*.txt') +
glob.glob('relocate/src/deps_all_done_*.txt')):
if os.path.exists(file):
os.remove(file)
# Confirm our clean.
clean_dep_files()
test.must_not_exist('relocate/src/dep_1.txt')
test.must_not_exist('relocate/src/deps_all_done_first_123.txt')
# Make sure all deps finish before an action is run on a 'None' target.
# If using the Make builder, add -j to make things more difficult.
arguments = []
if test.format == 'make':
arguments = ['-j']
test.build('actions.gyp', 'action_with_dependencies_123', chdir='relocate/src',
arguments=arguments)
test.must_exist('relocate/src/deps_all_done_first_123.txt')
# Try again with a target that has deps in reverse. Output files from
# previous tests deleted. Confirm this execution did NOT run the ALL
# target which would mess up our dep tests.
clean_dep_files()
test.build('actions.gyp', 'action_with_dependencies_321', chdir='relocate/src',
arguments=arguments)
test.must_exist('relocate/src/deps_all_done_first_321.txt')
test.must_not_exist('relocate/src/deps_all_done_first_123.txt')
test.pass_test()

68
tools/gyp/test/actions/gyptest-default.py

@ -1,68 +0,0 @@
#!/usr/bin/env python
# Copyright (c) 2009 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies simple actions when using the default build target.
"""
import TestGyp
test = TestGyp.TestGyp(workdir='workarea_default')
test.run_gyp('actions.gyp', chdir='src')
test.relocate('src', 'relocate/src')
# Some gyp files use an action that mentions an output but never
# writes it as a means to making the action run on every build. That
# doesn't mesh well with ninja's semantics. TODO(evan): figure out
# how to work always-run actions in to ninja.
if test.format == 'ninja':
test.build('actions.gyp', test.ALL, chdir='relocate/src')
else:
# Test that an "always run" action increases a counter on multiple
# invocations, and that a dependent action updates in step.
test.build('actions.gyp', chdir='relocate/src')
test.must_match('relocate/src/subdir1/actions-out/action-counter.txt', '1')
test.must_match('relocate/src/subdir1/actions-out/action-counter_2.txt', '1')
test.build('actions.gyp', chdir='relocate/src')
test.must_match('relocate/src/subdir1/actions-out/action-counter.txt', '2')
test.must_match('relocate/src/subdir1/actions-out/action-counter_2.txt', '2')
# The "always run" action only counts to 2, but the dependent target
# will count forever if it's allowed to run. This verifies that the
# dependent target only runs when the "always run" action generates
# new output, not just because the "always run" ran.
test.build('actions.gyp', test.ALL, chdir='relocate/src')
test.must_match('relocate/src/subdir1/actions-out/action-counter.txt', '2')
test.must_match('relocate/src/subdir1/actions-out/action-counter_2.txt', '2')
expect = """\
Hello from program.c
Hello from make-prog1.py
Hello from make-prog2.py
"""
if test.format == 'xcode':
chdir = 'relocate/src/subdir1'
else:
chdir = 'relocate/src'
test.run_built_executable('program', chdir=chdir, stdout=expect)
test.must_match('relocate/src/subdir2/file.out', "Hello from make-file.py\n")
expect = "Hello from generate_main.py\n"
if test.format == 'xcode':
chdir = 'relocate/src/subdir3'
else:
chdir = 'relocate/src'
test.run_built_executable('null_input', chdir=chdir, stdout=expect)
test.pass_test()

24
tools/gyp/test/actions/gyptest-errors.py

@ -1,24 +0,0 @@
#!/usr/bin/env python
# Copyright (c) 2009 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies behavior for different action configuration errors:
exit status of 1, and the expected error message must be in stderr.
"""
import TestGyp
test = TestGyp.TestGyp(workdir='workarea_errors')
test.run_gyp('action_missing_name.gyp', chdir='src', status=1, stderr=None)
expect = [
"Anonymous action in target broken_actions2. An action must have an 'action_name' field.",
]
test.must_contain_all_lines(test.stderr(), expect)
test.pass_test()

24
tools/gyp/test/actions/src/action_missing_name.gyp

@ -1,24 +0,0 @@
# Copyright (c) 2009 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
{
'targets': [
{
'target_name': 'broken_actions2',
'type': 'none',
'actions': [
{
'inputs': [
'no_name.input',
],
'action': [
'python',
'-c',
'print \'missing name\'',
],
},
],
},
],
}

114
tools/gyp/test/actions/src/actions.gyp

@ -1,114 +0,0 @@
# Copyright (c) 2009 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
{
'targets': [
{
'target_name': 'pull_in_all_actions',
'type': 'none',
'dependencies': [
'subdir1/executable.gyp:*',
'subdir2/none.gyp:*',
'subdir3/null_input.gyp:*',
],
},
{
'target_name': 'depend_on_always_run_action',
'type': 'none',
'dependencies': [ 'subdir1/executable.gyp:counter' ],
'actions': [
{
'action_name': 'use_always_run_output',
'inputs': [
'subdir1/actions-out/action-counter.txt',
'subdir1/counter.py',
],
'outputs': [
'subdir1/actions-out/action-counter_2.txt',
],
'action': [
'python', 'subdir1/counter.py', '<(_outputs)',
],
# Allows the test to run without hermetic cygwin on windows.
'msvs_cygwin_shell': 0,
},
],
},
# Three deps which don't finish immediately.
# Each one has a small delay then creates a file.
# Delays are 1.0, 1.1, and 2.0 seconds.
{
'target_name': 'dep_1',
'type': 'none',
'actions': [{
'inputs': [ 'actions.gyp' ],
'outputs': [ 'dep_1.txt' ],
'action_name': 'dep_1',
'action': [ 'python', '-c',
'import time; time.sleep(1); open(\'dep_1.txt\', \'w\')' ],
# Allows the test to run without hermetic cygwin on windows.
'msvs_cygwin_shell': 0,
}],
},
{
'target_name': 'dep_2',
'type': 'none',
'actions': [{
'inputs': [ 'actions.gyp' ],
'outputs': [ 'dep_2.txt' ],
'action_name': 'dep_2',
'action': [ 'python', '-c',
'import time; time.sleep(1.1); open(\'dep_2.txt\', \'w\')' ],
# Allows the test to run without hermetic cygwin on windows.
'msvs_cygwin_shell': 0,
}],
},
{
'target_name': 'dep_3',
'type': 'none',
'actions': [{
'inputs': [ 'actions.gyp' ],
'outputs': [ 'dep_3.txt' ],
'action_name': 'dep_3',
'action': [ 'python', '-c',
'import time; time.sleep(2.0); open(\'dep_3.txt\', \'w\')' ],
# Allows the test to run without hermetic cygwin on windows.
'msvs_cygwin_shell': 0,
}],
},
# An action which assumes the deps have completed.
# Does NOT list the output files of it's deps as inputs.
# On success create the file deps_all_done_first.txt.
{
'target_name': 'action_with_dependencies_123',
'type': 'none',
'dependencies': [ 'dep_1', 'dep_2', 'dep_3' ],
'actions': [{
'inputs': [ 'actions.gyp' ],
'outputs': [ 'deps_all_done_first_123.txt' ],
'action_name': 'action_with_dependencies_123',
'action': [ 'python', 'confirm-dep-files.py', '<(_outputs)' ],
# Allows the test to run without hermetic cygwin on windows.
'msvs_cygwin_shell': 0,
}],
},
# Same as above but with deps in reverse.
{
'target_name': 'action_with_dependencies_321',
'type': 'none',
'dependencies': [ 'dep_3', 'dep_2', 'dep_1' ],
'actions': [{
'inputs': [ 'actions.gyp' ],
'outputs': [ 'deps_all_done_first_321.txt' ],
'action_name': 'action_with_dependencies_321',
'action': [ 'python', 'confirm-dep-files.py', '<(_outputs)' ],
# Allows the test to run without hermetic cygwin on windows.
'msvs_cygwin_shell': 0,
}],
},
],
}

21
tools/gyp/test/actions/src/confirm-dep-files.py

@ -1,21 +0,0 @@
#!/usr/bin/env python
# Copyright (c) 2011 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Confirms presence of files generated by our targets we depend on.
If they exist, create a new file.
Note target's input files are explicitly NOT defined in the gyp file
so they can't easily be passed to this script as args.
"""
import os
import sys
outfile = sys.argv[1] # Example value we expect: deps_all_done_first_123.txt
if (os.path.exists("dep_1.txt") and
os.path.exists("dep_2.txt") and
os.path.exists("dep_3.txt")):
open(outfile, "w")

46
tools/gyp/test/actions/src/subdir1/counter.py

@ -1,46 +0,0 @@
#!/usr/bin/env python
# Copyright (c) 2010 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import sys
import time
output = sys.argv[1]
persistoutput = "%s.persist" % sys.argv[1]
count = 0
try:
count = open(persistoutput, 'r').read()
except:
pass
count = int(count) + 1
if len(sys.argv) > 2:
max_count = int(sys.argv[2])
if count > max_count:
count = max_count
oldcount = 0
try:
oldcount = open(output, 'r').read()
except:
pass
# Save the count in a file that is undeclared, and thus hidden, to gyp. We need
# to do this because, prior to running commands, scons deletes any declared
# outputs, so we would lose our count if we just wrote to the given output file.
# (The other option is to use Precious() in the scons generator, but that seems
# too heavy-handed just to support this somewhat unrealistic test case, and
# might lead to unintended side-effects).
open(persistoutput, 'w').write('%d' % (count))
# Only write the given output file if the count has changed.
if int(oldcount) != count:
open(output, 'w').write('%d' % (count))
# Sleep so the next run changes the file time sufficiently to make the build
# detect the file as changed.
time.sleep(1)
sys.exit(0)

74
tools/gyp/test/actions/src/subdir1/executable.gyp

@ -1,74 +0,0 @@
# Copyright (c) 2009 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
{
'targets': [
{
'target_name': 'program',
'type': 'executable',
'msvs_cygwin_shell': 0,
'sources': [
'program.c',
],
'actions': [
{
'action_name': 'make-prog1',
'inputs': [
'make-prog1.py',
],
'outputs': [
'<(INTERMEDIATE_DIR)/prog1.c',
],
'action': [
'python', '<(_inputs)', '<@(_outputs)',
],
'process_outputs_as_sources': 1,
},
{
'action_name': 'make-prog2',
'inputs': [
'make-prog2.py',
],
'outputs': [
'actions-out/prog2.c',
],
'action': [
'python', '<(_inputs)', '<@(_outputs)',
],
'process_outputs_as_sources': 1,
# Allows the test to run without hermetic cygwin on windows.
'msvs_cygwin_shell': 0,
},
],
},
{
'target_name': 'counter',
'type': 'none',
'actions': [
{
# This action should always run, regardless of whether or not it's
# inputs or the command-line change. We do this by creating a dummy
# first output, which is always missing, thus causing the build to
# always try to recreate it. Actual output files should be listed
# after the dummy one, and dependent targets should list the real
# output(s) in their inputs
# (see '../actions.gyp:depend_on_always_run_action').
'action_name': 'action_counter',
'inputs': [
'counter.py',
],
'outputs': [
'actions-out/action-counter.txt.always',
'actions-out/action-counter.txt',
],
'action': [
'python', '<(_inputs)', 'actions-out/action-counter.txt', '2',
],
# Allows the test to run without hermetic cygwin on windows.
'msvs_cygwin_shell': 0,
},
],
},
],
}

20
tools/gyp/test/actions/src/subdir1/make-prog1.py

@ -1,20 +0,0 @@
#!/usr/bin/env python
# Copyright (c) 2009 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import sys
contents = r"""
#include <stdio.h>
void prog1(void)
{
printf("Hello from make-prog1.py\n");
}
"""
open(sys.argv[1], 'w').write(contents)
sys.exit(0)

20
tools/gyp/test/actions/src/subdir1/make-prog2.py

@ -1,20 +0,0 @@
#!/usr/bin/env python
# Copyright (c) 2009 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import sys
contents = r"""
#include <stdio.h>
void prog2(void)
{
printf("Hello from make-prog2.py\n");
}
"""
open(sys.argv[1], 'w').write(contents)
sys.exit(0)

12
tools/gyp/test/actions/src/subdir1/program.c

@ -1,12 +0,0 @@
#include <stdio.h>
extern void prog1(void);
extern void prog2(void);
int main(int argc, char *argv[])
{
printf("Hello from program.c\n");
prog1();
prog2();
return 0;
}

11
tools/gyp/test/actions/src/subdir2/make-file.py

@ -1,11 +0,0 @@
#!/usr/bin/env python
# Copyright (c) 2009 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import sys
contents = "Hello from make-file.py\n"
open(sys.argv[1], 'wb').write(contents)

33
tools/gyp/test/actions/src/subdir2/none.gyp

@ -1,33 +0,0 @@
# Copyright (c) 2009 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
{
'targets': [
{
'target_name': 'file',
'type': 'none',
'msvs_cygwin_shell': 0,
'actions': [
{
'action_name': 'make-file',
'inputs': [
'make-file.py',
],
'outputs': [
'file.out',
# TODO: enhance testing infrastructure to test this
# without having to hard-code the intermediate dir paths.
#'<(INTERMEDIATE_DIR)/file.out',
],
'action': [
'python', '<(_inputs)', '<@(_outputs)',
],
'process_outputs_as_sources': 1,
# Allows the test to run without hermetic cygwin on windows.
'msvs_cygwin_shell': 0,
}
],
},
],
}

21
tools/gyp/test/actions/src/subdir3/generate_main.py

@ -1,21 +0,0 @@
#!/usr/bin/env python
# Copyright (c) 2009 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import sys
contents = """
#include <stdio.h>
int main(int argc, char *argv[])
{
printf("Hello from generate_main.py\\n");
return 0;
}
"""
open(sys.argv[1], 'w').write(contents)
sys.exit(0)

29
tools/gyp/test/actions/src/subdir3/null_input.gyp

@ -1,29 +0,0 @@
# Copyright (c) 2009 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
{
'targets': [
{
'target_name': 'null_input',
'type': 'executable',
'msvs_cygwin_shell': 0,
'actions': [
{
'action_name': 'generate_main',
'process_outputs_as_sources': 1,
'inputs': [],
'outputs': [
'<(INTERMEDIATE_DIR)/main.c',
],
'action': [
# TODO: we can't just use <(_outputs) here?!
'python', 'generate_main.py', '<(INTERMEDIATE_DIR)/main.c',
],
# Allows the test to run without hermetic cygwin on windows.
'msvs_cygwin_shell': 0,
},
],
},
],
}

55
tools/gyp/test/additional-targets/gyptest-additional.py

@ -1,55 +0,0 @@
#!/usr/bin/env python
# Copyright (c) 2009 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies simple actions when using an explicit build target of 'all'.
"""
import TestGyp
test = TestGyp.TestGyp()
test.run_gyp('all.gyp', chdir='src')
test.relocate('src', 'relocate/src')
# Build all.
test.build('all.gyp', chdir='relocate/src')
if test.format=='xcode':
chdir = 'relocate/src/dir1'
else:
chdir = 'relocate/src'
# Output is as expected.
file_content = 'Hello from emit.py\n'
test.built_file_must_match('out2.txt', file_content, chdir=chdir)
test.built_file_must_not_exist('out.txt', chdir='relocate/src')
test.built_file_must_not_exist('foolib1',
type=test.SHARED_LIB,
chdir=chdir)
# TODO(mmoss) Make consistent with scons, with 'dir1' before 'out/Default'?
if test.format in ('make', 'ninja'):
chdir='relocate/src'
else:
chdir='relocate/src/dir1'
# Build the action explicitly.
test.build('actions.gyp', 'action1_target', chdir=chdir)
# Check that things got run.
file_content = 'Hello from emit.py\n'
test.built_file_must_exist('out.txt', chdir=chdir)
# Build the shared library explicitly.
test.build('actions.gyp', 'foolib1', chdir=chdir)
test.built_file_must_exist('foolib1',
type=test.SHARED_LIB,
chdir=chdir)
test.pass_test()

13
tools/gyp/test/additional-targets/src/all.gyp

@ -1,13 +0,0 @@
# Copyright (c) 2009 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
{
'targets': [
{
'target_name': 'all_targets',
'type': 'none',
'dependencies': ['dir1/actions.gyp:*'],
},
],
}

56
tools/gyp/test/additional-targets/src/dir1/actions.gyp

@ -1,56 +0,0 @@
# Copyright (c) 2009 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
{
'targets': [
{
'target_name': 'action1_target',
'type': 'none',
'suppress_wildcard': 1,
'actions': [
{
'action_name': 'action1',
'inputs': [
'emit.py',
],
'outputs': [
'<(PRODUCT_DIR)/out.txt',
],
'action': ['python', 'emit.py', '<(PRODUCT_DIR)/out.txt'],
'msvs_cygwin_shell': 0,
},
],
},
{
'target_name': 'action2_target',
'type': 'none',
'actions': [
{
'action_name': 'action2',
'inputs': [
'emit.py',
],
'outputs': [
'<(PRODUCT_DIR)/out2.txt',
],
'action': ['python', 'emit.py', '<(PRODUCT_DIR)/out2.txt'],
'msvs_cygwin_shell': 0,
},
],
},
{
'target_name': 'foolib1',
'type': 'shared_library',
'suppress_wildcard': 1,
'sources': ['lib1.c'],
},
],
'conditions': [
['OS=="linux"', {
'target_defaults': {
'cflags': ['-fPIC'],
},
}],
],
}

11
tools/gyp/test/additional-targets/src/dir1/emit.py

@ -1,11 +0,0 @@
#!/usr/bin/env python
# Copyright (c) 2009 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import sys
f = open(sys.argv[1], 'wb')
f.write('Hello from emit.py\n')
f.close()

6
tools/gyp/test/additional-targets/src/dir1/lib1.c

@ -1,6 +0,0 @@
#ifdef _WIN32
__declspec(dllexport)
#endif
int func1(void) {
return 42;
}

31
tools/gyp/test/assembly/gyptest-assembly.py

@ -1,31 +0,0 @@
#!/usr/bin/env python
# Copyright (c) 2009 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies that .hpp files are ignored when included in the source list on all
platforms.
"""
import sys
import TestGyp
# TODO(bradnelson): get this working for windows.
test = TestGyp.TestGyp(formats=['make', 'ninja', 'scons', 'xcode'])
test.run_gyp('assembly.gyp', chdir='src')
test.relocate('src', 'relocate/src')
test.build('assembly.gyp', test.ALL, chdir='relocate/src')
expect = """\
Hello from program.c
Got 42.
"""
test.run_built_executable('program', chdir='relocate/src', stdout=expect)
test.pass_test()

4
tools/gyp/test/assembly/src/as.bat

@ -1,4 +0,0 @@
@echo off
:: Mock windows assembler.
cl /c %1 /Fo"%2"

59
tools/gyp/test/assembly/src/assembly.gyp

@ -1,59 +0,0 @@
# Copyright (c) 2009 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
{
'target_defaults': {
'conditions': [
['OS=="win"', {
'defines': ['PLATFORM_WIN'],
}],
['OS=="mac"', {
'defines': ['PLATFORM_MAC'],
}],
['OS=="linux"', {
'defines': ['PLATFORM_LINUX'],
}],
],
},
'targets': [
{
'target_name': 'program',
'type': 'executable',
'dependencies': ['lib1'],
'sources': [
'program.c',
],
},
{
'target_name': 'lib1',
'type': 'static_library',
'sources': [
'lib1.S',
],
},
],
'conditions': [
['OS=="win"', {
'target_defaults': {
'rules': [
{
'rule_name': 'assembler',
'msvs_cygwin_shell': 0,
'extension': 'S',
'inputs': [
'as.bat',
],
'outputs': [
'<(INTERMEDIATE_DIR)/<(RULE_INPUT_ROOT).obj',
],
'action':
['as.bat', 'lib1.c', '<(_outputs)'],
'message': 'Building assembly file <(RULE_INPUT_PATH)',
'process_outputs_as_sources': 1,
},
],
},
},],
],
}

10
tools/gyp/test/assembly/src/lib1.S

@ -1,10 +0,0 @@
#if PLATFORM_WINDOWS || PLATFORM_MAC
# define IDENTIFIER(n) _##n
#else /* Linux */
# define IDENTIFIER(n) n
#endif
.globl IDENTIFIER(lib1_function)
IDENTIFIER(lib1_function):
movl $42, %eax
ret

3
tools/gyp/test/assembly/src/lib1.c

@ -1,3 +0,0 @@
/* C fallback for lib1.S (used by the Windows rule); must return the same
 * value (42) that the assembly version produces. */
int lib1_function(void) {
  return 42;
}

12
tools/gyp/test/assembly/src/program.c

@ -1,12 +0,0 @@
#include <stdio.h>

extern int lib1_function(void);

/* Prints a banner plus the value returned by lib1 (assembly or C fallback).
 * The gyp test matches this stdout output exactly. */
int main(int argc, char *argv[])
{
  fprintf(stdout, "Hello from program.c\n");
  fflush(stdout);
  fprintf(stdout, "Got %d.\n", lib1_function());
  fflush(stdout);
  return 0;
}

77
tools/gyp/test/builddir/gyptest-all.py

@ -1,77 +0,0 @@
#!/usr/bin/env python

# Copyright (c) 2009 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""
Verify the settings that cause a set of programs to be created in
a specific build directory, and that no intermediate built files
get created outside of that build directory hierarchy even when
referred to with deeply-nested ../../.. paths.
"""

import TestGyp

# TODO(mmoss): Make only supports (theoretically) a single, global build
# directory (through GYP_GENERATOR_FLAGS 'output_dir'), rather than
# gyp-file-specific settings (e.g. the stuff in builddir.gypi) that the other
# generators support, so this doesn't work yet for make.
# TODO(mmoss) Make also has the issue that the top-level Makefile is written to
# the "--depth" location, which is one level above 'src', but then this test
# moves 'src' somewhere else, leaving the Makefile behind, so make can't find
# its sources. I'm not sure if make is wrong for writing outside the current
# directory, or if the test is wrong for assuming everything generated is under
# the current directory.
test = TestGyp.TestGyp(formats=['!make', '!ninja'])

test.run_gyp('prog1.gyp', '--depth=..', chdir='src')

test.relocate('src', 'relocate/src')

test.subdir('relocate/builddir')

# Make sure that all the built ../../etc. files only get put under builddir,
# by making all of relocate read-only and then making only builddir writable.
test.writable('relocate', False)
test.writable('relocate/builddir', True)

# Suppress the test infrastructure's setting SYMROOT on the command line.
test.build('prog1.gyp', test.ALL, SYMROOT=None, chdir='relocate/src')

# Each progN prints its own banner followed by its funcN helper's banner.
expect1 = """\
Hello from prog1.c
Hello from func1.c
"""

expect2 = """\
Hello from subdir2/prog2.c
Hello from func2.c
"""

expect3 = """\
Hello from subdir2/subdir3/prog3.c
Hello from func3.c
"""

expect4 = """\
Hello from subdir2/subdir3/subdir4/prog4.c
Hello from func4.c
"""

expect5 = """\
Hello from subdir2/subdir3/subdir4/subdir5/prog5.c
Hello from func5.c
"""


def run_builddir(prog, expect):
  """Run one built program out of the shared build directory and check its
  stdout against *expect*."""
  # Renamed from 'dir' to avoid shadowing the builtin.
  build_dir = 'relocate/builddir/Default/'
  test.run(program=test.workpath(build_dir + prog), stdout=expect)


run_builddir('prog1', expect1)
run_builddir('prog2', expect2)
run_builddir('prog3', expect3)
run_builddir('prog4', expect4)
run_builddir('prog5', expect5)

test.pass_test()

77
tools/gyp/test/builddir/gyptest-default.py

@ -1,77 +0,0 @@
#!/usr/bin/env python

# Copyright (c) 2009 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""
Verify the settings that cause a set of programs to be created in
a specific build directory, and that no intermediate built files
get created outside of that build directory hierarchy even when
referred to with deeply-nested ../../.. paths.
"""

import TestGyp

# TODO(mmoss): Make only supports (theoretically) a single, global build
# directory (through GYP_GENERATOR_FLAGS 'output_dir'), rather than
# gyp-file-specific settings (e.g. the stuff in builddir.gypi) that the other
# generators support, so this doesn't work yet for make.
# TODO(mmoss) Make also has the issue that the top-level Makefile is written to
# the "--depth" location, which is one level above 'src', but then this test
# moves 'src' somewhere else, leaving the Makefile behind, so make can't find
# its sources. I'm not sure if make is wrong for writing outside the current
# directory, or if the test is wrong for assuming everything generated is under
# the current directory.
test = TestGyp.TestGyp(formats=['!make', '!ninja'])

test.run_gyp('prog1.gyp', '--depth=..', chdir='src')

test.relocate('src', 'relocate/src')

test.subdir('relocate/builddir')

# Make sure that all the built ../../etc. files only get put under builddir,
# by making all of relocate read-only and then making only builddir writable.
test.writable('relocate', False)
test.writable('relocate/builddir', True)

# Suppress the test infrastructure's setting SYMROOT on the command line.
# Unlike gyptest-all.py, this builds only the default target.
test.build('prog1.gyp', SYMROOT=None, chdir='relocate/src')

# Each progN prints its own banner followed by its funcN helper's banner.
expect1 = """\
Hello from prog1.c
Hello from func1.c
"""

expect2 = """\
Hello from subdir2/prog2.c
Hello from func2.c
"""

expect3 = """\
Hello from subdir2/subdir3/prog3.c
Hello from func3.c
"""

expect4 = """\
Hello from subdir2/subdir3/subdir4/prog4.c
Hello from func4.c
"""

expect5 = """\
Hello from subdir2/subdir3/subdir4/subdir5/prog5.c
Hello from func5.c
"""


def run_builddir(prog, expect):
  """Run one built program out of the shared build directory and check its
  stdout against *expect*."""
  # Renamed from 'dir' to avoid shadowing the builtin.
  build_dir = 'relocate/builddir/Default/'
  test.run(program=test.workpath(build_dir + prog), stdout=expect)


run_builddir('prog1', expect1)
run_builddir('prog2', expect2)
run_builddir('prog3', expect3)
run_builddir('prog4', expect4)
run_builddir('prog5', expect5)

test.pass_test()

21
tools/gyp/test/builddir/src/builddir.gypi

@ -1,21 +0,0 @@
# Copyright (c) 2009 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

# Shared settings, included by every .gyp in this test, that redirect each
# generator's output into a common <(DEPTH)/builddir directory.
{
  'target_defaults': {
    'configurations': {
      'Default': {
        'msvs_configuration_attributes': {
          # Both path separators are escaped backslashes; the previous
          # '\\builddir\Default' mixed an escaped and a raw backslash and
          # relied on Python passing the unknown escape '\D' through.
          'OutputDirectory': '<(DEPTH)\\builddir\\Default',
        },
      },
    },
  },
  'scons_settings': {
    'sconsbuild_dir': '<(DEPTH)/builddir',
  },
  'xcode_settings': {
    'SYMROOT': '<(DEPTH)/builddir',
  },
}

6
tools/gyp/test/builddir/src/func1.c

@ -1,6 +0,0 @@
#include <stdio.h>

/* Prints the helper banner that prog1's expected output includes. */
void func1(void)
{
  printf("Hello from func1.c\n");
}

6
tools/gyp/test/builddir/src/func2.c

@ -1,6 +0,0 @@
#include <stdio.h>

/* Prints the helper banner that prog2's expected output includes. */
void func2(void)
{
  printf("Hello from func2.c\n");
}

6
tools/gyp/test/builddir/src/func3.c

@ -1,6 +0,0 @@
#include <stdio.h>

/* Prints the helper banner that prog3's expected output includes. */
void func3(void)
{
  printf("Hello from func3.c\n");
}

6
tools/gyp/test/builddir/src/func4.c

@ -1,6 +0,0 @@
#include <stdio.h>

/* Prints the helper banner that prog4's expected output includes. */
void func4(void)
{
  printf("Hello from func4.c\n");
}

6
tools/gyp/test/builddir/src/func5.c

@ -1,6 +0,0 @@
#include <stdio.h>

/* Prints the helper banner that prog5's expected output includes. */
void func5(void)
{
  printf("Hello from func5.c\n");
}

10
tools/gyp/test/builddir/src/prog1.c

@ -1,10 +0,0 @@
#include <stdio.h>

extern void func1(void);

/* Prints its own banner then func1's; the gyp builddir test matches this
 * two-line stdout output exactly. */
int main(int argc, char *argv[])
{
  printf("Hello from prog1.c\n");
  func1();
  return 0;
}

30
tools/gyp/test/builddir/src/prog1.gyp

@ -1,30 +0,0 @@
# Copyright (c) 2009 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

{
  # builddir.gypi redirects all build output into <(DEPTH)/builddir.
  'includes': [
    'builddir.gypi',
  ],
  'targets': [
    {
      # 'none' target whose only job is to pull every progN into the build.
      'target_name': 'pull_in_all',
      'type': 'none',
      'dependencies': [
        'prog1',
        'subdir2/prog2.gyp:prog2',
        'subdir2/subdir3/prog3.gyp:prog3',
        'subdir2/subdir3/subdir4/prog4.gyp:prog4',
        'subdir2/subdir3/subdir4/subdir5/prog5.gyp:prog5',
      ],
    },
    {
      'target_name': 'prog1',
      'type': 'executable',
      'sources': [
        'prog1.c',
        'func1.c',
      ],
    },
  ],
}

10
tools/gyp/test/builddir/src/subdir2/prog2.c

@ -1,10 +0,0 @@
#include <stdio.h>

extern void func2(void);

/* Prints its own banner then func2's; the gyp builddir test matches this
 * two-line stdout output exactly. */
int main(int argc, char *argv[])
{
  printf("Hello from subdir2/prog2.c\n");
  func2();
  return 0;
}

19
tools/gyp/test/builddir/src/subdir2/prog2.gyp

@ -1,19 +0,0 @@
# Copyright (c) 2009 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

{
  # builddir.gypi redirects all build output into <(DEPTH)/builddir.
  'includes': [
    '../builddir.gypi',
  ],
  'targets': [
    {
      'target_name': 'prog2',
      'type': 'executable',
      'sources': [
        'prog2.c',
        # Deliberately reaches up the tree: the test verifies that files
        # referenced via ../ paths still build under builddir only.
        '../func2.c',
      ],
    },
  ],
}

10
tools/gyp/test/builddir/src/subdir2/subdir3/prog3.c

@ -1,10 +0,0 @@
#include <stdio.h>

extern void func3(void);

/* Prints its own banner then func3's; the gyp builddir test matches this
 * two-line stdout output exactly. */
int main(int argc, char *argv[])
{
  printf("Hello from subdir2/subdir3/prog3.c\n");
  func3();
  return 0;
}

19
tools/gyp/test/builddir/src/subdir2/subdir3/prog3.gyp

@ -1,19 +0,0 @@
# Copyright (c) 2009 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

{
  # builddir.gypi redirects all build output into <(DEPTH)/builddir.
  'includes': [
    '../../builddir.gypi',
  ],
  'targets': [
    {
      'target_name': 'prog3',
      'type': 'executable',
      'sources': [
        'prog3.c',
        # Deliberately reaches up the tree: the test verifies that files
        # referenced via ../../ paths still build under builddir only.
        '../../func3.c',
      ],
    },
  ],
}

10
tools/gyp/test/builddir/src/subdir2/subdir3/subdir4/prog4.c

@ -1,10 +0,0 @@
#include <stdio.h>

extern void func4(void);

/* Prints its own banner then func4's; the gyp builddir test matches this
 * two-line stdout output exactly. */
int main(int argc, char *argv[])
{
  printf("Hello from subdir2/subdir3/subdir4/prog4.c\n");
  func4();
  return 0;
}

19
tools/gyp/test/builddir/src/subdir2/subdir3/subdir4/prog4.gyp

@ -1,19 +0,0 @@
# Copyright (c) 2009 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

{
  # builddir.gypi redirects all build output into <(DEPTH)/builddir.
  'includes': [
    '../../../builddir.gypi',
  ],
  'targets': [
    {
      'target_name': 'prog4',
      'type': 'executable',
      'sources': [
        'prog4.c',
        # Deliberately reaches up the tree: the test verifies that files
        # referenced via ../../../ paths still build under builddir only.
        '../../../func4.c',
      ],
    },
  ],
}

10
tools/gyp/test/builddir/src/subdir2/subdir3/subdir4/subdir5/prog5.c

@ -1,10 +0,0 @@
#include <stdio.h>

extern void func5(void);

/* Prints its own banner then func5's; the gyp builddir test matches this
 * two-line stdout output exactly. */
int main(int argc, char *argv[])
{
  printf("Hello from subdir2/subdir3/subdir4/subdir5/prog5.c\n");
  func5();
  return 0;
}

19
tools/gyp/test/builddir/src/subdir2/subdir3/subdir4/subdir5/prog5.gyp

@ -1,19 +0,0 @@
# Copyright (c) 2009 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

{
  # builddir.gypi redirects all build output into <(DEPTH)/builddir.
  'includes': [
    '../../../../builddir.gypi',
  ],
  'targets': [
    {
      'target_name': 'prog5',
      'type': 'executable',
      'sources': [
        'prog5.c',
        # Deliberately reaches up the tree: the test verifies that files
        # referenced via ../../../../ paths still build under builddir only.
        '../../../../func5.c',
      ],
    },
  ],
}

15
tools/gyp/test/cflags/cflags.c

@ -1,15 +0,0 @@
/* Copyright (c) 2010 Google Inc. All rights reserved.
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file. */

#include <stdio.h>

/* Reports whether the compiler ran with optimization enabled: GCC/Clang
 * predefine __OPTIMIZE__ for any -O level above -O0. The gyp test toggles
 * CFLAGS and matches the printed line exactly. */
int main(int argc, char *argv[])
{
#ifdef __OPTIMIZE__
  printf("Using an optimization flag\n");
#else
  printf("Using no optimization flag\n");
#endif
  return 0;
}

16
tools/gyp/test/cflags/cflags.gyp

@ -1,16 +0,0 @@
# Copyright (c) 2010 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

# Minimal executable target; the optimization level is supplied via the
# CFLAGS environment variable by gyptest-cflags.py, not by this file.
{
  'targets': [
    {
      'target_name': 'cflags',
      'type': 'executable',
      # NOTE(review): 'opt' is not a standard gyp target key and appears to
      # be unused — confirm whether it can be removed.
      'opt': '-Os',
      'sources': [
        'cflags.c',
      ],
    },
  ],
}

65
tools/gyp/test/cflags/gyptest-cflags.py

@ -1,65 +0,0 @@
#!/usr/bin/env python

# Copyright (c) 2010 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""
Verifies that CFLAGS from the environment is honored at gyp time, and that
the environment is used again during auto-regeneration when the gyp file
changes.
"""

import os
import TestGyp

# Stack of saved environment snapshots, managed by PushEnv/PopEnv.
env_stack = []


def PushEnv():
  """Save a snapshot of the current environment for a later PopEnv."""
  env_copy = os.environ.copy()
  env_stack.append(env_copy)


def PopEnv():
  """Restore the most recently pushed environment snapshot.

  Mutates os.environ in place (clear + update) so the changes propagate to
  the real process environment via putenv.  The original implementation,
  `os.eniron = env_stack.pop()`, was a typo that set a nonexistent attribute
  and never restored anything; rebinding os.environ outright would also skip
  the putenv syncing.
  """
  saved = env_stack.pop()
  os.environ.clear()
  os.environ.update(saved)

# Regenerating build files when a gyp file changes is currently only supported
# by the make generator.
test = TestGyp.TestGyp(formats=['make'])

try:
  PushEnv()
  os.environ['CFLAGS'] = '-O0'
  test.run_gyp('cflags.gyp')
finally:
  # We clear the environ after calling gyp. When the auto-regeneration
  # happens, the same define should be reused anyway.
  PopEnv()

test.build('cflags.gyp')

expect = """\
Using no optimization flag
"""
test.run_built_executable('cflags', stdout=expect)

test.sleep()

try:
  PushEnv()
  os.environ['CFLAGS'] = '-O2'
  test.run_gyp('cflags.gyp')
finally:
  # We clear the environ after calling gyp. When the auto-regeneration
  # happens, the same define should be reused anyway.
  PopEnv()

test.build('cflags.gyp')

expect = """\
Using an optimization flag
"""
test.run_built_executable('cflags', stdout=expect)

test.pass_test()

29
tools/gyp/test/compilable/gyptest-headers.py

@ -1,29 +0,0 @@
#!/usr/bin/env python

# Copyright (c) 2009 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""
Checks that listing a .hpp header in a target's sources is harmless on every
platform: the header is skipped and the program still builds and runs.
"""

import TestGyp

gyp_test = TestGyp.TestGyp()

gyp_test.run_gyp('headers.gyp', chdir='src')
gyp_test.relocate('src', 'relocate/src')
gyp_test.build('headers.gyp', gyp_test.ALL, chdir='relocate/src')

expected_stdout = (
    'Hello from program.c\n'
    'Hello from lib1.c\n'
)
gyp_test.run_built_executable('program', chdir='relocate/src',
                              stdout=expected_stdout)

gyp_test.pass_test()

26
tools/gyp/test/compilable/src/headers.gyp

@ -1,26 +0,0 @@
# Copyright (c) 2009 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

{
  'targets': [
    {
      'target_name': 'program',
      'type': 'executable',
      'dependencies': [
        'lib1'
      ],
      'sources': [
        'program.cpp',
      ],
    },
    {
      'target_name': 'lib1',
      'type': 'static_library',
      'sources': [
        # lib1.hpp is listed on purpose: the test verifies that header
        # files in 'sources' are ignored rather than compiled.
        'lib1.hpp',
        'lib1.cpp',
      ],
    },
  ],
}

7
tools/gyp/test/compilable/src/lib1.cpp

@ -1,7 +0,0 @@
#include <stdio.h>

#include "lib1.hpp"

/* NOTE(review): the message says "lib1.c" although this file is lib1.cpp;
 * gyptest-headers.py matches that exact string, so it must not be "fixed"
 * without updating the test's expected output. */
void lib1_function(void) {
  fprintf(stdout, "Hello from lib1.c\n");
  fflush(stdout);
}

6
tools/gyp/test/compilable/src/lib1.hpp

@ -1,6 +0,0 @@
/* Include guard renamed from _lib1_hpp: identifiers beginning with an
 * underscore at file scope are reserved for the implementation in C/C++. */
#ifndef LIB1_HPP_
#define LIB1_HPP_

extern void lib1_function(void);

#endif  /* LIB1_HPP_ */

9
tools/gyp/test/compilable/src/program.cpp

@ -1,9 +0,0 @@
#include <stdio.h>

#include "lib1.hpp"

/* NOTE(review): the banner says "program.c" although this is program.cpp;
 * gyptest-headers.py matches that exact string, so leave it as-is. */
int main(int argc, char *argv[]) {
  fprintf(stdout, "Hello from program.c\n");
  fflush(stdout);
  lib1_function();
  return 0;
}

15
tools/gyp/test/configurations/basics/configurations.c

@ -1,15 +0,0 @@
#include <stdio.h>

/* Prints one line per macro defined by the active build configuration
 * (FOO / DEBUG / RELEASE); the gyp test matches the output exactly. */
int main(int argc, char *argv[])
{
#ifdef FOO
  printf("Foo configuration\n");
#endif
#ifdef DEBUG
  printf("Debug configuration\n");
#endif
#ifdef RELEASE
  printf("Release configuration\n");
#endif
  return 0;
}

32
tools/gyp/test/configurations/basics/configurations.gyp

@ -1,32 +0,0 @@
# Copyright (c) 2009 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

# One executable with three configurations; each defines a distinct macro
# that configurations.c echoes, so the test can tell which one was built.
{
  'targets': [
    {
      'target_name': 'configurations',
      'type': 'executable',
      'sources': [
        'configurations.c',
      ],
      'configurations': {
        'Debug': {
          'defines': [
            'DEBUG',
          ],
        },
        'Release': {
          'defines': [
            'RELEASE',
          ],
        },
        'Foo': {
          'defines': [
            'FOO',
          ],
        },
      }
    },
  ],
}

29
tools/gyp/test/configurations/basics/gyptest-configurations.py

@ -1,29 +0,0 @@
#!/usr/bin/env python

# Copyright (c) 2009 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""
Verifies build of an executable in three different configurations.
"""

import TestGyp

test = TestGyp.TestGyp()

test.run_gyp('configurations.gyp')

# Build and run once per configuration; each configuration defines a
# different macro, so the banner identifies which one was actually built.
for config in ('Release', 'Debug', 'Foo'):
  test.set_configuration(config)
  test.build('configurations.gyp')
  test.run_built_executable('configurations',
                            stdout=config + ' configuration\n')

test.pass_test()

21
tools/gyp/test/configurations/inheritance/configurations.c

@ -1,21 +0,0 @@
#include <stdio.h>

/* Prints one line per macro defined by the active configuration; the
 * inheritance test expects the inherited BASE/COMMON/COMMON2 macros to be
 * present alongside the concrete DEBUG or RELEASE one. */
int main(int argc, char *argv[])
{
#ifdef BASE
  printf("Base configuration\n");
#endif
#ifdef COMMON
  printf("Common configuration\n");
#endif
#ifdef COMMON2
  printf("Common2 configuration\n");
#endif
#ifdef DEBUG
  printf("Debug configuration\n");
#endif
#ifdef RELEASE
  printf("Release configuration\n");
#endif
  return 0;
}

40
tools/gyp/test/configurations/inheritance/configurations.gyp

@ -1,40 +0,0 @@
# Copyright (c) 2009 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

# Exercises configuration inheritance: Debug/Release inherit defines from
# abstract Common (which itself inherits Base) and from Common2.
{
  'target_defaults': {
    'configurations': {
      # 'abstract': 1 marks a configuration as a mix-in only — presumably
      # it cannot be built directly; confirm against the gyp docs.
      'Base': {
        'abstract': 1,
        'defines': ['BASE'],
      },
      'Common': {
        'abstract': 1,
        'inherit_from': ['Base'],
        'defines': ['COMMON'],
      },
      'Common2': {
        'abstract': 1,
        'defines': ['COMMON2'],
      },
      'Debug': {
        'inherit_from': ['Common', 'Common2'],
        'defines': ['DEBUG'],
      },
      'Release': {
        'inherit_from': ['Common', 'Common2'],
        'defines': ['RELEASE'],
      },
    },
  },
  'targets': [
    {
      'target_name': 'configurations',
      'type': 'executable',
      'sources': [
        'configurations.c',
      ],
    },
  ],
}

Some files were not shown because too many files changed in this diff

Loading…
Cancel
Save