mirror of https://github.com/lukechilds/node.git
Ryan Dahl
14 years ago
38 changed files with 18555 additions and 34 deletions
@ -0,0 +1,12 @@ |
|||
# Top-level build entry points; the real build rules are generated by GYP
# into out/ and delegated to from here.

# Build configuration; override on the command line, e.g. `make BUILDTYPE=Release`.
BUILDTYPE ?= Debug

# Default target: make sure the GYP-generated Makefile exists, then build it.
all: out/Makefile
	$(MAKE) -C out BUILDTYPE=$(BUILDTYPE)

# Generate the out/ build files from the .gyp sources.
out/Makefile:
	tools/gyp_node -f make

# Remove everything the generated build produced, including the generated
# Makefile itself.
distclean:
	rm -rf out

.PHONY: all distclean
@ -1,28 +0,0 @@ |
|||
#!/usr/bin/python

import glob
import os
import shlex
import sys

# Locate the repository root relative to this script's own location.
script_dir = os.path.dirname(__file__)
node_root = os.path.normpath(os.path.join(script_dir, os.pardir))

# Make the bundled copy of GYP importable before importing it.
sys.path.insert(0, os.path.join(node_root, 'gyp', 'gyp', 'pylib'))
import gyp


def run_gyp(args):
  """Runs gyp.main() with the given argument list.

  Exits the whole process with GYP's return code on failure.

  Args:
    args: list of command-line argument strings for gyp.main().
  """
  rc = gyp.main(args)
  if rc != 0:
    # FIX: the error message was printed to stdout via the Python 2 print
    # statement; diagnostics belong on stderr (this form also works on
    # Python 3).
    sys.stderr.write('Error running GYP\n')
    sys.exit(rc)


if __name__ == '__main__':
  args = sys.argv[1:]
  # Always generate from the top-level all.gyp with node's standard defines.
  args.append(os.path.join(script_dir, 'all.gyp'))
  args.append('--depth=' + node_root)
  args.append('-Dtarget_arch=x64')
  args.append('-Dcomponent=static_library')
  args.append('-Dlibrary=static_library')
  gyp_args = list(args)
  run_gyp(gyp_args)
@ -0,0 +1,6 @@ |
|||
# Names should be added to this file like so: |
|||
# Name or Organization <email address> |
|||
|
|||
Google Inc. |
|||
Steven Knight <knight@baldmt.com> |
|||
Ryan Norton <rnorton10@gmail.com> |
@ -0,0 +1,8 @@ |
|||
# DEPS file for gclient use in buildbot execution of gyp tests.
#
# (You don't need to use gclient for normal GYP development work.)

# Pin SCons (used by the gyp test harness) to a fixed Chromium SVN revision
# so buildbot runs are reproducible.
deps = {
  "scons":
    "http://src.chromium.org/svn/trunk/src/third_party/scons@44099",
}
@ -0,0 +1,27 @@ |
|||
Copyright (c) 2009 Google Inc. All rights reserved. |
|||
|
|||
Redistribution and use in source and binary forms, with or without |
|||
modification, are permitted provided that the following conditions are |
|||
met: |
|||
|
|||
* Redistributions of source code must retain the above copyright |
|||
notice, this list of conditions and the following disclaimer. |
|||
* Redistributions in binary form must reproduce the above |
|||
copyright notice, this list of conditions and the following disclaimer |
|||
in the documentation and/or other materials provided with the |
|||
distribution. |
|||
* Neither the name of Google Inc. nor the names of its |
|||
contributors may be used to endorse or promote products derived from |
|||
this software without specific prior written permission. |
|||
|
|||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS |
|||
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT |
|||
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR |
|||
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT |
|||
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, |
|||
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT |
|||
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, |
|||
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY |
|||
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT |
|||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE |
|||
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
@ -0,0 +1,10 @@ |
|||
# This file is used by gcl to get repository specific information. |
|||
CODE_REVIEW_SERVER: codereview.chromium.org |
|||
CC_LIST: gyp-developer@googlegroups.com |
|||
VIEW_VC: http://code.google.com/p/gyp/source/detail?r= |
|||
TRY_ON_UPLOAD: True |
|||
TRYSERVER_PROJECT: gyp |
|||
TRYSERVER_PATCHLEVEL: 0 |
|||
TRYSERVER_ROOT: trunk |
|||
TRYSERVER_SVN_URL: svn://svn.chromium.org/chrome-try/try-nacl |
|||
|
@ -0,0 +1,7 @@ |
|||
/* Copyright (c) 2009 Google Inc. All rights reserved.
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file. */

/* Minimal program that always exits successfully; presumably used by the
 * build system as a trivial compile/link check -- confirm against the
 * referencing .gyp file. */
int main() {
  return 0;
}
@ -0,0 +1,256 @@ |
|||
#!/usr/bin/env python |
|||
|
|||
# Copyright (c) 2011 Google Inc. All rights reserved. |
|||
# Use of this source code is governed by a BSD-style license that can be |
|||
# found in the LICENSE file. |
|||
|
|||
__doc__ = """ |
|||
gyptest.py -- test runner for GYP tests. |
|||
""" |
|||
|
|||
import os |
|||
import optparse |
|||
import subprocess |
|||
import sys |
|||
|
|||
class CommandRunner:
  """
  Executor class for commands, including "commands" implemented by
  Python functions.
  """
  # Class-level switches, toggled by the test driver before instantiation.
  verbose = True   # echo each command before running it
  active = True    # when False, execute() is a no-op returning 0

  def __init__(self, dictionary=None):
    """Initializes the runner with an optional %-format substitution dict.

    FIX: the default was a shared mutable dict ({}); None avoids the
    mutable-default-argument pitfall while remaining call-compatible.
    """
    self.subst_dictionary(dictionary if dictionary is not None else {})

  def subst_dictionary(self, dictionary):
    # Stored dict used by subst() when no explicit dictionary is given.
    self._subst_dictionary = dictionary

  def subst(self, string, dictionary=None):
    """
    Substitutes (via the format operator) the values in the specified
    dictionary into the specified command.

    The command can be an (action, string) tuple.  In all cases, we
    perform substitution on strings and don't worry if something isn't
    a string.  (It's probably a Python function to be executed.)
    """
    if dictionary is None:
      dictionary = self._subst_dictionary
    if dictionary:
      try:
        string = string % dictionary
      except TypeError:
        # Not a string (or no format fields): return it unchanged.
        pass
    return string

  def display(self, command, stdout=None, stderr=None):
    """Echoes the command about to run (no-op unless verbose)."""
    if not self.verbose:
      return
    if type(command) == type(()):
      # Function "command": show it as a call expression.
      func = command[0]
      args = command[1:]
      s = '%s(%s)' % (func.__name__, ', '.join(map(repr, args)))
    elif type(command) == type([]):
      # FIX: this branch was a separate `if`, so a tuple command fell
      # through to the `else` below, clobbering `s` with a non-string and
      # crashing on s.endswith().
      # TODO: quote arguments containing spaces
      # TODO: handle meta characters?
      s = ' '.join(command)
    else:
      s = self.subst(command)
    if not s.endswith('\n'):
      s += '\n'
    sys.stdout.write(s)
    sys.stdout.flush()

  def execute(self, command, stdout=None, stderr=None):
    """
    Executes a single command.

    Returns the command's exit status (0 when inactive), or the return
    value of the Python function for tuple "commands".
    """
    if not self.active:
      return 0
    if type(command) == type(''):
      # FIX: shlex was used here but never imported in this module;
      # import it locally where it is needed.
      import shlex
      command = self.subst(command)
      cmdargs = shlex.split(command)
      if cmdargs[0] == 'cd':
        # `cd` has to happen in-process; turn it into a function command.
        command = (os.chdir,) + tuple(cmdargs[1:])
    if type(command) == type(()):
      func = command[0]
      args = command[1:]
      return func(*args)
    else:
      if stdout is sys.stdout:
        # Same as passing sys.stdout, except python2.4 doesn't fail on it.
        subout = None
      else:
        # Open pipe for anything else so Popen works on python2.4.
        subout = subprocess.PIPE
      if stderr is sys.stderr:
        # Same as passing sys.stderr, except python2.4 doesn't fail on it.
        suberr = None
      elif stderr is None:
        # Merge with stdout if stderr isn't specified.
        suberr = subprocess.STDOUT
      else:
        # Open pipe for anything else so Popen works on python2.4.
        suberr = subprocess.PIPE
      p = subprocess.Popen(command,
                           shell=(sys.platform == 'win32'),
                           stdout=subout,
                           stderr=suberr)
      p.wait()
      if stdout is None:
        # Capture output on the runner for later inspection.
        self.stdout = p.stdout.read()
      elif stdout is not sys.stdout:
        stdout.write(p.stdout.read())
      if stderr not in (None, sys.stderr):
        stderr.write(p.stderr.read())
      return p.returncode

  def run(self, command, display=None, stdout=None, stderr=None):
    """
    Runs a single command, displaying it first.
    """
    if display is None:
      display = command
    self.display(display)
    return self.execute(command, stdout, stderr)
|||
|
|||
|
|||
class Unbuffered:
  """File-object proxy that flushes the wrapped stream after every write.

  Everything except write() is delegated to the underlying stream.
  """

  def __init__(self, stream):
    self.fp = stream

  def write(self, data):
    # Write, then push the data out immediately.
    self.fp.write(data)
    self.fp.flush()

  def __getattr__(self, name):
    # Delegate any other attribute access to the wrapped stream.
    return getattr(self.fp, name)


# Make test output appear as soon as it is produced.
sys.stdout = Unbuffered(sys.stdout)
sys.stderr = Unbuffered(sys.stderr)
|||
|
|||
|
|||
def find_all_gyptest_files(directory):
  """Returns a sorted list of gyptest*.py scripts found under directory.

  Directories named .svn are pruned from the walk.
  """
  matches = []
  for dirpath, dirnames, filenames in os.walk(directory):
    if '.svn' in dirnames:
      # Prune: do not descend into version-control metadata.
      dirnames.remove('.svn')
    for filename in filenames:
      if filename.startswith('gyptest') and filename.endswith('.py'):
        matches.append(os.path.join(dirpath, filename))
  return sorted(matches)
|||
|
|||
|
|||
def main(argv=None):
  """Command-line entry point for the gyp test runner.

  Discovers gyptest*.py scripts, runs each one under every applicable
  generator format, and reports passed/failed/no-result tallies.
  Returns 1 if any test failed, 0 otherwise.
  """
  if argv is None:
    argv = sys.argv

  usage = "gyptest.py [-ahlnq] [-f formats] [test ...]"
  parser = optparse.OptionParser(usage=usage)
  parser.add_option("-a", "--all", action="store_true",
            help="run all tests")
  parser.add_option("-C", "--chdir", action="store", default=None,
            help="chdir to the specified directory")
  parser.add_option("-f", "--format", action="store", default='',
            help="run tests with the specified formats")
  parser.add_option("-l", "--list", action="store_true",
            help="list available tests and exit")
  parser.add_option("-n", "--no-exec", action="store_true",
            help="no execute, just print the command line")
  parser.add_option("--passed", action="store_true",
            help="report passed tests")
  parser.add_option("--path", action="append", default=[],
            help="additional $PATH directory")
  parser.add_option("-q", "--quiet", action="store_true",
            help="quiet, don't print test command lines")
  opts, args = parser.parse_args(argv[1:])

  if opts.chdir:
    os.chdir(opts.chdir)

  if opts.path:
    # NOTE(review): ':' separator assumes a POSIX $PATH; on win32 this
    # would need os.pathsep -- confirm intended platforms.
    os.environ['PATH'] += ':' + ':'.join(opts.path)

  if not args:
    # Require an explicit -a before running the whole suite.
    if not opts.all:
      sys.stderr.write('Specify -a to get all tests.\n')
      return 1
    args = ['test']

  # Expand directory arguments into the individual test scripts they hold.
  tests = []
  for arg in args:
    if os.path.isdir(arg):
      tests.extend(find_all_gyptest_files(os.path.normpath(arg)))
    else:
      tests.append(arg)

  if opts.list:
    for test in tests:
      print test
    sys.exit(0)

  # Configure the runner globally before creating it.
  CommandRunner.verbose = not opts.quiet
  CommandRunner.active = not opts.no_exec
  cr = CommandRunner()

  # Child test scripts import their harness from test/lib.
  os.environ['PYTHONPATH'] = os.path.abspath('test/lib')
  if not opts.quiet:
    sys.stdout.write('PYTHONPATH=%s\n' % os.environ['PYTHONPATH'])

  passed = []
  failed = []
  no_result = []

  if opts.format:
    format_list = opts.format.split(',')
  else:
    # TODO:  not duplicate this mapping from pylib/gyp/__init__.py
    # Default generator format(s) per host platform.
    format_list = {
      'freebsd7': ['make'],
      'freebsd8': ['make'],
      'cygwin': ['msvs'],
      'win32': ['msvs'],
      'linux2': ['make'],
      'linux3': ['make'],
      'darwin': ['make', 'xcode'],
    }[sys.platform]

  for format in format_list:
    # Tell the child test scripts which generator to exercise.
    os.environ['TESTGYP_FORMAT'] = format
    if not opts.quiet:
      sys.stdout.write('TESTGYP_FORMAT=%s\n' % format)

    for test in tests:
      status = cr.run([sys.executable, test],
              stdout=sys.stdout,
              stderr=sys.stderr)
      # Exit status 2 means "no result" by convention here; any other
      # nonzero status is a failure.
      if status == 2:
        no_result.append(test)
      elif status:
        failed.append(test)
      else:
        passed.append(test)

  if not opts.quiet:
    def report(description, tests):
      # Prints a summary section for one outcome category.
      if tests:
        if len(tests) == 1:
          sys.stdout.write("\n%s the following test:\n" % description)
        else:
          fmt = "\n%s the following %d tests:\n"
          sys.stdout.write(fmt % (description, len(tests)))
        sys.stdout.write("\t" + "\n\t".join(tests) + "\n")

    if opts.passed:
      report("Passed", passed)
    report("Failed", failed)
    report("No result from", no_result)

  if failed:
    return 1
  else:
    return 0


if __name__ == "__main__":
  sys.exit(main())
@ -0,0 +1,341 @@ |
|||
#!/usr/bin/python2.4 |
|||
|
|||
# Copyright (c) 2009 Google Inc. All rights reserved. |
|||
# Use of this source code is governed by a BSD-style license that can be |
|||
# found in the LICENSE file. |
|||
|
|||
"""New implementation of Visual Studio project generation for SCons.""" |
|||
|
|||
import common |
|||
import os |
|||
import random |
|||
|
|||
import gyp.common |
|||
|
|||
# hashlib is supplied as of Python 2.5 as the replacement interface for md5
# and other secure hashes.  In 2.6, md5 is deprecated.  Import hashlib if
# available, avoiding a deprecation warning under 2.6.  Import md5 otherwise,
# preserving 2.4 compatibility.
try:
  import hashlib
  _new_md5 = hashlib.md5
except ImportError:
  import md5
  _new_md5 = md5.new


# Initialize random number generator
# NOTE(review): nothing in this view uses the random module after seeding;
# presumably retained for callers elsewhere -- confirm before removing.
random.seed()

# GUIDs for project types
# NOTE(review): these appear to be the well-known fixed Visual Studio
# entry-type GUIDs (C++ project and solution folder) -- confirm against
# MSVS documentation before relying on that reading.
ENTRY_TYPE_GUIDS = {
    'project': '{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}',
    'folder': '{2150E333-8FDC-42A3-9474-1A3956D46DE8}',
}
|||
|
|||
#------------------------------------------------------------------------------ |
|||
# Helper functions |
|||
|
|||
|
|||
def MakeGuid(name, seed='msvs_new'):
  """Returns a GUID for the specified target name.

  Args:
    name: Target name.
    seed: Seed for MD5 hash.
  Returns:
    A GUID-line string calculated from the name and seed.

  This generates something which looks like a GUID, but depends only on the
  name and seed.  This means the same name/seed will always generate the same
  GUID, so that projects and solutions which refer to each other can explicitly
  determine the GUID to refer to explicitly.  It also means that the GUID will
  not change when the project for a target is rebuilt.
  """
  # Calculate a MD5 signature for the seed and name.
  # FIX: encode the input explicitly -- hashlib requires bytes on Python 3,
  # and str.encode('utf-8') is a no-op for the ASCII names used here on
  # Python 2.
  d = _new_md5((str(seed) + str(name)).encode('utf-8')).hexdigest().upper()
  # Convert most of the signature to GUID form (discard the rest)
  guid = ('{' + d[:8] + '-' + d[8:12] + '-' + d[12:16] + '-' + d[16:20]
          + '-' + d[20:32] + '}')
  return guid
|||
|
|||
#------------------------------------------------------------------------------ |
|||
|
|||
|
|||
class MSVSFolder:
  """Folder in a Visual Studio project or solution."""

  def __init__(self, path, name = None, entries = None,
               guid = None, items = None):
    """Initializes the folder.

    Args:
      path: Full path to the folder.
      name: Name of the folder.
      entries: List of folder entries to nest inside this folder.  May contain
          Folder or Project objects.  May be None, if the folder is empty.
      guid: GUID to use for folder, if not None.
      items: List of solution items to include in the folder project.  May be
          None, if the folder does not directly contain items.
    """
    # Default the display name to the last path component.
    self.name = name if name else os.path.basename(path)

    self.path = path
    self.guid = guid

    # Copy the incoming lists so later caller mutation cannot leak in.
    self.entries = list(entries) if entries else []
    self.items = list(items) if items else []

    self.entry_type_guid = ENTRY_TYPE_GUIDS['folder']

  def get_guid(self):
    """Returns this folder's GUID, deriving a stable one on first use."""
    if self.guid is None:
      # Use consistent guids for folders (so things don't regenerate).
      self.guid = MakeGuid(self.path, seed='msvs_folder')
    return self.guid
|||
|
|||
|
|||
#------------------------------------------------------------------------------ |
|||
|
|||
|
|||
class MSVSProject:
  """Visual Studio project."""

  def __init__(self, path, name = None, dependencies = None, guid = None,
               spec = None, build_file = None, config_platform_overrides = None,
               fixpath_prefix = None):
    """Initializes the project.

    Args:
      path: Absolute path to the project file.
      name: Name of project.  If None, the name will be the same as the base
          name of the project file.
      dependencies: List of other Project objects this project is dependent
          upon, if not None.
      guid: GUID to use for project, if not None.
      spec: Dictionary specifying how to build this project.
      build_file: Filename of the .gyp file that the vcproj file comes from.
      config_platform_overrides: optional dict of configuration platforms to
          used in place of the default for this target.
      fixpath_prefix: the path used to adjust the behavior of _fixpath
    """
    self.path = path
    self.guid = guid
    self.spec = spec
    self.build_file = build_file
    # Use project filename if name not specified
    self.name = name or os.path.splitext(os.path.basename(path))[0]

    # Copy passed lists (or set to empty lists)
    self.dependencies = list(dependencies or [])

    self.entry_type_guid = ENTRY_TYPE_GUIDS['project']

    if config_platform_overrides:
      self.config_platform_overrides = config_platform_overrides
    else:
      self.config_platform_overrides = {}
    self.fixpath_prefix = fixpath_prefix

  def set_dependencies(self, dependencies):
    """Replaces (does not extend) the dependency list; copies the input."""
    self.dependencies = list(dependencies or [])

  def get_guid(self):
    """Returns this project's GUID, deriving one from its name on first use."""
    if self.guid is None:
      # Set GUID from path
      # TODO(rspangler): This is fragile.
      # 1. We can't just use the project filename sans path, since there could
      #    be multiple projects with the same base name (for example,
      #    foo/unittest.vcproj and bar/unittest.vcproj).
      # 2. The path needs to be relative to $SOURCE_ROOT, so that the project
      #    GUID is the same whether it's included from base/base.sln or
      #    foo/bar/baz/baz.sln.
      # 3. The GUID needs to be the same each time this builder is invoked, so
      #    that we don't need to rebuild the solution when the project changes.
      # 4. We should be able to handle pre-built project files by reading the
      #    GUID from the files.
      self.guid = MakeGuid(self.name)
    return self.guid
|||
|
|||
#------------------------------------------------------------------------------ |
|||
|
|||
|
|||
class MSVSSolution:
  """Visual Studio solution."""

  def __init__(self, path, version, entries=None, variants=None,
               websiteProperties=True):
    """Initializes the solution.

    Args:
      path: Path to solution file.
      version: Format version to emit.
      entries: List of entries in solution.  May contain Folder or Project
          objects.  May be None, if the folder is empty.
      variants: List of build variant strings.  If none, a default list will
          be used.
      websiteProperties: Flag to decide if the website properties section
          is generated.
    """
    self.path = path
    self.websiteProperties = websiteProperties
    self.version = version

    # Copy passed lists (or set to empty lists)
    self.entries = list(entries or [])

    if variants:
      # Copy passed list
      self.variants = variants[:]
    else:
      # Use default
      self.variants = ['Debug|Win32', 'Release|Win32']
    # TODO(rspangler): Need to be able to handle a mapping of solution config
    # to project config.  Should we be able to handle variants being a dict,
    # or add a separate variant_map variable?  If it's a dict, we can't
    # guarantee the order of variants since dict keys aren't ordered.

    # TODO(rspangler): Automatically write to disk for now; should delay until
    # node-evaluation time.
    self.Write()

  def Write(self, writer=common.WriteOnDiff):
    """Writes the solution file to disk.

    Raises:
      IndexError: An entry appears multiple times.
    """
    # Walk the entry tree and collect all the folders and projects.
    all_entries = []
    entries_to_check = self.entries[:]
    while entries_to_check:
      # Pop from the beginning of the list to preserve the user's order.
      e = entries_to_check.pop(0)

      # A project or folder can only appear once in the solution's folder tree.
      # This also protects from cycles.
      if e in all_entries:
        #raise IndexError('Entry "%s" appears more than once in solution' %
        #                 e.name)
        continue

      all_entries.append(e)

      # If this is a folder, check its entries too.
      if isinstance(e, MSVSFolder):
        entries_to_check += e.entries

    # Sort by name then guid (so things are in order on vs2008).
    # NOTE(review): a cmp-style comparator passed positionally to sorted()
    # is Python 2 only syntax.
    def NameThenGuid(a, b):
      if a.name < b.name: return -1
      if a.name > b.name: return 1
      if a.get_guid() < b.get_guid(): return -1
      if a.get_guid() > b.get_guid(): return 1
      return 0

    all_entries = sorted(all_entries, NameThenGuid)

    # Open file and print header
    f = writer(self.path)
    f.write('Microsoft Visual Studio Solution File, '
            'Format Version %s\r\n' % self.version.SolutionVersion())
    f.write('# %s\r\n' % self.version.Description())

    # Project entries
    sln_root = os.path.split(self.path)[0]
    for e in all_entries:
      relative_path = gyp.common.RelativePath(e.path, sln_root)
      f.write('Project("%s") = "%s", "%s", "%s"\r\n' % (
          e.entry_type_guid,          # Entry type GUID
          e.name,                     # Folder name
          relative_path.replace('/', '\\'),  # Folder name (again)
          e.get_guid(),               # Entry GUID
      ))

      # TODO(rspangler): Need a way to configure this stuff
      if self.websiteProperties:
        f.write('\tProjectSection(WebsiteProperties) = preProject\r\n'
                '\t\tDebug.AspNetCompiler.Debug = "True"\r\n'
                '\t\tRelease.AspNetCompiler.Debug = "False"\r\n'
                '\tEndProjectSection\r\n')

      # Folder entries may carry loose solution items.
      if isinstance(e, MSVSFolder):
        if e.items:
          f.write('\tProjectSection(SolutionItems) = preProject\r\n')
          for i in e.items:
            f.write('\t\t%s = %s\r\n' % (i, i))
          f.write('\tEndProjectSection\r\n')

      # Project entries may carry build-order dependencies.
      if isinstance(e, MSVSProject):
        if e.dependencies:
          f.write('\tProjectSection(ProjectDependencies) = postProject\r\n')
          for d in e.dependencies:
            f.write('\t\t%s = %s\r\n' % (d.get_guid(), d.get_guid()))
          f.write('\tEndProjectSection\r\n')

      f.write('EndProject\r\n')

    # Global section
    f.write('Global\r\n')

    # Configurations (variants)
    f.write('\tGlobalSection(SolutionConfigurationPlatforms) = preSolution\r\n')
    for v in self.variants:
      f.write('\t\t%s = %s\r\n' % (v, v))
    f.write('\tEndGlobalSection\r\n')

    # Sort config guids for easier diffing of solution changes.
    config_guids = []
    config_guids_overrides = {}
    for e in all_entries:
      if isinstance(e, MSVSProject):
        config_guids.append(e.get_guid())
        config_guids_overrides[e.get_guid()] = e.config_platform_overrides
    config_guids.sort()

    f.write('\tGlobalSection(ProjectConfigurationPlatforms) = postSolution\r\n')
    for g in config_guids:
      for v in self.variants:
        nv = config_guids_overrides[g].get(v, v)
        # Pick which project configuration to build for this solution
        # configuration.
        f.write('\t\t%s.%s.ActiveCfg = %s\r\n' % (
            g,              # Project GUID
            v,              # Solution build configuration
            nv,             # Project build config for that solution config
        ))

        # Enable project in this solution configuration.
        f.write('\t\t%s.%s.Build.0 = %s\r\n' % (
            g,              # Project GUID
            v,              # Solution build configuration
            nv,             # Project build config for that solution config
        ))
    f.write('\tEndGlobalSection\r\n')

    # TODO(rspangler): Should be able to configure this stuff too (though I've
    # never seen this be any different)
    f.write('\tGlobalSection(SolutionProperties) = preSolution\r\n')
    f.write('\t\tHideSolutionNode = FALSE\r\n')
    f.write('\tEndGlobalSection\r\n')

    # Folder mappings
    # TODO(rspangler): Should omit this section if there are no folders
    f.write('\tGlobalSection(NestedProjects) = preSolution\r\n')
    for e in all_entries:
      if not isinstance(e, MSVSFolder):
        continue        # Does not apply to projects, only folders
      for subentry in e.entries:
        f.write('\t\t%s = %s\r\n' % (subentry.get_guid(), e.get_guid()))
    f.write('\tEndGlobalSection\r\n')

    f.write('EndGlobal\r\n')

    f.close()
@ -0,0 +1,245 @@ |
|||
#!/usr/bin/python2.4 |
|||
|
|||
# Copyright (c) 2009 Google Inc. All rights reserved. |
|||
# Use of this source code is governed by a BSD-style license that can be |
|||
# found in the LICENSE file. |
|||
|
|||
"""Visual Studio project reader/writer.""" |
|||
|
|||
import common |
|||
import xml.dom |
|||
import xml_fix |
|||
|
|||
#------------------------------------------------------------------------------ |
|||
|
|||
|
|||
class Tool(object):
  """Visual Studio tool."""

  def __init__(self, name, attrs=None):
    """Initializes the tool.

    Args:
      name: Tool name.
      attrs: Dict of tool attributes; may be None.
    """
    self.name = name
    self.attrs = attrs if attrs else {}

  def CreateElement(self, doc):
    """Creates an element for the tool.

    Args:
      doc: xml.dom.Document object to use for node creation.

    Returns:
      A new xml.dom.Element for the tool.
    """
    element = doc.createElement('Tool')
    element.setAttribute('Name', self.name)
    for attr_name, attr_value in self.attrs.items():
      element.setAttribute(attr_name, attr_value)
    return element
|||
|
|||
|
|||
class Filter(object):
  """Visual Studio filter - that is, a virtual folder."""

  def __init__(self, name, contents=None):
    """Initializes the folder.

    Args:
      name: Filter (folder) name.
      contents: List of filenames and/or Filter objects contained.
    """
    self.name = name
    # Copy the list so later mutation of the caller's list cannot leak in.
    self.contents = list(contents) if contents else []
|||
|
|||
|
|||
#------------------------------------------------------------------------------ |
|||
|
|||
|
|||
class Writer(object): |
|||
"""Visual Studio XML project writer.""" |
|||
|
|||
def __init__(self, project_path, version): |
|||
"""Initializes the project. |
|||
|
|||
Args: |
|||
project_path: Path to the project file. |
|||
version: Format version to emit. |
|||
""" |
|||
self.project_path = project_path |
|||
self.doc = None |
|||
self.version = version |
|||
|
|||
def Create(self, name, guid=None, platforms=None): |
|||
"""Creates the project document. |
|||
|
|||
Args: |
|||
name: Name of the project. |
|||
guid: GUID to use for project, if not None. |
|||
""" |
|||
self.name = name |
|||
self.guid = guid |
|||
|
|||
# Default to Win32 for platforms. |
|||
if not platforms: |
|||
platforms = ['Win32'] |
|||
|
|||
# Create XML doc |
|||
xml_impl = xml.dom.getDOMImplementation() |
|||
self.doc = xml_impl.createDocument(None, 'VisualStudioProject', None) |
|||
|
|||
# Add attributes to root element |
|||
self.n_root = self.doc.documentElement |
|||
self.n_root.setAttribute('ProjectType', 'Visual C++') |
|||
self.n_root.setAttribute('Version', self.version.ProjectVersion()) |
|||
self.n_root.setAttribute('Name', self.name) |
|||
self.n_root.setAttribute('ProjectGUID', self.guid) |
|||
self.n_root.setAttribute('RootNamespace', self.name) |
|||
self.n_root.setAttribute('Keyword', 'Win32Proj') |
|||
|
|||
# Add platform list |
|||
n_platform = self.doc.createElement('Platforms') |
|||
self.n_root.appendChild(n_platform) |
|||
for platform in platforms: |
|||
n = self.doc.createElement('Platform') |
|||
n.setAttribute('Name', platform) |
|||
n_platform.appendChild(n) |
|||
|
|||
# Add tool files section |
|||
self.n_tool_files = self.doc.createElement('ToolFiles') |
|||
self.n_root.appendChild(self.n_tool_files) |
|||
|
|||
# Add configurations section |
|||
self.n_configs = self.doc.createElement('Configurations') |
|||
self.n_root.appendChild(self.n_configs) |
|||
|
|||
# Add empty References section |
|||
self.n_root.appendChild(self.doc.createElement('References')) |
|||
|
|||
# Add files section |
|||
self.n_files = self.doc.createElement('Files') |
|||
self.n_root.appendChild(self.n_files) |
|||
# Keep a dict keyed on filename to speed up access. |
|||
self.n_files_dict = dict() |
|||
|
|||
# Add empty Globals section |
|||
self.n_root.appendChild(self.doc.createElement('Globals')) |
|||
|
|||
def AddToolFile(self, path): |
|||
"""Adds a tool file to the project. |
|||
|
|||
Args: |
|||
path: Relative path from project to tool file. |
|||
""" |
|||
n_tool = self.doc.createElement('ToolFile') |
|||
n_tool.setAttribute('RelativePath', path) |
|||
self.n_tool_files.appendChild(n_tool) |
|||
|
|||
def _AddConfigToNode(self, parent, config_type, config_name, attrs=None, |
|||
tools=None): |
|||
"""Adds a configuration to the parent node. |
|||
|
|||
Args: |
|||
parent: Destination node. |
|||
config_type: Type of configuration node. |
|||
config_name: Configuration name. |
|||
attrs: Dict of configuration attributes; may be None. |
|||
tools: List of tools (strings or Tool objects); may be None. |
|||
""" |
|||
# Handle defaults |
|||
if not attrs: |
|||
attrs = {} |
|||
if not tools: |
|||
tools = [] |
|||
|
|||
# Add configuration node and its attributes |
|||
n_config = self.doc.createElement(config_type) |
|||
n_config.setAttribute('Name', config_name) |
|||
for k, v in attrs.items(): |
|||
n_config.setAttribute(k, v) |
|||
parent.appendChild(n_config) |
|||
|
|||
# Add tool nodes and their attributes |
|||
if tools: |
|||
for t in tools: |
|||
if isinstance(t, Tool): |
|||
n_config.appendChild(t.CreateElement(self.doc)) |
|||
else: |
|||
n_config.appendChild(Tool(t).CreateElement(self.doc)) |
|||
|
|||
def AddConfig(self, name, attrs=None, tools=None): |
|||
"""Adds a configuration to the project. |
|||
|
|||
Args: |
|||
name: Configuration name. |
|||
attrs: Dict of configuration attributes; may be None. |
|||
tools: List of tools (strings or Tool objects); may be None. |
|||
""" |
|||
self._AddConfigToNode(self.n_configs, 'Configuration', name, attrs, tools) |
|||
|
|||
def _AddFilesToNode(self, parent, files): |
|||
"""Adds files and/or filters to the parent node. |
|||
|
|||
Args: |
|||
parent: Destination node |
|||
files: A list of Filter objects and/or relative paths to files. |
|||
|
|||
Will call itself recursively, if the files list contains Filter objects. |
|||
""" |
|||
for f in files: |
|||
if isinstance(f, Filter): |
|||
node = self.doc.createElement('Filter') |
|||
node.setAttribute('Name', f.name) |
|||
self._AddFilesToNode(node, f.contents) |
|||
else: |
|||
node = self.doc.createElement('File') |
|||
node.setAttribute('RelativePath', f) |
|||
self.n_files_dict[f] = node |
|||
parent.appendChild(node) |
|||
|
|||
def AddFiles(self, files): |
|||
"""Adds files to the project. |
|||
|
|||
Args: |
|||
files: A list of Filter objects and/or relative paths to files. |
|||
|
|||
This makes a copy of the file/filter tree at the time of this call. If you |
|||
later add files to a Filter object which was passed into a previous call |
|||
to AddFiles(), it will not be reflected in this project. |
|||
""" |
|||
self._AddFilesToNode(self.n_files, files) |
|||
# TODO(rspangler) This also doesn't handle adding files to an existing |
|||
# filter. That is, it doesn't merge the trees. |
|||
|
|||
def AddFileConfig(self, path, config, attrs=None, tools=None): |
|||
"""Adds a configuration to a file. |
|||
|
|||
Args: |
|||
path: Relative path to the file. |
|||
config: Name of configuration to add. |
|||
attrs: Dict of configuration attributes; may be None. |
|||
tools: List of tools (strings or Tool objects); may be None. |
|||
|
|||
Raises: |
|||
ValueError: Relative path does not match any file added via AddFiles(). |
|||
""" |
|||
# Find the file node with the right relative path |
|||
parent = self.n_files_dict.get(path) |
|||
if not parent: |
|||
raise ValueError('AddFileConfig: file "%s" not in project.' % path) |
|||
|
|||
# Add the config to the file node |
|||
self._AddConfigToNode(parent, 'FileConfiguration', config, attrs, tools) |
|||
|
|||
def Write(self, writer=common.WriteOnDiff):
  """Writes the project file."""
  # NOTE(review): the default writer is common.WriteOnDiff, which
  # presumably only rewrites the file on disk when contents change —
  # confirm against MSVSNew/common.
  f = writer(self.project_path)
  # xml_fix patches minidom output quirks; Cleanup() below undoes the patch.
  fix = xml_fix.XmlFix()
  self.doc.writexml(f, encoding='Windows-1252', addindent='  ', newl='\r\n')
  fix.Cleanup()
  f.close()
|||
|
|||
#------------------------------------------------------------------------------ |
File diff suppressed because it is too large
File diff suppressed because it is too large
@ -0,0 +1,81 @@ |
|||
#!/usr/bin/python2.4 |
|||
|
|||
# Copyright (c) 2009 Google Inc. All rights reserved. |
|||
# Use of this source code is governed by a BSD-style license that can be |
|||
# found in the LICENSE file. |
|||
|
|||
"""Visual Studio project reader/writer.""" |
|||
|
|||
import common |
|||
import xml.dom |
|||
import xml_fix |
|||
|
|||
|
|||
#------------------------------------------------------------------------------ |
|||
|
|||
|
|||
class Writer(object):
  """Visual Studio XML tool (.rules) file writer."""

  def __init__(self, tool_file_path):
    """Remember where the tool file will be written.

    Args:
      tool_file_path: Path to the tool file.
    """
    self.tool_file_path = tool_file_path
    self.doc = None

  def Create(self, name):
    """Build the skeleton XML document for the tool file.

    Args:
      name: Name of the tool file.
    """
    self.name = name

    # Root document and element.
    impl = xml.dom.getDOMImplementation()
    self.doc = impl.createDocument(None, 'VisualStudioToolFile', None)
    root = self.doc.documentElement
    root.setAttribute('Version', '8.00')
    root.setAttribute('Name', self.name)
    self.n_root = root

    # Container for the custom build rules.
    rules = self.doc.createElement('Rules')
    root.appendChild(rules)
    self.n_rules = rules

  def AddCustomBuildRule(self, name, cmd, description,
                         additional_dependencies,
                         outputs, extensions):
    """Append a CustomBuildRule element to the tool file.

    Args:
      name: Name of the rule.
      cmd: Command line of the rule.
      description: Description of the rule.
      additional_dependencies: Other files which may trigger the rule.
      outputs: Outputs of the rule.
      extensions: File extensions handled by the rule.
    """
    rule = self.doc.createElement('CustomBuildRule')
    # Attribute order matches the original writer's setAttribute sequence.
    for attr, value in [('Name', name),
                        ('ExecutionDescription', description),
                        ('CommandLine', cmd),
                        ('Outputs', ';'.join(outputs)),
                        ('FileExtensions', ';'.join(extensions)),
                        ('AdditionalDependencies',
                         ';'.join(additional_dependencies))]:
      rule.setAttribute(attr, value)
    self.n_rules.appendChild(rule)

  def Write(self, writer=common.WriteOnDiff):
    """Writes the tool file."""
    f = writer(self.tool_file_path)
    fix = xml_fix.XmlFix()
    self.doc.writexml(f, encoding='Windows-1252', addindent='  ', newl='\r\n')
    fix.Cleanup()
    f.close()
|||
|
|||
#------------------------------------------------------------------------------ |
@ -0,0 +1,182 @@ |
|||
#!/usr/bin/python2.4 |
|||
|
|||
# Copyright (c) 2009 Google Inc. All rights reserved. |
|||
# Use of this source code is governed by a BSD-style license that can be |
|||
# found in the LICENSE file. |
|||
|
|||
"""Visual Studio user preferences file writer.""" |
|||
|
|||
import common |
|||
import os |
|||
import re |
|||
import socket # for gethostname |
|||
import xml.dom |
|||
import xml_fix |
|||
|
|||
|
|||
#------------------------------------------------------------------------------ |
|||
|
|||
def _FindCommandInPath(command): |
|||
"""If there are no slashes in the command given, this function |
|||
searches the PATH env to find the given command, and converts it |
|||
to an absolute path. We have to do this because MSVS is looking |
|||
for an actual file to launch a debugger on, not just a command |
|||
line. Note that this happens at GYP time, so anything needing to |
|||
be built needs to have a full path.""" |
|||
if '/' in command or '\\' in command: |
|||
# If the command already has path elements (either relative or |
|||
# absolute), then assume it is constructed properly. |
|||
return command |
|||
else: |
|||
# Search through the path list and find an existing file that |
|||
# we can access. |
|||
paths = os.environ.get('PATH','').split(os.pathsep) |
|||
for path in paths: |
|||
item = os.path.join(path, command) |
|||
if os.path.isfile(item) and os.access(item, os.X_OK): |
|||
return item |
|||
return command |
|||
|
|||
def _QuoteWin32CommandLineArgs(args): |
|||
new_args = [] |
|||
for arg in args: |
|||
# Replace all double-quotes with double-double-quotes to escape |
|||
# them for cmd shell, and then quote the whole thing if there |
|||
# are any. |
|||
if arg.find('"') != -1: |
|||
arg = '""'.join(arg.split('"')) |
|||
arg = '"%s"' % arg |
|||
|
|||
# Otherwise, if there are any spaces, quote the whole arg. |
|||
elif re.search(r'[ \t\n]', arg): |
|||
arg = '"%s"' % arg |
|||
new_args.append(arg) |
|||
return new_args |
|||
|
|||
class Writer(object):
  """Visual Studio XML user file (.vcproj.user) writer."""

  def __init__(self, user_file_path, version):
    """Initializes the user file.

    Args:
      user_file_path: Path to the user file.
      version: Object whose ProjectVersion() supplies the root element's
          Version attribute (a VisualStudioVersion, presumably — confirm
          against callers).
    """
    self.user_file_path = user_file_path
    self.version = version
    self.doc = None

  def Create(self, name):
    """Creates the user file document.

    Args:
      name: Name of the user file.
    """
    self.name = name

    # Create XML doc
    xml_impl = xml.dom.getDOMImplementation()
    self.doc = xml_impl.createDocument(None, 'VisualStudioUserFile', None)

    # Add attributes to root element
    self.n_root = self.doc.documentElement
    self.n_root.setAttribute('Version', self.version.ProjectVersion())
    self.n_root.setAttribute('Name', self.name)

    # Add configurations section
    self.n_configs = self.doc.createElement('Configurations')
    self.n_root.appendChild(self.n_configs)

  def _AddConfigToNode(self, parent, config_type, config_name):
    """Adds a configuration to the parent node.

    Args:
      parent: Destination node.
      config_type: Type of configuration node.
      config_name: Configuration name.
    """
    # Add configuration node and its attributes
    n_config = self.doc.createElement(config_type)
    n_config.setAttribute('Name', config_name)
    parent.appendChild(n_config)

  def AddConfig(self, name):
    """Adds a configuration to the project.

    Args:
      name: Configuration name.
    """
    self._AddConfigToNode(self.n_configs, 'Configuration', name)


  def AddDebugSettings(self, config_name, command, environment = {},
                       working_directory=""):
    """Adds a DebugSettings node to the user file for a particular config.

    Args:
      config_name: Name of the configuration to attach the settings to;
          created via AddConfig() below if not already present.
      command: command line to run.  First element in the list is the
          executable.  All elements of the command will be quoted if
          necessary.
      environment: Dict of NAME: value environment settings (optional).
          NOTE(review): mutable default argument — harmless here because
          it is only read, never mutated, but prefer None in new code.
      working_directory: Working directory for the command (optional).
    """
    command = _QuoteWin32CommandLineArgs(command)

    n_cmd = self.doc.createElement('DebugSettings')
    # MSVS wants an actual file to debug, so resolve the executable now.
    abs_command = _FindCommandInPath(command[0])
    n_cmd.setAttribute('Command', abs_command)
    n_cmd.setAttribute('WorkingDirectory', working_directory)
    n_cmd.setAttribute('CommandArguments', " ".join(command[1:]))
    n_cmd.setAttribute('RemoteMachine', socket.gethostname())

    # Serialize the environment as space-separated NAME="value" pairs;
    # anything that is not a non-empty dict yields an empty attribute.
    if environment and isinstance(environment, dict):
      n_cmd.setAttribute('Environment',
                         " ".join(['%s="%s"' % (key, val)
                                   for (key,val) in environment.iteritems()]))
    else:
      n_cmd.setAttribute('Environment', '')

    n_cmd.setAttribute('EnvironmentMerge', 'true')

    # Currently these are all "dummy" values that we're just setting
    # in the default manner that MSVS does it.  We could use some of
    # these to add additional capabilities, I suppose, but they might
    # not have parity with other platforms then.
    n_cmd.setAttribute('Attach', 'false')
    n_cmd.setAttribute('DebuggerType', '3')  # 'auto' debugger
    n_cmd.setAttribute('Remote', '1')
    n_cmd.setAttribute('RemoteCommand', '')
    n_cmd.setAttribute('HttpUrl', '')
    n_cmd.setAttribute('PDBPath', '')
    n_cmd.setAttribute('SQLDebugging', '')
    n_cmd.setAttribute('DebuggerFlavor', '0')
    n_cmd.setAttribute('MPIRunCommand', '')
    n_cmd.setAttribute('MPIRunArguments', '')
    n_cmd.setAttribute('MPIRunWorkingDirectory', '')
    n_cmd.setAttribute('ApplicationCommand', '')
    n_cmd.setAttribute('ApplicationArguments', '')
    n_cmd.setAttribute('ShimCommand', '')
    n_cmd.setAttribute('MPIAcceptMode', '')
    n_cmd.setAttribute('MPIAcceptFilter', '')

    # Find the config, and add it if it doesn't exist.
    found = False
    for config in self.n_configs.childNodes:
      if config.getAttribute("Name") == config_name:
        found = True

    if not found:
      self.AddConfig(config_name)

    # Add the DebugSettings onto the appropriate config.
    for config in self.n_configs.childNodes:
      if config.getAttribute("Name") == config_name:
        config.appendChild(n_cmd)
        break

  def Write(self, writer=common.WriteOnDiff):
    """Writes the user file."""
    f = writer(self.user_file_path)
    self.doc.writexml(f, encoding='Windows-1252', addindent='  ', newl='\r\n')
    f.close()
|||
|
|||
#------------------------------------------------------------------------------ |
@ -0,0 +1,262 @@ |
|||
#!/usr/bin/python |
|||
# Copyright (c) 2011 The Chromium Authors. All rights reserved. |
|||
# Use of this source code is governed by a BSD-style license that can be |
|||
# found in the LICENSE file. |
|||
|
|||
"""Handle version information related to Visual Stuio.""" |
|||
|
|||
import errno |
|||
import os |
|||
import re |
|||
import subprocess |
|||
import sys |
|||
|
|||
|
|||
class VisualStudioVersion:
  """Information regarding a version of Visual Studio."""

  def __init__(self, short_name, description,
               solution_version, project_version, flat_sln, uses_vcxproj):
    # Plain data holder; the accessors below exist to match the
    # established interface.
    self.short_name = short_name
    self.description = description
    self.solution_version = solution_version
    self.project_version = project_version
    self.flat_sln = flat_sln
    self.uses_vcxproj = uses_vcxproj

  def ShortName(self):
    """Short identifier, e.g. '2008' or '2008e'."""
    return self.short_name

  def Description(self):
    """Get the full description of the version."""
    return self.description

  def SolutionVersion(self):
    """Get the version number of the sln files."""
    return self.solution_version

  def ProjectVersion(self):
    """Get the version number of the vcproj or vcxproj files."""
    return self.project_version

  def FlatSolution(self):
    """Whether a flat solution is required (set for express editions)."""
    return self.flat_sln

  def UsesVcxproj(self):
    """Returns true if this version uses a vcxproj file."""
    return self.uses_vcxproj

  def ProjectExtension(self):
    """Returns the file extension for the project."""
    if self.uses_vcxproj:
      return '.vcxproj'
    return '.vcproj'
|||
|
|||
def _RegistryQueryBase(sysdir, key, value): |
|||
"""Use reg.exe to read a particular key. |
|||
|
|||
While ideally we might use the win32 module, we would like gyp to be |
|||
python neutral, so for instance cygwin python lacks this module. |
|||
|
|||
Arguments: |
|||
sysdir: The system subdirectory to attempt to launch reg.exe from. |
|||
key: The registry key to read from. |
|||
value: The particular value to read. |
|||
Return: |
|||
stdout from reg.exe, or None for failure. |
|||
""" |
|||
# Skip if not on Windows or Python Win32 setup issue |
|||
if sys.platform not in ('win32', 'cygwin'): |
|||
return None |
|||
# Setup params to pass to and attempt to launch reg.exe |
|||
cmd = [os.path.join(os.environ.get('WINDIR', ''), sysdir, 'reg.exe'), |
|||
'query', key] |
|||
if value: |
|||
cmd.extend(['/v', value]) |
|||
p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) |
|||
# Obtain the stdout from reg.exe, reading to the end so p.returncode is valid |
|||
# Note that the error text may be in [1] in some cases |
|||
text = p.communicate()[0] |
|||
# Check return code from reg.exe; officially 0==success and 1==error |
|||
if p.returncode: |
|||
return None |
|||
return text |
|||
|
|||
def _RegistryQuery(key, value=None):
  """Use reg.exe to read a particular key through _RegistryQueryBase.

  First tries to launch from %WinDir%\Sysnative to avoid WoW64 redirection. If
  that fails, it falls back to System32.  Sysnative is available on Vista and
  up and available on Windows Server 2003 and XP through KB patch 942589. Note
  that Sysnative will always fail if using 64-bit python due to it being a
  virtual directory and System32 will work correctly in the first place.

  KB 942589 - http://support.microsoft.com/kb/942589/en-us.

  Arguments:
    key: The registry key.
    value: The particular registry value to read (optional).
  Return:
    stdout from reg.exe, or None for failure.
  """
  text = None
  try:
    text = _RegistryQueryBase('Sysnative', key, value)
  except OSError, e:
    # ENOENT: no Sysnative directory (32-bit Windows, or no KB942589);
    # retry via System32.  Any other OSError is a real failure.
    if e.errno == errno.ENOENT:
      text = _RegistryQueryBase('System32', key, value)
    else:
      raise
  return text
|||
|
|||
def _RegistryGetValue(key, value):
  """Use reg.exe to obtain the value of a registry key.

  Args:
    key: The registry key.
    value: The particular registry value to read.
  Return:
    contents of the registry key's value, or None on failure.
  """
  output = _RegistryQuery(key, value)
  if not output:
    return None
  # reg.exe output lines look like '    Name    REG_SZ    data\r\n';
  # capture everything after the type token up to the CR.
  match = re.search(r'REG_\w+\s+([^\r]+)\r\n', output)
  if not match:
    return None
  return match.group(1)
|||
|
|||
def _RegistryKeyExists(key):
  """Use reg.exe to see if a key exists.

  Args:
    key: The registry key to check.
  Return:
    True if the key exists
  """
  # Any non-empty query output means the key is present.
  if _RegistryQuery(key):
    return True
  return False
|||
|
|||
|
|||
def _CreateVersion(name):
  """Sets up MSVS project generation.

  Setup is based off the GYP_MSVS_VERSION environment variable or whatever is
  autodetected if GYP_MSVS_VERSION is not explicitly specified.  If a version
  is passed in that doesn't match a value in versions, python will throw an
  error (KeyError).
  """
  # Build the version table from one row per Visual Studio release; the
  # 'e' (express) variant differs only in requiring a flat solution.
  versions = {}
  for year, solution, project, uses_vcxproj in (('2010', '11.00', '4.0', True),
                                                ('2008', '10.00', '9.00',
                                                 False),
                                                ('2005', '9.00', '8.00',
                                                 False)):
    description = 'Visual Studio ' + year
    versions[year] = VisualStudioVersion(year, description,
                                         solution_version=solution,
                                         project_version=project,
                                         flat_sln=False,
                                         uses_vcxproj=uses_vcxproj)
    versions[year + 'e'] = VisualStudioVersion(year + 'e', description,
                                               solution_version=solution,
                                               project_version=project,
                                               flat_sln=True,
                                               uses_vcxproj=uses_vcxproj)
  return versions[str(name)]
|||
|
|||
|
|||
def _DetectVisualStudioVersions():
  """Collect the list of installed visual studio versions.

  Returns:
    A list of visual studio versions installed in descending order of
    usage preference.
  Base this on the registry and a quick check if devenv.exe exists.
  Only versions 8-10 are considered.
  Possibilities are:
    2005(e) - Visual Studio 2005 (8)
    2008(e) - Visual Studio 2008 (9)
    2010(e) - Visual Studio 2010 (10)
  Where (e) is e for express editions of MSVS and blank otherwise.
  """
  version_to_year = {'8.0': '2005', '9.0': '2008', '10.0': '2010'}
  versions = []
  # For now, prefer versions before VS2010
  for version in ('9.0', '8.0', '10.0'):
    year = version_to_year[version]
    # Check if VS2010 and later is installed as specified by
    # http://msdn.microsoft.com/en-us/library/bb164659.aspx
    keys = [r'HKLM\SOFTWARE\Microsoft\DevDiv\VS\Servicing\%s' % version,
            r'HKLM\SOFTWARE\Wow6432Node\Microsoft\DevDiv\VS\Servicing\%s' % (
                version)]
    # Iterate the keys directly instead of indexing (range(len(...)) was
    # an anti-idiom; behavior is unchanged).
    for key in keys:
      if not _RegistryKeyExists(key):
        continue
      # An 'expbsln' subkey marks an express edition.
      if _RegistryKeyExists(key + '\\expbsln'):
        versions.append(_CreateVersion(year + 'e'))
      else:
        versions.append(_CreateVersion(year))

    # Old (pre-VS2010) method of searching for which VS version is installed
    keys = [r'HKLM\Software\Microsoft\VisualStudio\%s' % version,
            r'HKLM\Software\Wow6432Node\Microsoft\VisualStudio\%s' % version,
            r'HKLM\Software\Microsoft\VCExpress\%s' % version,
            r'HKLM\Software\Wow6432Node\Microsoft\VCExpress\%s' % version]
    for key in keys:
      path = _RegistryGetValue(key, 'InstallDir')
      if not path:
        continue
      # devenv.exe marks a full edition, vcexpress.exe an express edition.
      if os.path.exists(os.path.join(path, 'devenv.exe')):
        versions.append(_CreateVersion(year))
      elif os.path.exists(os.path.join(path, 'vcexpress.exe')):
        versions.append(_CreateVersion(year + 'e'))
  return versions
|||
|
|||
|
|||
def SelectVisualStudioVersion(version='auto'):
  """Select which version of Visual Studio projects to generate.

  Arguments:
    version: Hook to allow caller to force a particular version (vs auto).
  Returns:
    An object representing a visual studio project format version.
  """
  if version == 'auto':
    # In auto mode, the environment variable may override.
    version = os.environ.get('GYP_MSVS_VERSION', 'auto')
  if version != 'auto':
    # An explicit version string: convert it into a version object.
    return _CreateVersion(version)
  # Still auto: pick the most preferred version actually installed,
  # defaulting to 2005 when none is detected.
  detected = _DetectVisualStudioVersions()
  if detected:
    return detected[0]
  return _CreateVersion('2005')
@ -0,0 +1,200 @@ |
|||
#!/usr/bin/env python |
|||
|
|||
# Copyright (c) 2009 Google Inc. All rights reserved. |
|||
# Use of this source code is governed by a BSD-style license that can be |
|||
# found in the LICENSE file. |
|||
|
|||
""" |
|||
SCons generator. |
|||
|
|||
This contains class definitions and supporting functions for generating |
|||
pieces of SCons files for the different types of GYP targets. |
|||
""" |
|||
|
|||
import os |
|||
|
|||
|
|||
def WriteList(fp, list, prefix='',
              separator=',\n    ',
              preamble=None,
              postamble=None):
  """Write each element of |list| to |fp|, prefixed with |prefix| and
  joined by |separator| (a single space when separator is falsy), with
  optional |preamble| before and |postamble| after.

  NOTE(review): the parameter named 'list' shadows the builtin; kept
  as-is for interface compatibility with existing callers.
  """
  if preamble:
    fp.write(preamble)
  joined = (separator or ' ').join([prefix + element for element in list])
  fp.write(joined)
  if postamble:
    fp.write(postamble)
|||
|
|||
|
|||
class TargetBase(object):
  """
  Base class for a SCons representation of a GYP target.
  """
  is_ignored = False
  target_prefix = ''
  target_suffix = ''

  def __init__(self, spec):
    self.spec = spec

  def full_product_name(self):
    """
    Returns the full name of the product being built:

      * Uses 'product_name' if it's set, else prefix + 'target_name'.
      * Prepends 'product_dir' if set.
      * Appends SCons suffix variables for the target type (or
        product_extension).
    """
    # product_extension, when given, replaces the type-specific suffix.
    suffix = self.target_suffix
    extension = self.spec.get('product_extension')
    if extension:
      suffix = '.' + extension
    prefix = self.spec.get('product_prefix', self.target_prefix)
    base = self.spec.get('product_name', self.spec['target_name'])
    full_name = prefix + base + suffix
    # product_dir overrides the type-specific default output directory.
    product_dir = self.spec.get('product_dir')
    if product_dir:
      return os.path.join(product_dir, full_name)
    return os.path.join(self.out_dir, full_name)

  def write_input_files(self, fp):
    """
    Writes the definition of the input files (sources).
    """
    sources = self.spec.get('sources')
    if not sources:
      fp.write('\ninput_files = []\n')
      return
    WriteList(fp, map(repr, sources),
              preamble='\ninput_files = [\n    ',
              postamble=',\n]\n')

  def builder_call(self):
    """
    Returns the actual SCons builder call to build this target.
    """
    product = self.full_product_name()
    return 'env.%s(env.File(%r), input_files)' % (self.builder_name, product)

  def write_target(self, fp, src_dir='', pre=''):
    """
    Writes the lines necessary to build this target.
    """
    fp.write('\n' + pre)
    fp.write('_outputs = %s\n' % self.builder_call())
    fp.write('target_files.extend(_outputs)\n')
|||
|
|||
|
|||
class NoneTarget(TargetBase):
  """
  A GYP target type of 'none', implicitly or explicitly: nothing is
  built, the target's output files are simply its input files.
  """
  def write_target(self, fp, pre=''):
    fp.write('\ntarget_files.extend(input_files)\n')
|||
|
|||
|
|||
class SettingsTarget(TargetBase):
  """
  A GYP target type of 'settings': contributes build settings only, so
  no target is emitted for it.
  """
  is_ignored = True
|||
|
|||
|
|||
# Code fragment emitted verbatim (after %-interpolation of 'src_dir' and
# 'name') into generated SCons files: it replaces each compilable entry
# of input_files with the object file built from it, forcing objects
# below the build directory; non-compilable entries pass through.
compilable_sources_template = """
_result = []
for infile in input_files:
  if env.compilable(infile):
    if (type(infile) == type('')
        and (infile.startswith(%(src_dir)r)
             or not os.path.isabs(env.subst(infile)))):
      # Force files below the build directory by replacing all '..'
      # elements in the path with '__':
      base, ext = os.path.splitext(os.path.normpath(infile))
      base = [d == '..' and '__' or d for d in base.split('/')]
      base = os.path.join(*base)
      object = '${OBJ_DIR}/${COMPONENT_NAME}/${TARGET_NAME}/' + base
      if not infile.startswith(%(src_dir)r):
        infile = %(src_dir)r + infile
      infile = env.%(name)s(object, infile)[0]
    else:
      infile = env.%(name)s(infile)[0]
  _result.append(infile)
input_files = _result
"""
|||
|
|||
class CompilableSourcesTargetBase(TargetBase):
  """
  An abstract base class for targets that compile their source files.

  We explicitly transform compilable files into object files, even
  though SCons could infer that for us, because we want to control
  where the object file ends up.  (The implicit rules in SCons always
  put the object file next to the source file.)
  """
  intermediate_builder_name = None

  def write_target(self, fp, src_dir='', pre=''):
    # Subclasses must name the builder used for the source->object step.
    if self.intermediate_builder_name is None:
      raise NotImplementedError
    if src_dir and not src_dir.endswith('/'):
      src_dir = src_dir + '/'
    # Emit the source->object transformation, then the link step.
    fp.write(compilable_sources_template % {
        'src_dir': src_dir,
        'name': self.intermediate_builder_name,
    })
    super(CompilableSourcesTargetBase, self).write_target(fp)
|||
|
|||
|
|||
class ProgramTarget(CompilableSourcesTargetBase):
  """
  A GYP target type of 'executable': sources are compiled to static
  objects and linked into a program under the top build directory.
  """
  builder_name = 'GypProgram'
  intermediate_builder_name = 'StaticObject'
  out_dir = '${TOP_BUILDDIR}'
  target_prefix = '${PROGPREFIX}'
  target_suffix = '${PROGSUFFIX}'
|||
|
|||
|
|||
class StaticLibraryTarget(CompilableSourcesTargetBase):
  """
  A GYP target type of 'static_library': static objects archived into a
  library under the library output directory.
  """
  builder_name = 'GypStaticLibrary'
  intermediate_builder_name = 'StaticObject'
  out_dir = '${LIB_DIR}'
  target_prefix = '${LIBPREFIX}'
  target_suffix = '${LIBSUFFIX}'
|||
|
|||
|
|||
class SharedLibraryTarget(CompilableSourcesTargetBase):
  """
  A GYP target type of 'shared_library': shared objects linked into a
  shared library under the library output directory.
  """
  builder_name = 'GypSharedLibrary'
  intermediate_builder_name = 'SharedObject'
  out_dir = '${LIB_DIR}'
  target_prefix = '${SHLIBPREFIX}'
  target_suffix = '${SHLIBSUFFIX}'
|||
|
|||
|
|||
class LoadableModuleTarget(CompilableSourcesTargetBase):
  """
  A GYP target type of 'loadable_module': shared objects linked into a
  runtime-loadable module under the top build directory.
  """
  builder_name = 'GypLoadableModule'
  intermediate_builder_name = 'SharedObject'
  out_dir = '${TOP_BUILDDIR}'
  target_prefix = '${SHLIBPREFIX}'
  target_suffix = '${SHLIBSUFFIX}'
|||
|
|||
|
|||
# Maps a GYP 'type' value to the Target class that writes it; a missing
# 'type' (None) is treated the same as 'none'.
TargetMap = {
  None : NoneTarget,
  'none' : NoneTarget,
  'settings' : SettingsTarget,
  'executable' : ProgramTarget,
  'static_library' : StaticLibraryTarget,
  'shared_library' : SharedLibraryTarget,
  'loadable_module' : LoadableModuleTarget,
}
|||
|
|||
def Target(spec):
  """Factory: instantiate the Target class matching spec['type'].

  Raises KeyError for an unknown type.
  """
  target_class = TargetMap[spec.get('type')]
  return target_class(spec)
@ -0,0 +1,479 @@ |
|||
#!/usr/bin/python |
|||
|
|||
# Copyright (c) 2011 Google Inc. All rights reserved. |
|||
# Use of this source code is governed by a BSD-style license that can be |
|||
# found in the LICENSE file. |
|||
|
|||
import copy |
|||
import gyp.input |
|||
import optparse |
|||
import os.path |
|||
import re |
|||
import shlex |
|||
import sys |
|||
|
|||
# Default debug modes for GYP |
|||
# Default debug modes for GYP.  Modes present as keys in this dict are
# considered enabled by DebugOutput().
debug = {}

# List of "official" debug modes, but you can use anything you like.
DEBUG_GENERAL = 'general'
DEBUG_VARIABLES = 'variables'
DEBUG_INCLUDES = 'includes'
|||
def DebugOutput(mode, message):
  """Print |message|, tagged with |mode|, when debug mode |mode| is
  enabled (i.e. present as a key in gyp.debug)."""
  if mode in gyp.debug.keys():
    print "%s: %s" % (mode.upper(), message)
|
|||
def FindBuildFiles():
  """Return the names of all '.gyp' build files in the current directory.

  Returns:
    List of file names (not full paths) ending in '.gyp'.
  """
  extension = '.gyp'
  build_files = []
  # 'name' avoids shadowing the 'file' builtin, and str.endswith is the
  # idiomatic (and equivalent) form of the old slice comparison
  # name[-len(extension):] == extension.
  for name in os.listdir(os.getcwd()):
    if name.endswith(extension):
      build_files.append(name)
  return build_files
|||
|
|||
|
|||
def Load(build_files, format, default_variables={},
         includes=[], depth='.', params=None, check=False, circular_check=True):
  """
  Loads one or more specified build files.
  default_variables and includes will be copied before use.
  Returns the generator for the specified format and the
  data returned by loading the specified build files.
  """
  # NOTE(review): mutable defaults ({} and []) are safe here only because
  # both are copied below before use; prefer None defaults in new code.
  if params is None:
    params = {}

  # A format like 'make-android' carries a flavor after the dash.
  flavor = None
  if '-' in format:
    format, params['flavor'] = format.split('-', 1)

  default_variables = copy.copy(default_variables)

  # Default variables provided by this program and its modules should be
  # named WITH_CAPITAL_LETTERS to provide a distinct "best practice" namespace,
  # avoiding collisions with user and automatic variables.
  default_variables['GENERATOR'] = format

  generator_name = 'gyp.generator.' + format
  # These parameters are passed in order (as opposed to by key)
  # because ActivePython cannot handle key parameters to __import__.
  generator = __import__(generator_name, globals(), locals(), generator_name)
  # Generator-supplied defaults never override caller-supplied ones.
  for (key, val) in generator.generator_default_variables.items():
    default_variables.setdefault(key, val)

  # Give the generator the opportunity to set additional variables based on
  # the params it will receive in the output phase.
  if getattr(generator, 'CalculateVariables', None):
    generator.CalculateVariables(default_variables, params)

  # Give the generator the opportunity to set generator_input_info based on
  # the params it will receive in the output phase.
  if getattr(generator, 'CalculateGeneratorInputInfo', None):
    generator.CalculateGeneratorInputInfo(params)

  # Fetch the generator specific info that gets fed to input, we use getattr
  # so we can default things and the generators only have to provide what
  # they need.
  generator_input_info = {
    'generator_wants_absolute_build_file_paths':
        getattr(generator, 'generator_wants_absolute_build_file_paths', False),
    'generator_handles_variants':
        getattr(generator, 'generator_handles_variants', False),
    'non_configuration_keys':
        getattr(generator, 'generator_additional_non_configuration_keys', []),
    'path_sections':
        getattr(generator, 'generator_additional_path_sections', []),
    'extra_sources_for_rules':
        getattr(generator, 'generator_extra_sources_for_rules', []),
    'generator_supports_multiple_toolsets':
        getattr(generator, 'generator_supports_multiple_toolsets', False),
    'generator_wants_static_library_dependencies_adjusted':
        getattr(generator,
                'generator_wants_static_library_dependencies_adjusted', True),
    'generator_wants_sorted_dependencies':
        getattr(generator, 'generator_wants_sorted_dependencies', False),
  }

  # Process the input specific to this generator.
  result = gyp.input.Load(build_files, default_variables, includes[:],
                          depth, generator_input_info, check, circular_check)
  return [generator] + result
|||
|
|||
def NameValueListToDict(name_value_list):
  """
  Takes an array of strings of the form 'NAME=VALUE' and creates a dictionary
  of the pairs.  If a string is simply NAME, then the value in the dictionary
  is set to True.  If VALUE can be converted to an integer, it is.
  """
  result = {}
  for item in name_value_list:
    name, sep, value = item.partition('=')
    if not sep:
      # No '=' present: treat as a boolean flag.
      result[name] = True
      continue
    # Prefer an int when the value parses as one; otherwise keep the string.
    try:
      result[name] = int(value)
    except ValueError:
      result[name] = value
  return result
|||
|
|||
def ShlexEnv(env_name):
  """Return the contents of environment variable |env_name| split with
  shell quoting rules.

  Returns:
    A list of tokens; an empty list when the variable is unset or empty.
    (Previously a set-but-empty variable fell through and returned '',
    an empty string; normalizing to a list keeps the return type
    consistent.  Both values are falsy and iterate empty, so callers are
    unaffected.)
  """
  raw = os.environ.get(env_name)
  if not raw:
    return []
  return shlex.split(raw)
|||
|
|||
def FormatOpt(opt, value):
  """Rejoin an option and its value as it would appear on a command
  line: '--opt=value' for long options, '-Ovalue' for short ones."""
  if not opt.startswith('--'):
    return opt + value
  return '%s=%s' % (opt, value)
|||
|
|||
def RegenerateAppendFlag(flag, values, predicate, env_name, options):
  """Regenerate a list of command line flags, for an option of action='append'.

  The |env_name|, if given, is checked in the environment and used to generate
  an initial list of options, then the options that were specified on the
  command line (given in |values|) are appended.  This matches the handling of
  environment variables and command line flags where command line flags
  override the environment, while not requiring the environment to be set when
  the flags are used again.
  """
  result = []
  if env_name and options.use_environment:
    # Environment-derived values come first so CLI values override them.
    result.extend(FormatOpt(flag, predicate(v)) for v in ShlexEnv(env_name))
  result.extend(FormatOpt(flag, predicate(v)) for v in (values or []))
  return result
|||
|
|||
def RegenerateFlags(options):
  """Given a parsed options object, and taking the environment variables into
  account, returns a list of flags that should regenerate an equivalent options
  object (even in the absence of the environment variables.)

  Any path options will be normalized relative to depth.

  The format flag is not included, as it is assumed the calling generator will
  set that as appropriate.
  """
  def FixPath(path):
    # Re-root a relative path against options.depth; '' maps to curdir.
    path = gyp.common.FixIfRelativePath(path, options.depth)
    if not path:
      return os.path.curdir
    return path

  def Noop(value):
    # Identity transform for non-path option values.
    return value

  # We always want to ignore the environment when regenerating, to avoid
  # duplicate or changed flags in the environment at the time of regeneration.
  flags = ['--ignore-environment']
  for name, metadata in options._regeneration_metadata.iteritems():
    opt = metadata['opt']
    value = getattr(options, name)
    # Path-typed options are normalized via FixPath; all others pass through.
    value_predicate = metadata['type'] == 'path' and FixPath or Noop
    action = metadata['action']
    env_name = metadata['env_name']
    if action == 'append':
      flags.extend(RegenerateAppendFlag(opt, value, value_predicate,
                                        env_name, options))
    elif action in ('store', None):  # None is a synonym for 'store'.
      if value:
        flags.append(FormatOpt(opt, value_predicate(value)))
      elif options.use_environment and env_name and os.environ.get(env_name):
        flags.append(FormatOpt(opt, value_predicate(os.environ.get(env_name))))
    elif action in ('store_true', 'store_false'):
      if ((action == 'store_true' and value) or
          (action == 'store_false' and not value)):
        flags.append(opt)
      elif options.use_environment and env_name:
        print >>sys.stderr, ('Warning: environment regeneration unimplemented '
                             'for %s flag %r env_name %r' % (action, opt,
                                                             env_name))
    else:
      print >>sys.stderr, ('Warning: regeneration unimplemented for action %r '
                           'flag %r' % (action, opt))

  return flags
|||
|
|||
class RegeneratableOptionParser(optparse.OptionParser):
  """OptionParser that records the metadata needed to regenerate its flags.

  Options tagged via add_option's extra keywords are remembered (keyed by
  dest) so RegenerateFlags can later rebuild an equivalent command line.
  """

  def __init__(self):
    self.__regeneratable_options = {}
    optparse.OptionParser.__init__(self)

  def add_option(self, *args, **kw):
    """Add an option to the parser.

    This accepts the same arguments as OptionParser.add_option, plus the
    following:
      regenerate: can be set to False to prevent this option from being
          included in regeneration.
      env_name: name of environment variable that additional values for this
          option come from.
      type: adds type='path', to tell the regenerator that the values of
          this option need to be made relative to options.depth
    """
    env_name = kw.pop('env_name', None)
    if 'dest' in kw and kw.pop('regenerate', True):
      # Record how to regenerate this option, keyed by its dest.
      opt_type = kw.get('type')
      if opt_type == 'path':
        # optparse has no 'path' type; present it to optparse as a plain
        # string and keep the 'path' tag only in the regeneration metadata.
        kw['type'] = 'string'
      self.__regeneratable_options[kw['dest']] = {
          'action': kw.get('action'),
          'type': opt_type,
          'env_name': env_name,
          'opt': args[0],
      }
    optparse.OptionParser.add_option(self, *args, **kw)

  def parse_args(self, *args):
    values, args = optparse.OptionParser.parse_args(self, *args)
    # Expose the metadata on the values object for RegenerateFlags.
    values._regeneration_metadata = self.__regeneratable_options
    return values, args
|||
|
|||
def main(args):
  """Command line entry point: parse |args|, load the named .gyp files, and
  run each requested generator over them.  Returns a process exit status
  (0 on success, 1 when no build file could be found).
  """
  my_name = os.path.basename(sys.argv[0])

  parser = RegeneratableOptionParser()
  usage = 'usage: %s [options ...] [build_file ...]'
  parser.set_usage(usage.replace('%s', '%prog'))
  parser.add_option('-D', dest='defines', action='append', metavar='VAR=VAL',
                    env_name='GYP_DEFINES',
                    help='sets variable VAR to value VAL')
  parser.add_option('-f', '--format', dest='formats', action='append',
                    env_name='GYP_GENERATORS', regenerate=False,
                    help='output formats to generate')
  parser.add_option('--msvs-version', dest='msvs_version',
                    regenerate=False,
                    help='Deprecated; use -G msvs_version=MSVS_VERSION instead')
  parser.add_option('-I', '--include', dest='includes', action='append',
                    metavar='INCLUDE', type='path',
                    help='files to include in all loaded .gyp files')
  parser.add_option('--depth', dest='depth', metavar='PATH', type='path',
                    help='set DEPTH gyp variable to a relative path to PATH')
  parser.add_option('-d', '--debug', dest='debug', metavar='DEBUGMODE',
                    action='append', default=[], help='turn on a debugging '
                    'mode for debugging GYP. Supported modes are "variables" '
                    'and "general"')
  parser.add_option('-S', '--suffix', dest='suffix', default='',
                    help='suffix to add to generated files')
  parser.add_option('-G', dest='generator_flags', action='append', default=[],
                    metavar='FLAG=VAL', env_name='GYP_GENERATOR_FLAGS',
                    help='sets generator flag FLAG to VAL')
  parser.add_option('--generator-output', dest='generator_output',
                    action='store', default=None, metavar='DIR', type='path',
                    env_name='GYP_GENERATOR_OUTPUT',
                    help='puts generated build files under DIR')
  parser.add_option('--ignore-environment', dest='use_environment',
                    action='store_false', default=True, regenerate=False,
                    help='do not read options from environment variables')
  parser.add_option('--check', dest='check', action='store_true',
                    help='check format of gyp files')
  parser.add_option('--toplevel-dir', dest='toplevel_dir', action='store',
                    default=None, metavar='DIR', type='path',
                    help='directory to use as the root of the source tree')
  # --no-circular-check disables the check for circular relationships between
  # .gyp files. These relationships should not exist, but they've only been
  # observed to be harmful with the Xcode generator. Chromium's .gyp files
  # currently have some circular relationships on non-Mac platforms, so this
  # option allows the strict behavior to be used on Macs and the lenient
  # behavior to be used elsewhere.
  # TODO(mark): Remove this option when http://crbug.com/35878 is fixed.
  parser.add_option('--no-circular-check', dest='circular_check',
                    action='store_false', default=True, regenerate=False,
                    help="don't check for circular relationships between files")

  # We read a few things from ~/.gyp, so set up a var for that.
  home_vars = ['HOME']
  if sys.platform in ('cygwin', 'win32'):
    home_vars.append('USERPROFILE')
  home = None
  home_dot_gyp = None
  # First candidate whose ~/.gyp directory actually exists wins.
  for home_var in home_vars:
    home = os.getenv(home_var)
    if home != None:
      home_dot_gyp = os.path.join(home, '.gyp')
      if not os.path.exists(home_dot_gyp):
        home_dot_gyp = None
      else:
        break

  # TODO(thomasvl): add support for ~/.gyp/defaults

  options, build_files_arg = parser.parse_args(args)
  build_files = build_files_arg

  if not options.formats:
    # If no format was given on the command line, then check the env variable.
    generate_formats = []
    if options.use_environment:
      generate_formats = os.environ.get('GYP_GENERATORS', [])
    if generate_formats:
      # NOTE(review): splitting on single [\s,] characters yields empty
      # strings for inputs like "make, xcode" -- confirm downstream Load
      # tolerates that, or that callers never use ", "-separated values.
      generate_formats = re.split('[\s,]', generate_formats)
    if generate_formats:
      options.formats = generate_formats
    else:
      # Nothing in the variable, default based on platform.
      options.formats = [ {'darwin': 'xcode',
                           'win32': 'msvs',
                           'cygwin': 'msvs',
                           'freebsd7': 'make',
                           'freebsd8': 'make',
                           'linux2': 'make',
                           'linux3': 'make',
                           'openbsd4': 'make',
                           'sunos5': 'make',}[sys.platform] ]

  if not options.generator_output and options.use_environment:
    # Command line takes precedence; fall back to GYP_GENERATOR_OUTPUT.
    g_o = os.environ.get('GYP_GENERATOR_OUTPUT')
    if g_o:
      options.generator_output = g_o

  # Enable each requested debug mode in the module-level gyp.debug dict.
  for mode in options.debug:
    gyp.debug[mode] = 1

  # Do an extra check to avoid work when we're not debugging.
  if DEBUG_GENERAL in gyp.debug.keys():
    DebugOutput(DEBUG_GENERAL, 'running with these options:')
    for option, value in sorted(options.__dict__.items()):
      if option[0] == '_':
        # Skip private attributes such as _regeneration_metadata.
        continue
      if isinstance(value, basestring):
        DebugOutput(DEBUG_GENERAL, " %s: '%s'" % (option, value))
      else:
        DebugOutput(DEBUG_GENERAL, " %s: %s" % (option, str(value)))

  if not build_files:
    build_files = FindBuildFiles()
  if not build_files:
    print >>sys.stderr, (usage + '\n\n%s: error: no build_file') % \
                        (my_name, my_name)
    return 1

  # TODO(mark): Chromium-specific hack!
  # For Chromium, the gyp "depth" variable should always be a relative path
  # to Chromium's top-level "src" directory. If no depth variable was set
  # on the command line, try to find a "src" directory by looking at the
  # absolute path to each build file's directory. The first "src" component
  # found will be treated as though it were the path used for --depth.
  if not options.depth:
    for build_file in build_files:
      build_file_dir = os.path.abspath(os.path.dirname(build_file))
      build_file_dir_components = build_file_dir.split(os.path.sep)
      components_len = len(build_file_dir_components)
      # Walk from the deepest component upward, trimming as we go, so the
      # deepest 'src' component wins.
      for index in xrange(components_len - 1, -1, -1):
        if build_file_dir_components[index] == 'src':
          options.depth = os.path.sep.join(build_file_dir_components)
          break
        del build_file_dir_components[index]

      # If the inner loop found something, break without advancing to another
      # build file.
      if options.depth:
        break

    if not options.depth:
      raise Exception, \
          'Could not automatically locate src directory. This is a ' + \
          'temporary Chromium feature that will be removed. Use ' + \
          '--depth as a workaround.'

  # If toplevel-dir is not set, we assume that depth is the root of our source
  # tree.
  if not options.toplevel_dir:
    options.toplevel_dir = options.depth

  # -D on the command line sets variable defaults - D isn't just for define,
  # it's for default. Perhaps there should be a way to force (-F?) a
  # variable's value so that it can't be overridden by anything else.
  cmdline_default_variables = {}
  defines = []
  if options.use_environment:
    defines += ShlexEnv('GYP_DEFINES')
  if options.defines:
    # Command line -D values come after (and so override) environment ones.
    defines += options.defines
  cmdline_default_variables = NameValueListToDict(defines)
  if DEBUG_GENERAL in gyp.debug.keys():
    DebugOutput(DEBUG_GENERAL,
                "cmdline_default_variables: %s" % cmdline_default_variables)

  # Set up includes.
  includes = []

  # If ~/.gyp/include.gypi exists, it'll be forcibly included into every
  # .gyp file that's loaded, before anything else is included.
  if home_dot_gyp != None:
    default_include = os.path.join(home_dot_gyp, 'include.gypi')
    if os.path.exists(default_include):
      includes.append(default_include)

  # Command-line --include files come after the default include.
  if options.includes:
    includes.extend(options.includes)

  # Generator flags should be prefixed with the target generator since they
  # are global across all generator runs.
  gen_flags = []
  if options.use_environment:
    gen_flags += ShlexEnv('GYP_GENERATOR_FLAGS')
  if options.generator_flags:
    gen_flags += options.generator_flags
  generator_flags = NameValueListToDict(gen_flags)
  if DEBUG_GENERAL in gyp.debug.keys():
    DebugOutput(DEBUG_GENERAL, "generator_flags: %s" % generator_flags)

  # TODO: Remove this and the option after we've gotten folks to move to the
  # generator flag.
  if options.msvs_version:
    print >>sys.stderr, \
      'DEPRECATED: Use generator flag (-G msvs_version=' + \
      options.msvs_version + ') instead of --msvs-version=' + \
      options.msvs_version
    generator_flags['msvs_version'] = options.msvs_version

  # Generate all requested formats (use a set in case we got one format request
  # twice)
  for format in set(options.formats):
    params = {'options': options,
              'build_files': build_files,
              'generator_flags': generator_flags,
              'cwd': os.getcwd(),
              'build_files_arg': build_files_arg,
              'gyp_binary': sys.argv[0],
              'home_dot_gyp': home_dot_gyp}

    # Start with the default variables from the command line.
    [generator, flat_list, targets, data] = Load(build_files, format,
                                                 cmdline_default_variables,
                                                 includes, options.depth,
                                                 params, options.check,
                                                 options.circular_check)

    # TODO(mark): Pass |data| for now because the generator needs a list of
    # build files that came in. In the future, maybe it should just accept
    # a list, and not the whole data dict.
    # NOTE: flat_list is the flattened dependency graph specifying the order
    # that targets may be built. Build systems that operate serially or that
    # need to have dependencies defined before dependents reference them should
    # generate targets in the order specified in flat_list.
    generator.GenerateOutput(flat_list, targets, data, params)

  # Done
  return 0
|||
|
|||
|
|||
if __name__ == '__main__':
  # Script entry point: forward CLI args (minus the program name) to main()
  # and use its return value as the process exit status.
  sys.exit(main(sys.argv[1:]))
@ -0,0 +1,362 @@ |
|||
#!/usr/bin/python |
|||
|
|||
# Copyright (c) 2009 Google Inc. All rights reserved. |
|||
# Use of this source code is governed by a BSD-style license that can be |
|||
# found in the LICENSE file. |
|||
|
|||
import errno |
|||
import filecmp |
|||
import os.path |
|||
import re |
|||
import tempfile |
|||
import sys |
|||
|
|||
|
|||
# A minimal memoizing decorator. It'll blow up if the args aren't immutable, |
|||
# among other "problems". |
|||
class memoize(object):
  """A minimal memoizing decorator.

  Results are cached on the positional-argument tuple, so arguments must be
  hashable; keyword arguments are not supported.
  """
  def __init__(self, func):
    self.func = func
    self.cache = {}
  def __call__(self, *args):
    if args not in self.cache:
      self.cache[args] = self.func(*args)
    return self.cache[args]
|||
|
|||
|
|||
def ExceptionAppend(e, msg):
  """Append a message to the given exception's message."""
  # For a one-element args tuple, args[1:] is simply empty, so the two
  # non-empty cases collapse into one expression.
  if not e.args:
    e.args = (msg,)
  else:
    e.args = (str(e.args[0]) + ' ' + msg,) + e.args[1:]
|||
|
|||
|
|||
def ParseQualifiedTarget(target):
  """Split a qualified target into [build_file, target, toolset].

  A fully qualified target looks like path/to/file.gyp:target#toolset; the
  build file and toolset portions are optional and come back as None when
  absent.
  """
  # NOTE: rsplit is used to disambiguate the Windows drive letter separator.
  build_file = None
  if ':' in target:
    build_file, target = target.rsplit(':', 1)

  toolset = None
  if '#' in target:
    target, toolset = target.rsplit('#', 1)

  return [build_file, target, toolset]
|||
|
|||
|
|||
def ResolveTarget(build_file, target, toolset):
  """Resolve a (possibly qualified) target into canonical form.

  Returns [build_file, target, toolset] where build_file is fully defined,
  either absolute or relative to the current directory.  |build_file| is the
  file relative to which |target| is defined, and |toolset| is the default
  toolset used when the qualified target does not name one.
  """
  parsed_build_file, target, parsed_toolset = ParseQualifiedTarget(target)

  if parsed_build_file:
    if build_file:
      # A relative parsed_build_file is relative to the directory containing
      # build_file, which may itself not be the current directory -- so
      # resolve it against that directory.  os.path.join returns absolute
      # parsed_build_file paths as-is, making them usable regardless of the
      # current directory.
      base_dir = os.path.dirname(build_file)
      build_file = os.path.normpath(os.path.join(base_dir, parsed_build_file))
    else:
      build_file = parsed_build_file

  # A toolset spelled in the qualified target overrides the default.
  if parsed_toolset:
    toolset = parsed_toolset

  return [build_file, target, toolset]
|||
|
|||
|
|||
def BuildFile(fully_qualified_target):
  """Return the build file portion of a fully qualified target."""
  build_file, _, _ = ParseQualifiedTarget(fully_qualified_target)
  return build_file
|||
|
|||
|
|||
def QualifiedTarget(build_file, target, toolset):
  """Compose the fully qualified form: build_file:target[#toolset].

  "Qualified" means the file that a target was defined in and the target
  name, separated by a colon, optionally suffixed by a '#' and the toolset
  name.
  """
  qualified = build_file + ':' + target
  if toolset:
    qualified += '#' + toolset
  return qualified
|||
|
|||
|
|||
@memoize
def RelativePath(path, relative_to):
  """Return a path identifying |path| relative to |relative_to|.

  Both arguments are interpreted relative to the current directory; the
  result is '' when the two resolve to the same location.
  """
  # Absolutizing also normalizes, so component comparison is meaningful.
  path_parts = os.path.abspath(path).split(os.path.sep)
  base_parts = os.path.abspath(relative_to).split(os.path.sep)

  # Number of leading components the two paths share.
  shared = len(os.path.commonprefix([path_parts, base_parts]))

  # Back out of |relative_to| up to the shared prefix, then descend along
  # the remainder of |path|.
  parts = ([os.path.pardir] * (len(base_parts) - shared) +
           path_parts[shared:])
  if not parts:
    # The paths were the same.
    return ''
  return os.path.join(*parts)
|||
|
|||
|
|||
def FixIfRelativePath(path, relative_to):
  """Like RelativePath, but return absolute |path| values unchanged."""
  return path if os.path.isabs(path) else RelativePath(path, relative_to)
|||
|
|||
|
|||
def UnrelativePath(path, relative_to):
  """Rebase |path| onto the current directory.

  |relative_to| is taken as relative to the current directory and |path| as
  relative to the dirname of |relative_to|; the result identifies |path|
  relative to the current directory.
  """
  return os.path.normpath(os.path.join(os.path.dirname(relative_to), path))
|||
|
|||
|
|||
# re objects used by EncodePOSIXShellArgument. See IEEE 1003.1 XCU.2.2 at |
|||
# http://www.opengroup.org/onlinepubs/009695399/utilities/xcu_chap02.html#tag_02_02 |
|||
# and the documentation for various shells. |
|||
|
|||
# _quote is a pattern that should match any argument that needs to be quoted |
|||
# with double-quotes by EncodePOSIXShellArgument. It matches the following |
|||
# characters appearing anywhere in an argument: |
|||
# \t, \n, space parameter separators |
|||
# # comments |
|||
# $ expansions (quoted to always expand within one argument) |
|||
# % called out by IEEE 1003.1 XCU.2.2 |
|||
# & job control |
|||
# ' quoting |
|||
# (, ) subshell execution |
|||
# *, ?, [ pathname expansion |
|||
# ; command delimiter |
|||
# <, >, | redirection |
|||
# = assignment |
|||
# {, } brace expansion (bash) |
|||
# ~ tilde expansion |
|||
# It also matches the empty string, because "" (or '') is the only way to |
|||
# represent an empty string literal argument to a POSIX shell. |
|||
# |
|||
# This does not match the characters in _escape, because those need to be |
|||
# backslash-escaped regardless of whether they appear in a double-quoted |
|||
# string. |
|||
# Matches arguments that must be double-quoted (or the empty string); see the
# character-by-character rationale in the comment block above.
_quote = re.compile('[\t\n #$%&\'()*;<=>?[{|}~]|^$')
|||
|
|||
# _escape is a pattern that should match any character that needs to be |
|||
# escaped with a backslash, whether or not the argument matched the _quote |
|||
# pattern. _escape is used with re.sub to backslash anything in _escape's |
|||
# first match group, hence the (parentheses) in the regular expression. |
|||
# |
|||
# _escape matches the following characters appearing anywhere in an argument: |
|||
# " to prevent POSIX shells from interpreting this character for quoting |
|||
# \ to prevent POSIX shells from interpreting this character for escaping |
|||
# ` to prevent POSIX shells from interpreting this character for command |
|||
# substitution |
|||
# Missing from this list is $, because the desired behavior of |
|||
# EncodePOSIXShellArgument is to permit parameter (variable) expansion. |
|||
# |
|||
# Also missing from this list is !, which bash will interpret as the history |
|||
# expansion character when history is enabled. bash does not enable history |
|||
# by default in non-interactive shells, so this is not thought to be a problem. |
|||
# ! was omitted from this list because bash interprets "\!" as a literal string |
|||
# including the backslash character (avoiding history expansion but retaining |
|||
# the backslash), which would not be correct for argument encoding. Handling |
|||
# this case properly would also be problematic because bash allows the history |
|||
# character to be changed with the histchars shell variable. Fortunately, |
|||
# as history is not enabled in non-interactive shells and |
|||
# EncodePOSIXShellArgument is only expected to encode for non-interactive |
|||
# shells, there is no room for error here by ignoring !. |
|||
# Captures characters needing a backslash even inside double quotes: ", \, `.
_escape = re.compile(r'(["\\`])')
|||
|
|||
def EncodePOSIXShellArgument(argument):
  """Encodes |argument| suitably for consumption by POSIX shells.

  argument may be quoted and escaped as necessary to ensure that POSIX shells
  treat the returned value as a literal representing the argument passed to
  this function. Parameter (variable) expansions beginning with $ are allowed
  to remain intact without escaping the $, to allow the argument to contain
  references to variables to be expanded by the shell.
  """

  text = argument if isinstance(argument, str) else str(argument)

  # Double-quote when the argument contains shell metacharacters (or is the
  # empty string); backslash-escape ", \ and ` in either case.
  quote = '"' if _quote.search(text) else ''
  return '%s%s%s' % (quote, _escape.sub(r'\\\1', text), quote)
|||
|
|||
|
|||
def EncodePOSIXShellList(list):
  """Encodes |list| suitably for consumption by POSIX shells.

  Returns EncodePOSIXShellArgument for each item in list, and joins them
  together using the space character as an argument separator.
  """

  return ' '.join(EncodePOSIXShellArgument(argument) for argument in list)
|||
|
|||
|
|||
def DeepDependencyTargets(target_dicts, roots):
  """Returns the recursive list of target dependencies."""
  # Unordered worklist walk over the 'dependencies' and
  # 'dependencies_original' edges; the roots themselves are excluded from
  # the result.
  visited = set()
  to_visit = set(roots)
  while to_visit:
    node = to_visit.pop()
    if node in visited:
      continue
    visited.add(node)
    spec = target_dicts[node]
    to_visit.update(spec.get('dependencies', []))
    to_visit.update(spec.get('dependencies_original', []))
  return list(visited - set(roots))
|||
|
|||
|
|||
def BuildFileTargets(target_list, build_file):
  """From a target_list, returns the subset from the specified build_file.
  """
  matches = []
  for qualified_target in target_list:
    if BuildFile(qualified_target) == build_file:
      matches.append(qualified_target)
  return matches
|||
|
|||
|
|||
def AllTargets(target_list, target_dicts, build_file):
  """Returns all targets (direct and dependencies) for the specified build_file.
  """
  direct = BuildFileTargets(target_list, build_file)
  return direct + DeepDependencyTargets(target_dicts, direct)
|||
|
|||
|
|||
def WriteOnDiff(filename):
  """Write to a file only if the new contents differ.

  Arguments:
    filename: name of the file to potentially write to.
  Returns:
    A file like object which will write to temporary file and only overwrite
    the target if it differs (on close).
  """

  class Writer:
    """Wrapper around file which only covers the target if it differs."""
    def __init__(self):
      # Pick temporary file in the target's directory so the final rename
      # stays on the same filesystem.
      tmp_fd, self.tmp_path = tempfile.mkstemp(
          suffix='.tmp',
          prefix=os.path.split(filename)[1] + '.gyp.',
          dir=os.path.split(filename)[0])
      try:
        self.tmp_file = os.fdopen(tmp_fd, 'wb')
      except Exception:
        # Don't leave turds behind.
        os.unlink(self.tmp_path)
        raise

    def __getattr__(self, attrname):
      # Delegate everything else to self.tmp_file
      return getattr(self.tmp_file, attrname)

    def close(self):
      # Flush the temp file, then either discard it (contents unchanged) or
      # move it over the target.
      try:
        # Close tmp file.
        self.tmp_file.close()
        # Determine if different.
        same = False
        try:
          # shallow=False: compare contents, not just os.stat signatures.
          same = filecmp.cmp(self.tmp_path, filename, False)
        except OSError, e:
          # ENOENT simply means the target doesn't exist yet.
          if e.errno != errno.ENOENT:
            raise

        if same:
          # The new file is identical to the old one, just get rid of the new
          # one.
          os.unlink(self.tmp_path)
        else:
          # The new file is different from the old one, or there is no old one.
          # Rename the new file to the permanent name.
          #
          # tempfile.mkstemp uses an overly restrictive mode, resulting in a
          # file that can only be read by the owner, regardless of the umask.
          # There's no reason to not respect the umask here, which means that
          # an extra hoop is required to fetch it and reset the new file's mode.
          #
          # No way to get the umask without setting a new one? Set a safe one
          # and then set it back to the old value.
          umask = os.umask(077)
          os.umask(umask)
          os.chmod(self.tmp_path, 0666 & ~umask)
          if sys.platform == 'win32' and os.path.exists(filename):
            # NOTE: on windows (but not cygwin) rename will not replace an
            # existing file, so it must be preceded with a remove. Sadly there
            # is no way to make the switch atomic.
            os.remove(filename)
          os.rename(self.tmp_path, filename)
      except Exception:
        # Don't leave turds behind.
        os.unlink(self.tmp_path)
        raise

  return Writer()
|||
|
|||
|
|||
# From Alex Martelli, |
|||
# http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52560 |
|||
# ASPN: Python Cookbook: Remove duplicates from a sequence |
|||
# First comment, dated 2001/10/13. |
|||
# (Also in the printed Python Cookbook.) |
|||
|
|||
def uniquer(seq, idfun=None):
  """Return seq's items with duplicates removed, preserving first-seen order.

  idfun, if given, maps each item to the identity used for duplicate
  detection; items whose identities compare equal are considered duplicates.
  """
  if idfun is None:
    idfun = lambda x: x
  seen = set()
  result = []
  for item in seq:
    marker = idfun(item)
    if marker not in seen:
      seen.add(marker)
      result.append(item)
  return result
@ -0,0 +1,121 @@ |
|||
#!/usr/bin/python |
|||
|
|||
# Copyright (c) 2011 Google Inc. All rights reserved. |
|||
# Use of this source code is governed by a BSD-style license that can be |
|||
# found in the LICENSE file. |
|||
|
|||
import xml.dom |
|||
import xml_fix |
|||
import common |
|||
|
|||
class EasyXml(object):
  """Helper for building XML documents from nested Python lists.

  Visual Studio files have a lot of pre-defined structure; this class lets
  such structures be described as plain Python data instead of long runs of
  DOM calls.

  An element specification is a list whose first entry is the tag name,
  whose optional second entry is an attribute dictionary, and whose
  remaining entries are the element's content: strings become text nodes
  and nested lists become child elements.

  For example, ['test'] produces <test/>, and

    ['myelement', {'a': 'value1', 'b': 'value2'},
     ['childtype', 'This is'],
     ['childtype', 'it!'],
    ]

  produces

    <myelement a='value1' b='value2'>
      <childtype>This is</childtype>
      <childtype>it!</childtype>
    </myelement>
  """

  def __init__(self, name, attributes=None):
    """Construct a document whose root element is named |name|.

    Args:
      name: A string, the name of the root element.
      attributes: A dictionary, the attributes of the root.
    """
    dom_implementation = xml.dom.getDOMImplementation()
    self.doc = dom_implementation.createDocument(None, name, None)
    if attributes:
      self.SetAttributes(self.doc.documentElement, attributes)

  def AppendChildren(self, parent, children_specifications):
    """Append several child specifications to |parent|.

    Args:
      parent: The node to which the children will be added.
      children_specifications: A list of node specifications.
    """
    for child in children_specifications:
      if isinstance(child, str):
        # Plain strings become text nodes.
        parent.appendChild(self.doc.createTextNode(child))
      else:
        # Anything else is treated as an element specification.
        self.AppendNode(parent, child)

  def AppendNode(self, parent, specification):
    """Append one element specification to |parent|.

    Args:
      parent: The node to which the child will be added.
      specification: A list; the first entry is the element name, an
          optional dictionary second entry supplies attributes, and the
          remaining entries are the element's content.
    Returns:
      The XML element created.
    """
    name = specification[0]
    if not isinstance(name, str):
      raise Exception('The first item of an EasyXml specification should be '
                      'a string. Specification was ' + str(specification))
    element = self.doc.createElement(name)
    parent.appendChild(element)
    rest = specification[1:]
    # The second entry is optionally a dictionary of attributes.
    if rest and isinstance(rest[0], dict):
      self.SetAttributes(element, rest[0])
      rest = rest[1:]
    if rest:
      self.AppendChildren(element, rest)
    return element

  def SetAttributes(self, element, attribute_description):
    """Set attributes on |element| from a name-to-value dictionary.

    Args:
      element: The node to which the child will be added.
      attribute_description: A dictionary that maps attribute names to
          attribute values.
    """
    for name, value in attribute_description.iteritems():
      element.setAttribute(name, value)

  def Root(self):
    """Return the document's root element."""
    return self.doc.documentElement

  def WriteIfChanged(self, path):
    """Write the XML doc to |path|, leaving the file untouched if unchanged."""
    output = common.WriteOnDiff(path)
    fixer = xml_fix.XmlFix()
    self.doc.writexml(output, encoding='utf-8', addindent='', newl='')
    fixer.Cleanup()
    output.close()

  def __str__(self):
    """Return the document serialized as an XML string."""
    return self.doc.toxml()
@ -0,0 +1,92 @@ |
|||
#!/usr/bin/python |
|||
|
|||
# Copyright (c) 2011 Google Inc. All rights reserved. |
|||
# Use of this source code is governed by a BSD-style license that can be |
|||
# found in the LICENSE file. |
|||
|
|||
""" Unit tests for the easy_xml.py file. """ |
|||
|
|||
import easy_xml |
|||
import unittest |
|||
import StringIO |
|||
|
|||
|
|||
class TestSequenceFunctions(unittest.TestCase): |
|||
|
|||
  def setUp(self):
    # Per-test capture stream for tests that want to inspect stderr output.
    self.stderr = StringIO.StringIO()
|||
|
|||
def test_EasyXml_simple(self): |
|||
xml = easy_xml.EasyXml('test') |
|||
self.assertEqual(str(xml), '<?xml version="1.0" ?><test/>') |
|||
|
|||
def test_EasyXml_simple_with_attributes(self): |
|||
xml = easy_xml.EasyXml('test2', {'a': 'value1', 'b': 'value2'}) |
|||
self.assertEqual(str(xml), |
|||
'<?xml version="1.0" ?><test2 a="value1" b="value2"/>') |
|||
|
|||
def test_EasyXml_add_node(self): |
|||
# We want to create: |
|||
target = ('<?xml version="1.0" ?>' |
|||
'<test3>' |
|||
'<GrandParent>' |
|||
'<Parent1>' |
|||
'<Child/>' |
|||
'</Parent1>' |
|||
'<Parent2/>' |
|||
'</GrandParent>' |
|||
'</test3>') |
|||
|
|||
# Do it the hard way first: |
|||
xml = easy_xml.EasyXml('test3') |
|||
grand_parent = xml.AppendNode(xml.Root(), ['GrandParent']) |
|||
parent1 = xml.AppendNode(grand_parent, ['Parent1']) |
|||
parent2 = xml.AppendNode(grand_parent, ['Parent2']) |
|||
xml.AppendNode(parent1, ['Child']) |
|||
self.assertEqual(str(xml), target) |
|||
|
|||
# Do it the easier way: |
|||
xml = easy_xml.EasyXml('test3') |
|||
xml.AppendNode(xml.Root(), |
|||
['GrandParent', |
|||
['Parent1', ['Child']], |
|||
['Parent2']]) |
|||
self.assertEqual(str(xml), target) |
|||
|
|||
def test_EasyXml_complex(self): |
|||
# We want to create: |
|||
target = ('<?xml version="1.0" ?>' |
|||
'<Project>' |
|||
'<PropertyGroup Label="Globals">' |
|||
'<ProjectGuid>{D2250C20-3A94-4FB9-AF73-11BC5B73884B}</ProjectGuid>' |
|||
'<Keyword>Win32Proj</Keyword>' |
|||
'<RootNamespace>automated_ui_tests</RootNamespace>' |
|||
'</PropertyGroup>' |
|||
'<Import Project="$(VCTargetsPath)\\Microsoft.Cpp.props"/>' |
|||
'<PropertyGroup Condition="\'$(Configuration)|$(Platform)\'==\'' |
|||
'Debug|Win32\'" Label="Configuration">' |
|||
'<ConfigurationType>Application</ConfigurationType>' |
|||
'<CharacterSet>Unicode</CharacterSet>' |
|||
'</PropertyGroup>' |
|||
'</Project>') |
|||
|
|||
xml = easy_xml.EasyXml('Project') |
|||
xml.AppendChildren(xml.Root(), [ |
|||
['PropertyGroup', {'Label': 'Globals'}, |
|||
['ProjectGuid', '{D2250C20-3A94-4FB9-AF73-11BC5B73884B}'], |
|||
['Keyword', 'Win32Proj'], |
|||
['RootNamespace', 'automated_ui_tests'] |
|||
], |
|||
['Import', {'Project': '$(VCTargetsPath)\\Microsoft.Cpp.props'}], |
|||
['PropertyGroup', |
|||
{'Condition': "'$(Configuration)|$(Platform)'=='Debug|Win32'", |
|||
'Label': 'Configuration'}, |
|||
['ConfigurationType', 'Application'], |
|||
['CharacterSet', 'Unicode'] |
|||
] |
|||
]) |
|||
self.assertEqual(str(xml), target) |
|||
|
|||
|
|||
# Allow running this file directly as a test script.
if __name__ == '__main__':
  unittest.main()
@ -0,0 +1,56 @@ |
|||
#!/usr/bin/python |
|||
|
|||
# Copyright (c) 2011 Google Inc. All rights reserved. |
|||
# Use of this source code is governed by a BSD-style license that can be |
|||
# found in the LICENSE file. |
|||
|
|||
import collections |
|||
import gyp |
|||
import gyp.common |
|||
import json |
|||
|
|||
# This generator only dumps the dependency graph, so it doesn't need the
# static-library dependency adjustment other generators ask for.
generator_wants_static_library_dependencies_adjusted = False

# Placeholder values: this generator never expands build paths, but gyp's
# input processing still requires every well-known variable to exist.
generator_default_variables = {
  'OS': 'linux',
}
for dirname in ['INTERMEDIATE_DIR', 'SHARED_INTERMEDIATE_DIR', 'PRODUCT_DIR',
                'LIB_DIR', 'SHARED_LIB_DIR']:
  # Some gyp steps fail if these are empty(!).
  generator_default_variables[dirname] = 'dir'
for unused in ['RULE_INPUT_PATH', 'RULE_INPUT_ROOT', 'RULE_INPUT_NAME',
               'RULE_INPUT_EXT',
               'EXECUTABLE_PREFIX', 'EXECUTABLE_SUFFIX',
               'STATIC_LIB_PREFIX', 'STATIC_LIB_SUFFIX',
               'SHARED_LIB_PREFIX', 'SHARED_LIB_SUFFIX',
               'LINKER_SUPPORTS_ICF']:
  # These aren't meaningful for a JSON dump; empty strings keep variable
  # expansion from failing.
  generator_default_variables[unused] = ''
|||
|
|||
|
|||
def CalculateVariables(default_variables, params):
  """Sets the 'OS' default variable from the generator flags ('linux' if
  no 'os' flag was given)."""
  flags = params.get('generator_flags', {})
  default_variables['OS'] = flags.get('os', 'linux')
|||
|
|||
|
|||
def GenerateOutput(target_list, target_dicts, data, params):
  """Writes dump.json mapping each target to the list of its dependencies.

  Args:
    target_list: list of qualified target names to start the walk from.
    target_dicts: maps target names to their gyp dicts; only the
        'dependencies' key is read.
    data, params: unused; part of the generator interface.
  """
  # Map of target -> list of targets it depends on.
  edges = {}

  # Depth-first walk of the dependency graph; 'edges' doubles as the
  # visited set, so shared dependencies are expanded only once.
  targets_to_visit = list(target_list)
  while targets_to_visit:
    target = targets_to_visit.pop()
    if target in edges:
      continue
    deps = list(target_dicts[target].get('dependencies', []))
    edges[target] = deps
    targets_to_visit.extend(deps)

  filename = 'dump.json'
  # 'with' guarantees the file is closed even if json.dump raises.
  with open(filename, 'w') as f:
    json.dump(edges, f)
  # Parenthesized single-argument print works in Python 2 and 3.
  print('Wrote json to %s.' % filename)
@ -0,0 +1,88 @@ |
|||
#!/usr/bin/python |
|||
|
|||
# Copyright (c) 2009 Google Inc. All rights reserved. |
|||
# Use of this source code is governed by a BSD-style license that can be |
|||
# found in the LICENSE file. |
|||
|
|||
"""gypd output module |
|||
|
|||
This module produces gyp input as its output. Output files are given the |
|||
.gypd extension to avoid overwriting the .gyp files that they are generated |
|||
from. Internal references to .gyp files (such as those found in |
|||
"dependencies" sections) are not adjusted to point to .gypd files instead; |
|||
unlike other paths, which are relative to the .gyp or .gypd file, such paths |
|||
are relative to the directory from which gyp was run to create the .gypd file. |
|||
|
|||
This generator module is intended to be a sample and a debugging aid, hence |
|||
the "d" for "debug" in .gypd. It is useful to inspect the results of the |
|||
various merges, expansions, and conditional evaluations performed by gyp |
|||
and to see a representation of what would be fed to a generator module. |
|||
|
|||
It's not advisable to rename .gypd files produced by this module to .gyp, |
|||
because they will have all merges, expansions, and evaluations already |
|||
performed and the relevant constructs not present in the output; paths to |
|||
dependencies may be wrong; and various sections that do not belong in .gyp |
|||
files such as such as "included_files" and "*_excluded" will be present. |
|||
Output will also be stripped of comments. This is not intended to be a |
|||
general-purpose gyp pretty-printer; for that, you probably just want to |
|||
run "pprint.pprint(eval(open('source.gyp').read()))", which will still strip |
|||
comments but won't do all of the other things done to this module's output. |
|||
|
|||
The specific formatting of the output generated by this module is subject |
|||
to change. |
|||
""" |
|||
|
|||
|
|||
import gyp.common |
|||
import errno |
|||
import os |
|||
import pprint |
|||
|
|||
|
|||
# These variables should just be spit back out as variable references.
_generator_identity_variables = [
  'EXECUTABLE_PREFIX',
  'EXECUTABLE_SUFFIX',
  'INTERMEDIATE_DIR',
  'PRODUCT_DIR',
  'RULE_INPUT_ROOT',
  'RULE_INPUT_EXT',
  'RULE_INPUT_NAME',
  'RULE_INPUT_PATH',
  'SHARED_INTERMEDIATE_DIR',
]

# gypd doesn't define a default value for OS like many other generator
# modules.  Specify "-D OS=whatever" on the command line to provide a value.
generator_default_variables = {
}

# gypd supports multiple toolsets
generator_supports_multiple_toolsets = True

# TODO(mark): This always uses <, which isn't right.  The input module should
# notify the generator to tell it which phase it is operating in, and this
# module should use < for the early phase and then switch to > for the late
# phase.  Bonus points for carrying @ back into the output too.
# Mapping each identity variable to '<(NAME)' makes the .gypd output show
# the variable reference itself instead of an expanded value.
for v in _generator_identity_variables:
  generator_default_variables[v] = '<(%s)' % v
|||
|
|||
|
|||
def GenerateOutput(target_list, target_dicts, data, params):
  """Pretty-prints each input .gyp file's fully processed data to a .gypd.

  Args:
    target_list: qualified target names; used to discover which .gyp files
        contributed targets.
    target_dicts: unused here; part of the generator interface.
    data: maps input file names to their fully processed gyp data.
    params: params['options'].suffix is inserted before the '.gypd'
        extension of each output file.
  """
  # Map of output file name -> the input .gyp it is generated from; a dict
  # so each .gyp is dumped exactly once however many targets it defines.
  output_files = {}
  for qualified_target in target_list:
    [input_file, target] = \
        gyp.common.ParseQualifiedTarget(qualified_target)[0:2]

    # Only .gyp inputs produce a .gypd dump.
    if input_file[-4:] != '.gyp':
      continue
    input_file_stem = input_file[:-4]
    output_file = input_file_stem + params['options'].suffix + '.gypd'

    if not output_file in output_files:
      output_files[output_file] = input_file

  # items() works on Python 2 and 3; 'with' closes the file even if
  # pprint raises.
  for output_file, input_file in output_files.items():
    with open(output_file, 'w') as output:
      pprint.pprint(data[input_file], output)
@ -0,0 +1,57 @@ |
|||
#!/usr/bin/python |
|||
|
|||
# Copyright (c) 2009 Google Inc. All rights reserved. |
|||
# Use of this source code is governed by a BSD-style license that can be |
|||
# found in the LICENSE file. |
|||
|
|||
"""gypsh output module |
|||
|
|||
gypsh is a GYP shell. It's not really a generator per se. All it does is |
|||
fire up an interactive Python session with a few local variables set to the |
|||
variables passed to the generator. Like gypd, it's intended as a debugging |
|||
aid, to facilitate the exploration of .gyp structures after being processed |
|||
by the input module. |
|||
|
|||
The expected usage is "gyp -f gypsh -D OS=desired_os". |
|||
""" |
|||
|
|||
|
|||
import code |
|||
import sys |
|||
|
|||
|
|||
# All of this stuff about generator variables was lovingly ripped from gypd.py.
# That module has a much better description of what's going on and why.
_generator_identity_variables = [
  'EXECUTABLE_PREFIX',
  'EXECUTABLE_SUFFIX',
  'INTERMEDIATE_DIR',
  'PRODUCT_DIR',
  'RULE_INPUT_ROOT',
  'RULE_INPUT_EXT',
  'RULE_INPUT_NAME',
  'RULE_INPUT_PATH',
  'SHARED_INTERMEDIATE_DIR',
]

# No default for OS; pass "-D OS=desired_os" on the command line.
generator_default_variables = {
}

# Expand each identity variable back to a '<(NAME)' reference so the
# inspected data shows the variable, not a concrete value.
for v in _generator_identity_variables:
  generator_default_variables[v] = '<(%s)' % v
|||
|
|||
|
|||
def GenerateOutput(target_list, target_dicts, data, params):
  """Drops into an interactive Python prompt with the gyp data in scope.

  Args:
    target_list: list of qualified target names.
    target_dicts: maps target names to their gyp dicts.
    data: maps input file names to their processed gyp data.
    params: unused; part of the generator interface.
  """
  # Renamed from 'locals' so the builtin isn't shadowed; the banner text
  # below still says "locals" because that's what the interactive user
  # types to inspect them.
  interactive_locals = {
        'target_list': target_list,
        'target_dicts': target_dicts,
        'data': data,
      }

  # Use a banner that looks like the stock Python one and like what
  # code.interact uses by default, but tack on something to indicate what
  # locals are available, and identify gypsh.
  banner='Python %s on %s\nlocals.keys() = %s\ngypsh' % \
         (sys.version, sys.platform, repr(sorted(interactive_locals.keys())))

  code.interact(banner, local=interactive_locals)
File diff suppressed because it is too large
File diff suppressed because it is too large
File diff suppressed because it is too large
File diff suppressed because it is too large
File diff suppressed because it is too large
@ -0,0 +1,187 @@ |
|||
#!/usr/bin/python |
|||
# Copyright (c) 2011 Google Inc. All rights reserved. |
|||
# Use of this source code is governed by a BSD-style license that can be |
|||
# found in the LICENSE file. |
|||
|
|||
"""Utility functions to perform Xcode-style build steps. |
|||
|
|||
These functions are executed via gyp-mac-tool when using the Makefile generator. |
|||
""" |
|||
|
|||
import os |
|||
import fcntl |
|||
import plistlib |
|||
import shutil |
|||
import string |
|||
import subprocess |
|||
import sys |
|||
|
|||
def main(args):
  """Builds a MacTool and hands it the raw argument list to dispatch."""
  MacTool().Dispatch(args)
|||
|
|||
class MacTool(object):
  """This class performs all the Mac tooling steps. The methods can either be
  executed directly, or dispatched from an argument list."""

  def Dispatch(self, args):
    """Dispatches a string command to a method.

    args[0] is a dashed command name (e.g. 'copy-info-plist'); the rest are
    passed through as positional arguments to the matching Exec* method.
    """
    if len(args) < 1:
      raise Exception("Not enough arguments")

    method = "Exec%s" % self._CommandifyName(args[0])
    getattr(self, method)(*args[1:])

  def _CommandifyName(self, name_string):
    """Transforms a tool name like copy-info-plist to CopyInfoPlist."""
    return name_string.title().replace('-', '')

  def ExecFlock(self, lockfile, *cmd_list):
    """Emulates the most basic behavior of Linux's flock(1)."""
    # Rely on exception handling to report errors.  The descriptor is
    # intentionally left open: the lock is held until process exit.
    fd = os.open(lockfile, os.O_RDONLY|os.O_NOCTTY|os.O_CREAT, 0o666)
    fcntl.flock(fd, fcntl.LOCK_EX)
    return subprocess.call(cmd_list)

  def ExecCopyInfoPlist(self, source, dest):
    """Copies the |source| Info.plist to |dest|, expanding ${VAR}
    references from the environment along the way."""
    # Read the source Info.plist into memory.
    fd = open(source, 'r')
    lines = fd.read()
    fd.close()

    # Go through all the environment variables and replace them as variables
    # in the file.  Underscore-prefixed names are skipped (shell internals).
    for key in os.environ:
      if key.startswith('_'):
        continue
      evar = '${%s}' % key
      # str.replace instead of the deprecated string.replace module function.
      lines = lines.replace(evar, os.environ[key])

    # Write out the file with variables replaced.
    fd = open(dest, 'w')
    fd.write(lines)
    fd.close()

    # Now write out PkgInfo file now that the Info.plist file has been
    # "compiled".
    self._WritePkgInfo(dest)

  def _WritePkgInfo(self, info_plist):
    """This writes the PkgInfo file from the data stored in Info.plist."""
    plist = plistlib.readPlist(info_plist)
    if not plist:
      return

    # The format of PkgInfo is eight characters, representing the bundle type
    # and bundle signature, each four characters. If either is missing, four
    # '?' characters are used instead.
    package_type = plist['CFBundlePackageType']
    if len(package_type) != 4:
      package_type = '?' * 4
    signature_code = plist['CFBundleSignature']
    if len(signature_code) != 4:
      signature_code = '?' * 4

    dest = os.path.join(os.path.dirname(info_plist), 'PkgInfo')
    fp = open(dest, 'w')
    fp.write('%s%s' % (package_type, signature_code))
    fp.close()

  def ExecPackageFramework(self, framework, version):
    """Takes a path to Something.framework and the Current version of that and
    sets up all the symlinks."""
    # Find the name of the binary based on the part before the ".framework".
    binary = os.path.basename(framework).split('.')[0]

    CURRENT = 'Current'
    RESOURCES = 'Resources'
    VERSIONS = 'Versions'

    if not os.path.exists(os.path.join(framework, VERSIONS, version, binary)):
      # Binary-less frameworks don't seem to contain symlinks (see e.g.
      # chromium's out/Debug/org.chromium.Chromium.manifest/ bundle).
      return

    # Move into the framework directory to set the symlinks correctly.
    pwd = os.getcwd()
    os.chdir(framework)

    # Set up the Current version.
    self._Relink(version, os.path.join(VERSIONS, CURRENT))

    # Set up the root symlinks.
    self._Relink(os.path.join(VERSIONS, CURRENT, binary), binary)
    self._Relink(os.path.join(VERSIONS, CURRENT, RESOURCES), RESOURCES)

    # Back to where we were before!
    os.chdir(pwd)

  def _Relink(self, dest, link):
    """Creates a symlink to |dest| named |link|. If |link| already exists,
    it is overwritten."""
    # lexists (not exists) so a dangling symlink is also replaced.
    if os.path.lexists(link):
      os.remove(link)
    os.symlink(dest, link)

  def ExecCopyBundleResource(self, source, dest):
    """Copies a resource file to the bundle/Resources directory, performing any
    necessary compilation on each resource."""
    extension = os.path.splitext(source)[1].lower()
    if os.path.isdir(source):
      # Copy tree.
      if os.path.exists(dest):
        shutil.rmtree(dest)
      shutil.copytree(source, dest)
    elif extension == '.xib':
      self._CopyXIBFile(source, dest)
    elif extension == '.strings':
      self._CopyStringsFile(source, dest)
    # TODO: Given that files with arbitrary extensions can be copied to the
    # bundle, we will want to get rid of this whitelist eventually.
    elif extension in [
        '.icns', '.manifest', '.pak', '.pdf', '.png', '.sb', '.sh',
        '.ttf', '.sdef']:
      shutil.copyfile(source, dest)
    else:
      raise NotImplementedError(
          "Don't know how to copy bundle resources of type %s while copying "
          "%s to %s)" % (extension, source, dest))

  def _CopyXIBFile(self, source, dest):
    """Compiles a XIB file with ibtool into a binary plist in the bundle."""
    args = ['/Developer/usr/bin/ibtool', '--errors', '--warnings',
        '--notices', '--output-format', 'human-readable-text', '--compile',
        dest, source]
    subprocess.call(args)

  def _CopyStringsFile(self, source, dest):
    """Copies a .strings file using iconv to reconvert the input into UTF-16."""
    input_code = self._DetectInputEncoding(source) or "UTF-8"
    fp = open(dest, 'w')
    args = ['/usr/bin/iconv', '--from-code', input_code, '--to-code',
        'UTF-16', source]
    subprocess.call(args, stdout=fp)
    fp.close()

  def _DetectInputEncoding(self, file_name):
    """Reads the first few bytes from file_name and tries to guess the text
    encoding via its BOM. Returns None as a guess if it can't detect it."""
    fp = open(file_name, 'rb')
    try:
      header = fp.read(3)
    except Exception:
      # BUG FIX: the original said "except e:", which references an
      # undefined name and would itself raise NameError on a read failure.
      fp.close()
      return None
    fp.close()
    # Byte-string literals: identical to the original str literals on
    # Python 2, and correct against 'rb' data on Python 3.
    if header.startswith(b"\xFE\xFF"):
      return "UTF-16BE"
    elif header.startswith(b"\xFF\xFE"):
      return "UTF-16LE"
    elif header.startswith(b"\xEF\xBB\xBF"):
      return "UTF-8"
    else:
      return None
|||
|
|||
if __name__ == '__main__':
  # Propagate the dispatched command's status back to the shell.
  sys.exit(main(sys.argv[1:]))
@ -0,0 +1,77 @@ |
|||
#!/usr/bin/python |
|||
|
|||
# Copyright (c) 2011 Google Inc. All rights reserved. |
|||
# Use of this source code is governed by a BSD-style license that can be |
|||
# found in the LICENSE file. |
|||
|
|||
import os |
|||
import tempfile |
|||
import shutil |
|||
import subprocess |
|||
|
|||
def TestCommands(commands, files={}, env={}):
  """Run commands in a temporary directory, returning true if they all succeed.
  Return false on failures or if any commands produce output.

  Arguments:
    commands: an array of shell-interpretable commands, e.g. ['ls -l', 'pwd']
        each will be expanded with Python %-expansion using env first.
    files: a dictionary mapping filename to contents;
        files will be created in the temporary directory before running
        the command.
    env: a dictionary of strings to expand commands with.

  Note: the mutable default arguments are safe here because neither dict
  is modified; they are kept to preserve the public signature.
  """
  tempdir = tempfile.mkdtemp()
  try:
    for name, contents in files.items():
      f = open(os.path.join(tempdir, name), 'wb')
      f.write(contents)
      f.close()
    for command in commands:
      proc = subprocess.Popen(command % env, shell=True,
                              stdout=subprocess.PIPE,
                              stderr=subprocess.STDOUT,
                              cwd=tempdir)
      output = proc.communicate()[0]
      # Any output at all (stdout or stderr) also counts as failure.
      if proc.returncode != 0 or output:
        return False
    return True
  finally:
    shutil.rmtree(tempdir)
  # (The original had an unreachable 'return False' here; removed.)
|||
|
|||
|
|||
def TestArSupportsT(ar_command='ar', cc_command='cc'):
  """Test whether 'ar' supports the 'T' (thin archive) flag."""
  steps = ['%(cc)s -c test.c',
           '%(ar)s crsT test.a test.o',
           '%(cc)s test.a']
  return TestCommands(steps,
                      files={'test.c': 'int main(){}'},
                      env={'ar': ar_command, 'cc': cc_command})
|||
|
|||
|
|||
def TestLinkerSupportsThreads(cc_command='cc'):
  """Test whether the linker supports the --threads flag."""
  sources = {'test.c': 'int main(){}'}
  return TestCommands(['%(cc)s -Wl,--threads test.c'],
                      files=sources,
                      env={'cc': cc_command})
|||
|
|||
|
|||
def TestLinkerSupportsICF(cc_command='cc'):
  """Test whether the linker supports identical code folding."""
  sources = {'test.c': 'int main(){}'}
  return TestCommands(['%(cc)s -Wl,--icf=safe test.c'],
                      files=sources,
                      env={'cc': cc_command})
|||
|
|||
|
|||
if __name__ == '__main__':
  # Run the various test functions and print the results.
  # NOTE(review): TestLinkerSupportsICF is defined above but not exercised
  # here — confirm whether that is intentional.
  def RunTest(description, function, **kwargs):
    # The trailing comma keeps the 'ok'/'fail' verdict on the same line
    # (Python 2 print-statement syntax).
    print "Testing " + description + ':',
    if function(**kwargs):
      print 'ok'
    else:
      print 'fail'
  RunTest("ar 'T' flag", TestArSupportsT)
  RunTest("ar 'T' flag with ccache", TestArSupportsT, cc_command='ccache cc')
  RunTest("ld --threads", TestLinkerSupportsThreads)
File diff suppressed because it is too large
@ -0,0 +1,70 @@ |
|||
#!/usr/bin/python |
|||
# Copyright (c) 2011 Google Inc. All rights reserved. |
|||
# Use of this source code is governed by a BSD-style license that can be |
|||
# found in the LICENSE file. |
|||
|
|||
"""Applies a fix to CR LF TAB handling in xml.dom. |
|||
|
|||
Fixes this: http://code.google.com/p/chromium/issues/detail?id=76293 |
|||
Working around this: http://bugs.python.org/issue5752 |
|||
TODO(bradnelson): Consider dropping this when we drop XP support. |
|||
""" |
|||
|
|||
|
|||
import xml.dom.minidom |
|||
|
|||
|
|||
def _Replacement_write_data(writer, data, is_attrib=False):
  """Writes datachars to writer, escaping XML special characters.

  Unlike stock minidom, CR/LF/TAB inside attribute values are escaped as
  numeric character references so they survive re-parsing (see the module
  docstring for the bugs this works around).
  """
  data = data.replace("&", "&amp;").replace("<", "&lt;")
  data = data.replace("\"", "&quot;").replace(">", "&gt;")
  if is_attrib:
    data = data.replace(
        "\r", "&#xD;").replace(
        "\n", "&#xA;").replace(
        "\t", "&#x9;")
  writer.write(data)
|||
|
|||
|
|||
def _Replacement_writexml(self, writer, indent="", addindent="", newl=""):
  """Replacement for Element.writexml that routes attribute values through
  the patched _Replacement_write_data (see module docstring)."""
  # indent = current indentation
  # addindent = indentation to add to higher levels
  # newl = newline string
  writer.write(indent+"<" + self.tagName)

  attrs = self._get_attributes()
  # Sorted attribute order keeps the serialized output deterministic.
  a_names = attrs.keys()
  a_names.sort()

  for a_name in a_names:
    writer.write(" %s=\"" % a_name)
    _Replacement_write_data(writer, attrs[a_name].value, is_attrib=True)
    writer.write("\"")
  if self.childNodes:
    writer.write(">%s" % newl)
    for node in self.childNodes:
      node.writexml(writer, indent + addindent, addindent, newl)
    writer.write("%s</%s>%s" % (indent, self.tagName, newl))
  else:
    # No children: emit a self-closing tag.
    writer.write("/>%s" % newl)
|||
|
|||
|
|||
class XmlFix(object):
  """Object to manage temporary patching of xml.dom.minidom."""

  def __init__(self):
    # Preserve current xml.dom.minidom functions.
    self.write_data = xml.dom.minidom._write_data
    self.writexml = xml.dom.minidom.Element.writexml
    # Inject replacement versions of a function and a method.
    xml.dom.minidom._write_data = _Replacement_write_data
    xml.dom.minidom.Element.writexml = _Replacement_writexml

  def Cleanup(self):
    # self.write_data doubles as the "patch is active" flag, making
    # Cleanup idempotent (it also runs from __del__ below).
    if self.write_data:
      xml.dom.minidom._write_data = self.write_data
      xml.dom.minidom.Element.writexml = self.writexml
      self.write_data = None

  def __del__(self):
    # Safety net: restore minidom even if the caller forgot Cleanup().
    self.Cleanup()
@ -0,0 +1,26 @@ |
|||
#!/usr/bin/env python |
|||
|
|||
# Copyright (c) 2009 Google Inc. All rights reserved. |
|||
# Use of this source code is governed by a BSD-style license that can be |
|||
# found in the LICENSE file. |
|||
|
|||
from distutils.core import setup |
|||
from distutils.command.install import install |
|||
from distutils.command.install_lib import install_lib |
|||
from distutils.command.install_scripts import install_scripts |
|||
|
|||
# Standard distutils packaging for gyp: the library lives under pylib/ and
# the 'gyp' launcher script is installed onto PATH.
setup(
  name='gyp',
  version='0.1',
  description='Generate Your Projects',
  author='Chromium Authors',
  author_email='chromium-dev@googlegroups.com',
  url='http://code.google.com/p/gyp',
  package_dir = {'': 'pylib'},
  packages=['gyp', 'gyp.generator'],

  scripts = ['gyp'],
  cmdclass = {'install': install,
              'install_lib': install_lib,
              'install_scripts': install_scripts},
)
@ -0,0 +1,39 @@ |
|||
#!/usr/bin/env python |
|||
import glob |
|||
import os |
|||
import shlex |
|||
import sys |
|||
|
|||
# Locate the node source root relative to this script (tools/..).
tool_dir = os.path.dirname(__file__)
node_root = os.path.normpath(os.path.join(tool_dir, os.pardir))

# Prefer the bundled gyp over any system-wide installation.
sys.path.insert(0, os.path.join(tool_dir, 'gyp', 'pylib'))
import gyp

# Directory within which we want all generated files (including Makefiles)
# to be written.
output_dir = os.path.join(os.path.abspath(node_root), 'out')
|||
|
|||
|
|||
def run_gyp(args):
  """Invokes gyp.main with |args|, exiting the process on failure.

  Args:
    args: list of command-line arguments passed straight to gyp.
  """
  rc = gyp.main(args)
  if rc != 0:
    # Parenthesized single-argument print works in Python 2 and 3.
    print('Error running GYP')
    sys.exit(rc)
|||
|
|||
if __name__ == '__main__':
  # Forward any user-supplied gyp flags, then append our own.
  args = sys.argv[1:]
  args.append(os.path.join(tool_dir, 'all.gyp'))
  args.append('--depth=' + node_root)

  # Tell gyp to write the Makefiles into output_dir
  args.extend(['--generator-output', output_dir])

  # Tell make to write its output into the same dir
  args.extend(['-Goutput_dir=' + output_dir])

  # Build as 64-bit with every component linked as a static library.
  args.append('-Dtarget_arch=x64')
  args.append('-Dcomponent=static_library')
  args.append('-Dlibrary=static_library')
  gyp_args = list(args)
  run_gyp(gyp_args)
Loading…
Reference in new issue