mirror of https://github.com/lukechilds/node.git
Among other things, this should make it easier for people to build node.js on openbsd. (Tag: v0.10.2-release)
23 changed files with 722 additions and 500 deletions
@@ -1,98 +0,0 @@
#!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.


"""Argument-less script to select what to run on the buildbots."""


import os
import shutil
import subprocess
import sys


if sys.platform in ['win32', 'cygwin']:
  EXE_SUFFIX = '.exe'
else:
  EXE_SUFFIX = ''


BUILDBOT_DIR = os.path.dirname(os.path.abspath(__file__))
TRUNK_DIR = os.path.dirname(BUILDBOT_DIR)
ROOT_DIR = os.path.dirname(TRUNK_DIR)
OUT_DIR = os.path.join(TRUNK_DIR, 'out')


def GypTestFormat(title, format=None, msvs_version=None):
  """Run the gyp tests for a given format, emitting annotator tags.

  See annotator docs at:
  https://sites.google.com/a/chromium.org/dev/developers/testing/chromium-build-infrastructure/buildbot-annotations

  Args:
    format: gyp format to test.
  Returns:
    0 for success, 1 for failure.
  """
  if not format:
    format = title

  print '@@@BUILD_STEP ' + title + '@@@'
  sys.stdout.flush()
  env = os.environ.copy()
  # TODO(bradnelson): remove this when this issue is resolved:
  # http://code.google.com/p/chromium/issues/detail?id=108251
  if format == 'ninja':
    env['NOGOLD'] = '1'
  if msvs_version:
    env['GYP_MSVS_VERSION'] = msvs_version
  retcode = subprocess.call(' '.join(
      [sys.executable, 'trunk/gyptest.py',
       '--all',
       '--passed',
       '--format', format,
       '--chdir', 'trunk',
       '--path', '../scons']),
      cwd=ROOT_DIR, env=env, shell=True)
  if retcode:
    # Emit failure tag, and keep going.
    print '@@@STEP_FAILURE@@@'
    return 1
  return 0


def GypBuild():
  # Dump out/ directory.
  print '@@@BUILD_STEP cleanup@@@'
  print 'Removing %s...' % OUT_DIR
  shutil.rmtree(OUT_DIR, ignore_errors=True)
  print 'Done.'

  retcode = 0
  if sys.platform.startswith('linux'):
    retcode += GypTestFormat('ninja')
    retcode += GypTestFormat('scons')
    retcode += GypTestFormat('make')
  elif sys.platform == 'darwin':
    retcode += GypTestFormat('ninja')
    retcode += GypTestFormat('xcode')
    retcode += GypTestFormat('make')
  elif sys.platform == 'win32':
    retcode += GypTestFormat('ninja')
    retcode += GypTestFormat('msvs-2008', format='msvs', msvs_version='2008')
    if os.environ['BUILDBOT_BUILDERNAME'] == 'gyp-win64':
      retcode += GypTestFormat('msvs-2010', format='msvs', msvs_version='2010')
  else:
    raise Exception('Unknown platform')
  if retcode:
    # TODO(bradnelson): once the annotator supports a postscript (section for
    # after the build proper that could be used for cumulative failures),
    # use that instead of this. This isolates the final return value so
    # that it isn't misattributed to the last stage.
    print '@@@BUILD_STEP failures@@@'
    sys.exit(retcode)


if __name__ == '__main__':
  GypBuild()
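For reference, the subprocess.call in GypTestFormat joins its argument list into a single shell command run from ROOT_DIR; reconstructed from the code above, the Linux ninja step amounts to roughly the following, with NOGOLD=1 set in the environment:

  python trunk/gyptest.py --all --passed --format ninja --chdir trunk --path ../scons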
@ -0,0 +1,12 @@ |
|||||
|
// Copyright (c) 2013 Google Inc. All rights reserved.
|
||||
|
// Use of this source code is governed by a BSD-style license that can be
|
||||
|
// found in the LICENSE file.
|
||||
|
|
||||
|
// This file is used to generate an empty .pdb -- with a 4KB pagesize -- that is
|
||||
|
// then used during the final link for modules that have large PDBs. Otherwise,
|
||||
|
// the linker will generate a pdb with a page size of 1KB, which imposes a limit
|
||||
|
// of 1GB on the .pdb. By generating an initial empty .pdb with the compiler
|
||||
|
// (rather than the linker), this limit is avoided. With this in place PDBs may
|
||||
|
// grow to 2GB.
|
||||
|
//
|
||||
|
// This file is referenced by the msvs_large_pdb mechanism in MSVSUtil.py.
|
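A target opts into this mechanism from its GYP definition. A minimal sketch follows; the target and source names are hypothetical, and only the msvs_large_pdb flag (read by InsertLargePdbShims in MSVSUtil.py below) comes from this patch:

  # hypothetical my_module.gyp
  {
    'targets': [
      {
        'target_name': 'my_big_module',     # hypothetical name
        'type': 'shared_library',           # a PDB-producing target type
        'sources': [ 'my_big_module.cc' ],  # hypothetical source
        'msvs_large_pdb': 1,                # enables the shim for this target
      },
    ],
  }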
@@ -0,0 +1,212 @@
# Copyright (c) 2013 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""Utility functions shared amongst the Windows generators."""

import copy
import os


_TARGET_TYPE_EXT = {
  'executable': '.exe',
  'shared_library': '.dll'
}


def _GetLargePdbShimCcPath():
  """Returns the path of the large_pdb_shim.cc file."""
  this_dir = os.path.abspath(os.path.dirname(__file__))
  src_dir = os.path.abspath(os.path.join(this_dir, '..', '..'))
  win_data_dir = os.path.join(src_dir, 'data', 'win')
  large_pdb_shim_cc = os.path.join(win_data_dir, 'large-pdb-shim.cc')
  return large_pdb_shim_cc


def _DeepCopySomeKeys(in_dict, keys):
  """Performs a partial deep-copy on |in_dict|, only copying the keys in |keys|.

  Arguments:
    in_dict: The dictionary to copy.
    keys: The keys to be copied. If a key is in this list and doesn't exist in
        |in_dict| this is not an error.
  Returns:
    The partially deep-copied dictionary.
  """
  d = {}
  for key in keys:
    if key not in in_dict:
      continue
    d[key] = copy.deepcopy(in_dict[key])
  return d
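
# For example (a quick sketch of the behaviour, values hypothetical):
#   _DeepCopySomeKeys({'a': [1, 2], 'b': 3}, ['a', 'c']) -> {'a': [1, 2]},
#   where the copied list is independent of the original.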


def _SuffixName(name, suffix):
  """Add a suffix to the end of a target.

  Arguments:
    name: name of the target (foo#target)
    suffix: the suffix to be added
  Returns:
    Target name with suffix added (foo_suffix#target)
  """
  parts = name.rsplit('#', 1)
  parts[0] = '%s_%s' % (parts[0], suffix)
  return '#'.join(parts)


def _ShardName(name, number):
  """Add a shard number to the end of a target.

  Arguments:
    name: name of the target (foo#target)
    number: shard number
  Returns:
    Target name with shard added (foo_1#target)
  """
  return _SuffixName(name, str(number))
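
# For example (a quick sketch, with a hypothetical target pair):
#   _SuffixName('base/base.gyp:base#target', 'shim') -> 'base/base.gyp:base_shim#target'
#   _ShardName('base/base.gyp:base#target', 1)       -> 'base/base.gyp:base_1#target'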


def ShardTargets(target_list, target_dicts):
  """Shard some targets apart to work around the linker's limits.

  Arguments:
    target_list: List of target pairs: 'base/base.gyp:base'.
    target_dicts: Dict of target properties keyed on target pair.
  Returns:
    Tuple of the new sharded versions of the inputs.
  """
  # Gather the targets to shard, and how many pieces.
  targets_to_shard = {}
  for t in target_dicts:
    shards = int(target_dicts[t].get('msvs_shard', 0))
    if shards:
      targets_to_shard[t] = shards
  # Shard target_list.
  new_target_list = []
  for t in target_list:
    if t in targets_to_shard:
      for i in range(targets_to_shard[t]):
        new_target_list.append(_ShardName(t, i))
    else:
      new_target_list.append(t)
  # Shard target_dict.
  new_target_dicts = {}
  for t in target_dicts:
    if t in targets_to_shard:
      for i in range(targets_to_shard[t]):
        name = _ShardName(t, i)
        new_target_dicts[name] = copy.copy(target_dicts[t])
        new_target_dicts[name]['target_name'] = _ShardName(
            new_target_dicts[name]['target_name'], i)
        sources = new_target_dicts[name].get('sources', [])
        new_sources = []
        for pos in range(i, len(sources), targets_to_shard[t]):
          new_sources.append(sources[pos])
        new_target_dicts[name]['sources'] = new_sources
    else:
      new_target_dicts[t] = target_dicts[t]
  # Shard dependencies.
  for t in new_target_dicts:
    dependencies = copy.copy(new_target_dicts[t].get('dependencies', []))
    new_dependencies = []
    for d in dependencies:
      if d in targets_to_shard:
        for i in range(targets_to_shard[d]):
          new_dependencies.append(_ShardName(d, i))
      else:
        new_dependencies.append(d)
    new_target_dicts[t]['dependencies'] = new_dependencies

  return (new_target_list, new_target_dicts)
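
# For example (a sketch with hypothetical names): a target 'foo' declaring
# 'msvs_shard': 2 with sources ['a.cc', 'b.cc', 'c.cc'] becomes foo_0 (sources
# ['a.cc', 'c.cc']) and foo_1 (sources ['b.cc']), and any target that depended
# on foo is rewritten to depend on both shards.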


def InsertLargePdbShims(target_list, target_dicts, vars):
  """Insert a shim target that forces the linker to use 4KB pagesize PDBs.

  This is a workaround for targets with PDBs greater than 1GB in size, the
  limit for the 1KB pagesize PDBs created by the linker by default.

  Arguments:
    target_list: List of target pairs: 'base/base.gyp:base'.
    target_dicts: Dict of target properties keyed on target pair.
    vars: A dictionary of common GYP variables with generator-specific values.
  Returns:
    Tuple of the shimmed version of the inputs.
  """
  # Determine which targets need shimming.
  targets_to_shim = []
  for t in target_dicts:
    target_dict = target_dicts[t]
    # We only want to shim targets that have msvs_large_pdb enabled.
    if not int(target_dict.get('msvs_large_pdb', 0)):
      continue
    # This is intended for executable, shared_library and loadable_module
    # targets where every configuration is set up to produce a PDB output.
    # If any of these conditions is not true then the shim logic will fail
    # below.
    targets_to_shim.append(t)

  large_pdb_shim_cc = _GetLargePdbShimCcPath()

  for t in targets_to_shim:
    target_dict = target_dicts[t]
    target_name = target_dict.get('target_name')

    base_dict = _DeepCopySomeKeys(target_dict,
        ['configurations', 'default_configuration', 'toolset'])

    # This is the dict for copying the source file (part of the GYP tree)
    # to the intermediate directory of the project. This is necessary because
    # we can't always build a relative path to the shim source file (on Windows
    # GYP and the project may be on different drives), and Ninja hates absolute
    # paths (it ends up generating the .obj and .obj.d alongside the source
    # file, polluting GYP's tree).
    copy_suffix = '_large_pdb_copy'
    copy_target_name = target_name + '_' + copy_suffix
    full_copy_target_name = _SuffixName(t, copy_suffix)
    shim_cc_basename = os.path.basename(large_pdb_shim_cc)
    shim_cc_dir = vars['SHARED_INTERMEDIATE_DIR'] + '/' + copy_target_name
    shim_cc_path = shim_cc_dir + '/' + shim_cc_basename
    copy_dict = copy.deepcopy(base_dict)
    copy_dict['target_name'] = copy_target_name
    copy_dict['type'] = 'none'
    copy_dict['sources'] = [ large_pdb_shim_cc ]
    copy_dict['copies'] = [{
      'destination': shim_cc_dir,
      'files': [ large_pdb_shim_cc ]
    }]

    # This is the dict for the PDB generating shim target. It depends on the
    # copy target.
    shim_suffix = '_large_pdb_shim'
    shim_target_name = target_name + '_' + shim_suffix
    full_shim_target_name = _SuffixName(t, shim_suffix)
    shim_dict = copy.deepcopy(base_dict)
    shim_dict['target_name'] = shim_target_name
    shim_dict['type'] = 'static_library'
    shim_dict['sources'] = [ shim_cc_path ]
    shim_dict['dependencies'] = [ full_copy_target_name ]

    # Set up the shim to output its PDB to the same location as the final
    # linker target.
    for config in shim_dict.get('configurations').itervalues():
      msvs = config.setdefault('msvs_settings')

      linker = msvs.pop('VCLinkerTool')  # We want to clear this dict.
      pdb_path = linker.get('ProgramDatabaseFile')

      compiler = msvs.setdefault('VCCLCompilerTool', {})
      compiler.setdefault('DebugInformationFormat', '3')
      compiler.setdefault('ProgramDataBaseFileName', pdb_path)

    # Add the new targets.
    target_list.append(full_copy_target_name)
    target_list.append(full_shim_target_name)
    target_dicts[full_copy_target_name] = copy_dict
    target_dicts[full_shim_target_name] = shim_dict

    # Update the original target to depend on the shim target.
    target_dict.setdefault('dependencies', []).append(full_shim_target_name)

  return (target_list, target_dicts)
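
Both entry points take and return the same (target_list, target_dicts) shape, so a generator can chain them. A minimal sketch of a call site, assuming a Windows-aware generator with a generator_default_variables dict that supplies SHARED_INTERMEDIATE_DIR; the wiring below is an assumption, not part of this patch:

  # Hypothetical call site inside a generator (sketch, not from this patch).
  target_list, target_dicts = ShardTargets(target_list, target_dicts)
  target_list, target_dicts = InsertLargePdbShims(
      target_list, target_dicts, generator_default_variables)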